Remove and forbid use of Guava Function, Charsets, Collections2
This commit removes, and now forbids, all uses of the Guava classes Function, Charsets, and Collections2 across the codebase. This is one of many steps in the eventual removal of Guava as a dependency. Relates #13224
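
Most of the hunks below are the same mechanical substitution: Guava's Charsets constants give way to java.nio.charset.StandardCharsets, which the JDK has provided since Java 7. A minimal sketch of the pattern (the class name and sample string are illustrative, not from the commit):

    import java.nio.charset.StandardCharsets;

    public class CharsetsMigrationSketch {
        public static void main(String[] args) {
            String source = "{\"settings\":{}}";

            // Before (Guava): byte[] bytes = source.getBytes(Charsets.UTF_8);
            // After (JDK):    the constant is a drop-in replacement.
            byte[] bytes = source.getBytes(StandardCharsets.UTF_8);

            // Round-trip back to a String, as several hunks below do.
            String roundTripped = new String(bytes, StandardCharsets.UTF_8);
            System.out.println(roundTripped.equals(source)); // true
        }
    }
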
parent d0deb28336
commit 40959068d5
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.action.admin.indices.create;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.elasticsearch.ElasticsearchGenerationException;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.action.ActionRequest;
@@ -338,7 +338,7 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
      * Sets the settings and mappings as a single source.
      */
     public CreateIndexRequest source(String source) {
-        return source(source.getBytes(Charsets.UTF_8));
+        return source(source.getBytes(StandardCharsets.UTF_8));
     }
 
     /**
@@ -374,7 +374,7 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
                 throw new ElasticsearchParseException("failed to parse source for create index", e);
             }
         } else {
-            settings(new String(source.toBytes(), Charsets.UTF_8));
+            settings(new String(source.toBytes(), StandardCharsets.UTF_8));
         }
         return this;
     }

@@ -19,7 +19,7 @@
 
 package org.elasticsearch.action.index;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.elasticsearch.ElasticsearchGenerationException;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.Version;
@@ -377,7 +377,7 @@ public class IndexRequest extends ReplicationRequest<IndexRequest> implements Do
      * or using the {@link #source(byte[])}.
      */
     public IndexRequest source(String source) {
-        this.source = new BytesArray(source.getBytes(Charsets.UTF_8));
+        this.source = new BytesArray(source.getBytes(StandardCharsets.UTF_8));
         return this;
     }
 
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.action.indexedscripts.put;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.elasticsearch.ElasticsearchGenerationException;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestValidationException;
@@ -205,7 +205,7 @@ public class PutIndexedScriptRequest extends ActionRequest<PutIndexedScriptReque
      * or using the {@link #source(byte[])}.
      */
     public PutIndexedScriptRequest source(String source) {
-        this.source = new BytesArray(source.getBytes(Charsets.UTF_8));
+        this.source = new BytesArray(source.getBytes(StandardCharsets.UTF_8));
         return this;
     }
 
@@ -21,7 +21,7 @@ package org.elasticsearch.cluster.metadata;
 
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.apache.lucene.util.CollectionUtil;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.Version;
@@ -489,7 +489,7 @@ public class MetaDataCreateIndexService extends AbstractComponent {
             }
             int lastDotIndex = fileName.lastIndexOf('.');
             String mappingType = lastDotIndex != -1 ? mappingFile.getFileName().toString().substring(0, lastDotIndex) : mappingFile.getFileName().toString();
-            try (BufferedReader reader = Files.newBufferedReader(mappingFile, Charsets.UTF_8)) {
+            try (BufferedReader reader = Files.newBufferedReader(mappingFile, StandardCharsets.UTF_8)) {
                 String mappingSource = Streams.copyToString(reader);
                 if (mappings.containsKey(mappingType)) {
                     XContentHelper.mergeDefaults(mappings.get(mappingType), parseMapping(mappingSource));

@@ -18,7 +18,7 @@
  */
 package org.elasticsearch.common;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.monitor.jvm.JvmInfo;
 
@@ -74,7 +74,7 @@ public final class PidFile {
         }
 
         try(OutputStream stream = Files.newOutputStream(path, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING)) {
-            stream.write(Long.toString(pid).getBytes(Charsets.UTF_8));
+            stream.write(Long.toString(pid).getBytes(StandardCharsets.UTF_8));
         }
 
         if (deleteOnExit) {

@@ -19,7 +19,7 @@
 
 package org.elasticsearch.common.bytes;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.io.Channels;
 import org.elasticsearch.common.io.stream.StreamInput;
@@ -151,7 +151,7 @@ public class BytesArray implements BytesReference {
         if (length == 0) {
             return "";
         }
-        return new String(bytes, offset, length, Charsets.UTF_8);
+        return new String(bytes, offset, length, StandardCharsets.UTF_8);
     }
 
     @Override

@@ -18,7 +18,7 @@
  */
 package org.elasticsearch.common.bytes;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.io.Channels;
 import org.elasticsearch.common.io.stream.StreamInput;
@@ -111,7 +111,7 @@ public class ChannelBufferBytesReference implements BytesReference {
 
     @Override
     public String toUtf8() {
-        return buffer.toString(Charsets.UTF_8);
+        return buffer.toString(StandardCharsets.UTF_8);
     }
 
     @Override

@@ -20,6 +20,8 @@
 package org.elasticsearch.common.collect;
 
 import com.google.common.base.Preconditions;
+import com.google.common.base.Supplier;
+import com.google.common.base.Suppliers;
 import com.google.common.collect.UnmodifiableIterator;
 import org.apache.lucene.util.mutable.MutableValueInt;
 
@@ -34,6 +36,9 @@ import java.util.Iterator;
 import java.util.Map;
 import java.util.NoSuchElementException;
 import java.util.Set;
+import java.util.function.Consumer;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
 
 /**
  * An immutable map whose writes result in a new copy of the map to be created.
@@ -514,14 +519,18 @@ public final class CopyOnWriteHashMap<K, V> extends AbstractMap<K, V> {
         return copyAndPutAll(other.entrySet());
     }
 
-    <K1 extends K, V1 extends V> CopyOnWriteHashMap<K, V> copyAndPutAll(Collection<Map.Entry<K1, V1>> entries) {
+    public <K1 extends K, V1 extends V> CopyOnWriteHashMap<K, V> copyAndPutAll(Iterable<Entry<K1, V1>> entries) {
         CopyOnWriteHashMap<K, V> result = this;
-        for (Map.Entry<K1, V1> entry : entries) {
+        for (Entry<K1, V1> entry : entries) {
             result = result.copyAndPut(entry.getKey(), entry.getValue());
         }
         return result;
     }
 
+    public <K1 extends K, V1 extends V> CopyOnWriteHashMap<K, V> copyAndPutAll(Stream<Entry<K1, V1>> entries) {
+        return copyAndPutAll(entries::iterator);
+    }
+
     /**
      * Remove the given key from this map. The current hash table is not modified.
     */

@@ -19,14 +19,10 @@
 
 package org.elasticsearch.common.collect;
 
-import com.google.common.base.Function;
-import com.google.common.collect.Collections2;
 import com.google.common.collect.ForwardingSet;
-
 import java.util.AbstractMap;
 import java.util.Collection;
-import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
 
 /**
@@ -78,13 +74,9 @@ public class CopyOnWriteHashSet<T> extends ForwardingSet<T> {
      * case of equality.
      */
     public CopyOnWriteHashSet<T> copyAndAddAll(Collection<? extends T> entries) {
-        final Collection<Entry<T, Boolean>> asMapEntries = Collections2.transform(entries,new Function<T, Map.Entry<T, Boolean>>() {
-            @Override
-            public Entry<T, Boolean> apply(T input) {
-                return new AbstractMap.SimpleImmutableEntry<>(input, true);
-            }
-        });
-        CopyOnWriteHashMap<T, Boolean> updated = this.map.copyAndPutAll(asMapEntries);
+        CopyOnWriteHashMap<T, Boolean> updated = this.map.copyAndPutAll(entries.stream().map(
+                p -> new AbstractMap.SimpleImmutableEntry<>(p, true)
+        ));
         return new CopyOnWriteHashSet<>(updated);
     }
 
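
Where Guava's Collections2.transform plus an anonymous Function produced a lazily transformed view, as in CopyOnWriteHashSet.copyAndAddAll above, the commit substitutes a Stream mapped with a lambda. A standalone sketch of the same reshaping using only JDK types (the keys and entry values are illustrative):

    import java.util.AbstractMap;
    import java.util.Arrays;
    import java.util.List;
    import java.util.Map;
    import java.util.stream.Collectors;

    public class TransformMigrationSketch {
        public static void main(String[] args) {
            List<String> keys = Arrays.asList("a", "b", "c");

            // Before (Guava): a lazy view built from an anonymous Function:
            //   Collections2.transform(keys, new Function<String, Map.Entry<String, Boolean>>() { ... });
            // After (JDK 8): the same mapping as a stream pipeline.
            List<Map.Entry<String, Boolean>> asMapEntries = keys.stream()
                    .<Map.Entry<String, Boolean>>map(k -> new AbstractMap.SimpleImmutableEntry<>(k, true))
                    .collect(Collectors.toList());

            System.out.println(asMapEntries); // [a=true, b=true, c=true]
        }
    }
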
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.common.http.client;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import com.google.common.hash.Hashing;
 import org.apache.lucene.util.IOUtils;
 import org.elasticsearch.*;
@@ -133,7 +133,7 @@ public class HttpDownloadHelper {
         try {
             if (download(checksumURL, checksumFile, progress, timeout)) {
                 byte[] fileBytes = Files.readAllBytes(originalFile);
-                List<String> checksumLines = Files.readAllLines(checksumFile, Charsets.UTF_8);
+                List<String> checksumLines = Files.readAllLines(checksumFile, StandardCharsets.UTF_8);
                 if (checksumLines.size() != 1) {
                     throw new ElasticsearchCorruptionException("invalid format for checksum file (" +
                             hashFunc.name() + "), expected 1 line, got: " + checksumLines.size());
@@ -345,7 +345,7 @@ public class HttpDownloadHelper {
             if (!isSecureProcotol) {
                 throw new IOException("Basic auth is only supported for HTTPS!");
             }
-            String basicAuth = Base64.encodeBytes(aSource.getUserInfo().getBytes(Charsets.UTF_8));
+            String basicAuth = Base64.encodeBytes(aSource.getUserInfo().getBytes(StandardCharsets.UTF_8));
             connection.setRequestProperty("Authorization", "Basic " + basicAuth);
         }
 
@@ -19,6 +19,7 @@ package org.elasticsearch.common.inject.internal;
 import com.google.common.cache.CacheBuilder;
 import com.google.common.cache.CacheLoader;
 import com.google.common.cache.LoadingCache;
+import org.elasticsearch.common.SuppressForbidden;
 
 import java.util.concurrent.ExecutionException;
 
@@ -28,6 +29,8 @@ import java.util.concurrent.ExecutionException;
  *
  * @author jessewilson@google.com (Jesse Wilson)
  */
+// TODO remove this suppression once we get rid of the CacheBuilder and friends
+@SuppressForbidden(reason = "this uses Function in it's method declaration somewhere")
 public abstract class FailableCache<K, V> {
 
     private final LoadingCache<K, Object> delegate = CacheBuilder.newBuilder().build(new CacheLoader<K, Object>() {

@@ -19,7 +19,7 @@
 
 package org.elasticsearch.common.io;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.elasticsearch.common.util.Callback;
 
 import java.io.BufferedReader;
@@ -234,7 +234,7 @@ public abstract class Streams {
     }
 
     public static void readAllLines(InputStream input, Callback<String> callback) throws IOException {
-        try (BufferedReader reader = new BufferedReader(new InputStreamReader(input, Charsets.UTF_8))) {
+        try (BufferedReader reader = new BufferedReader(new InputStreamReader(input, StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                callback.handle(line);

@@ -19,7 +19,7 @@
 
 package org.elasticsearch.common.settings;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import com.google.common.collect.ImmutableMap;
 import org.elasticsearch.Version;
 import org.elasticsearch.common.Booleans;
@@ -1066,7 +1066,7 @@ public final class Settings implements ToXContent {
     public Builder loadFromStream(String resourceName, InputStream is) throws SettingsException {
         SettingsLoader settingsLoader = SettingsLoaderFactory.loaderFromResource(resourceName);
         try {
-            Map<String, String> loadedSettings = settingsLoader.load(Streams.copyToString(new InputStreamReader(is, Charsets.UTF_8)));
+            Map<String, String> loadedSettings = settingsLoader.load(Streams.copyToString(new InputStreamReader(is, StandardCharsets.UTF_8)));
             put(loadedSettings);
         } catch (Exception e) {
             throw new SettingsException("Failed to load settings from [" + resourceName + "]", e);

@@ -18,7 +18,7 @@
  */
 package org.elasticsearch.common.text;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.elasticsearch.common.bytes.BytesReference;
 
 /**
@@ -54,7 +54,7 @@ public class BytesText implements Text {
         if (!bytes.hasArray()) {
             bytes = bytes.toBytesArray();
         }
-        return new String(bytes.array(), bytes.arrayOffset(), bytes.length(), Charsets.UTF_8);
+        return new String(bytes.array(), bytes.arrayOffset(), bytes.length(), StandardCharsets.UTF_8);
     }
 
     @Override

@@ -18,7 +18,7 @@
  */
 package org.elasticsearch.common.text;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 
@@ -61,7 +61,7 @@ public class StringAndBytesText implements Text {
     @Override
     public BytesReference bytes() {
         if (bytes == null) {
-            bytes = new BytesArray(text.getBytes(Charsets.UTF_8));
+            bytes = new BytesArray(text.getBytes(StandardCharsets.UTF_8));
         }
         return bytes;
     }
@@ -78,7 +78,7 @@ public class StringAndBytesText implements Text {
             if (!bytes.hasArray()) {
                 bytes = bytes.toBytesArray();
             }
-            text = new String(bytes.array(), bytes.arrayOffset(), bytes.length(), Charsets.UTF_8);
+            text = new String(bytes.array(), bytes.arrayOffset(), bytes.length(), StandardCharsets.UTF_8);
         }
         return text;
     }

@@ -18,7 +18,7 @@
  */
 package org.elasticsearch.common.text;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 
@@ -54,7 +54,7 @@ public class StringText implements Text {
 
     @Override
     public BytesReference bytes() {
-        return new BytesArray(text.getBytes(Charsets.UTF_8));
+        return new BytesArray(text.getBytes(StandardCharsets.UTF_8));
     }
 
     @Override

@@ -23,7 +23,6 @@ import com.carrotsearch.hppc.DoubleArrayList;
 import com.carrotsearch.hppc.FloatArrayList;
 import com.carrotsearch.hppc.LongArrayList;
 import com.carrotsearch.hppc.ObjectArrayList;
-import com.google.common.base.Function;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Iterators;
 import org.apache.lucene.util.BytesRef;
@@ -394,20 +393,6 @@ public enum CollectionUtils {
         }
     }
 
-    public static <E, T> List<T> eagerTransform(List<E> list, Function<E, T> transform) {
-        if (list == null) {
-            throw new NullPointerException("list");
-        }
-        if (transform == null) {
-            throw new NullPointerException("transform");
-        }
-        List<T> result = new ArrayList<>(list.size());
-        for (E element : list) {
-            result.add(transform.apply(element));
-        }
-        return result;
-    }
-
     public static <E> ArrayList<E> arrayAsArrayList(E... elements) {
         if (elements == null) {
             throw new NullPointerException("elements");
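
With java.util.stream available, the hand-rolled eagerTransform helper deleted above is a one-liner, which is why the commit removes it and rewrites every caller. The equivalent pipeline (sample data is illustrative):

    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;

    public class EagerTransformSketch {
        public static void main(String[] args) {
            // Was: List<Integer> lengths = eagerTransform(words, lengthFunction);
            List<Integer> lengths = Arrays.asList("one", "two", "three").stream()
                    .map(String::length)           // the transform
                    .collect(Collectors.toList()); // eager, like the removed helper
            System.out.println(lengths); // [3, 3, 5]
        }
    }
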
@@ -18,7 +18,7 @@
  */
 package org.elasticsearch.common.util;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import com.google.common.primitives.Ints;
 import org.apache.lucene.index.CheckIndex;
 import org.apache.lucene.index.IndexWriter;
@@ -129,14 +129,14 @@ public class MultiDataPathUpgrader {
      */
     public void checkIndex(ShardPath targetPath) throws IOException {
         BytesStreamOutput os = new BytesStreamOutput();
-        PrintStream out = new PrintStream(os, false, Charsets.UTF_8.name());
+        PrintStream out = new PrintStream(os, false, StandardCharsets.UTF_8.name());
         try (Directory directory = new SimpleFSDirectory(targetPath.resolveIndex());
             final CheckIndex checkIndex = new CheckIndex(directory)) {
             checkIndex.setInfoStream(out);
             CheckIndex.Status status = checkIndex.checkIndex();
             out.flush();
             if (!status.clean) {
-                logger.warn("check index [failure]\n{}", new String(os.bytes().toBytes(), Charsets.UTF_8));
+                logger.warn("check index [failure]\n{}", new String(os.bytes().toBytes(), StandardCharsets.UTF_8));
                 throw new IllegalStateException("index check failure");
             }
         }

@@ -19,7 +19,7 @@
 
 package org.elasticsearch.common.xcontent;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.Strings;
@@ -1226,7 +1226,7 @@ public final class XContentBuilder implements BytesStream, Releasable {
     public String string() throws IOException {
         close();
         BytesArray bytesArray = bytes().toBytesArray();
-        return new String(bytesArray.array(), bytesArray.arrayOffset(), bytesArray.length(), Charsets.UTF_8);
+        return new String(bytesArray.array(), bytesArray.arrayOffset(), bytesArray.length(), StandardCharsets.UTF_8);
     }
 
 
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.common.xcontent;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.bytes.BytesArray;
@@ -100,7 +100,7 @@ public class XContentHelper {
         XContentType xContentType = XContentFactory.xContentType(bytes);
         if (xContentType == XContentType.JSON && !reformatJson) {
             BytesArray bytesArray = bytes.toBytesArray();
-            return new String(bytesArray.array(), bytesArray.arrayOffset(), bytesArray.length(), Charsets.UTF_8);
+            return new String(bytesArray.array(), bytesArray.arrayOffset(), bytesArray.length(), StandardCharsets.UTF_8);
         }
         XContentParser parser = null;
         try {
@@ -126,7 +126,7 @@ public class XContentHelper {
     public static String convertToJson(byte[] data, int offset, int length, boolean reformatJson, boolean prettyPrint) throws IOException {
         XContentType xContentType = XContentFactory.xContentType(data, offset, length);
         if (xContentType == XContentType.JSON && !reformatJson) {
-            return new String(data, offset, length, Charsets.UTF_8);
+            return new String(data, offset, length, StandardCharsets.UTF_8);
         }
         XContentParser parser = null;
         try {

@@ -19,7 +19,6 @@
 package org.elasticsearch.gateway;
 
 import com.google.common.base.Preconditions;
-import com.google.common.collect.Collections2;
 
 import org.apache.lucene.codecs.CodecUtil;
 import org.apache.lucene.index.CorruptIndexException;

@@ -19,9 +19,7 @@
 
 package org.elasticsearch.index;
 
-import com.google.common.base.Function;
 import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Iterators;
 import org.apache.lucene.util.Accountable;
 import org.apache.lucene.util.IOUtils;
 import org.elasticsearch.ElasticsearchException;
@@ -177,12 +175,7 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone
 
     @Override
     public Iterator<IndexShard> iterator() {
-        return Iterators.transform(shards.values().iterator(), new Function<IndexShardInjectorPair, IndexShard>() {
-            @Override
-            public IndexShard apply(IndexShardInjectorPair input) {
-                return input.getIndexShard();
-            }
-        });
+        return shards.values().stream().map((p) -> p.getIndexShard()).iterator();
     }
 
     public boolean hasShard(int shardId) {
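
Iterators.transform over a collection of pairs, as in IndexService.iterator() above, becomes stream().map(...).iterator(); the result is still lazy and still read-only. A self-contained sketch of the idiom (the Pair type is a stand-in for IndexShardInjectorPair):

    import java.util.Arrays;
    import java.util.Iterator;
    import java.util.List;

    public class TransformedIteratorSketch {
        // Stand-in for IndexShardInjectorPair: a pair exposing one of its halves.
        static final class Pair {
            private final String shard;
            Pair(String shard) { this.shard = shard; }
            String getShard() { return shard; }
        }

        public static void main(String[] args) {
            List<Pair> shards = Arrays.asList(new Pair("shard-0"), new Pair("shard-1"));

            // Before (Guava): Iterators.transform(shards.iterator(), anonymousFunction)
            // After (JDK 8): a mapped stream exposes the same lazy iterator.
            Iterator<String> it = shards.stream().map(Pair::getShard).iterator();
            while (it.hasNext()) {
                System.out.println(it.next()); // shard-0, shard-1
            }
        }
    }
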
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.index.analysis;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import com.google.common.collect.ImmutableMap;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.NumericTokenStream;
@@ -235,7 +235,7 @@ public class Analysis {
 
         final Path wordListFile = env.configFile().resolve(wordListPath);
 
-        try (BufferedReader reader = FileSystemUtils.newBufferedReader(wordListFile.toUri().toURL(), Charsets.UTF_8)) {
+        try (BufferedReader reader = FileSystemUtils.newBufferedReader(wordListFile.toUri().toURL(), StandardCharsets.UTF_8)) {
             return loadWordList(reader, "#");
         } catch (IOException ioe) {
             String message = String.format(Locale.ROOT, "IOException while reading %s_path: %s", settingPrefix, ioe.getMessage());
@@ -283,7 +283,7 @@ public class Analysis {
         final Path path = env.configFile().resolve(filePath);
 
         try {
-            return FileSystemUtils.newBufferedReader(path.toUri().toURL(), Charsets.UTF_8);
+            return FileSystemUtils.newBufferedReader(path.toUri().toURL(), StandardCharsets.UTF_8);
         } catch (IOException ioe) {
             String message = String.format(Locale.ROOT, "IOException while reading %s_path: %s", settingPrefix, ioe.getMessage());
             throw new IllegalArgumentException(message);

@@ -23,8 +23,10 @@ import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.DelegatingAnalyzerWrapper;
 import org.elasticsearch.common.collect.CopyOnWriteHashMap;
 
+import java.util.AbstractMap;
 import java.util.Collection;
 import java.util.Map;
+import java.util.stream.Stream;
 
 /**
  *
@@ -35,7 +37,7 @@ public final class FieldNameAnalyzer extends DelegatingAnalyzerWrapper {
     private final Analyzer defaultAnalyzer;
 
     public FieldNameAnalyzer(Analyzer defaultAnalyzer) {
-        this(new CopyOnWriteHashMap<String, Analyzer>(), defaultAnalyzer);
+        this(new CopyOnWriteHashMap<>(), defaultAnalyzer);
     }
 
     public FieldNameAnalyzer(Map<String, Analyzer> analyzers, Analyzer defaultAnalyzer) {
@@ -66,16 +68,14 @@ public final class FieldNameAnalyzer extends DelegatingAnalyzerWrapper {
     /**
      * Return a new instance that contains the union of this and of the provided analyzers.
     */
-    public FieldNameAnalyzer copyAndAddAll(Collection<? extends Map.Entry<String, Analyzer>> mappers) {
-        CopyOnWriteHashMap<String, Analyzer> analyzers = this.analyzers;
-        for (Map.Entry<String, Analyzer> entry : mappers) {
-            Analyzer analyzer = entry.getValue();
-            if (analyzer == null) {
-                analyzer = defaultAnalyzer;
+    public FieldNameAnalyzer copyAndAddAll(Stream<? extends Map.Entry<String, Analyzer>> mappers) {
+        CopyOnWriteHashMap<String, Analyzer> result = analyzers.copyAndPutAll(mappers.map((e) -> {
+            if (e.getValue() == null) {
+                return new AbstractMap.SimpleImmutableEntry<>(e.getKey(), defaultAnalyzer);
             }
-            analyzers = analyzers.copyAndPut(entry.getKey(), analyzer);
-        }
-        return new FieldNameAnalyzer(analyzers, defaultAnalyzer);
+            return e;
+        }));
+        return new FieldNameAnalyzer(result, defaultAnalyzer);
     }
 
 }

@@ -19,8 +19,6 @@
 
 package org.elasticsearch.index.mapper;
 
-import com.google.common.base.Function;
-import com.google.common.collect.Collections2;
 import org.apache.lucene.analysis.Analyzer;
 import org.elasticsearch.common.collect.CopyOnWriteHashMap;
 import org.elasticsearch.common.regex.Regex;
@@ -65,28 +63,19 @@ public final class DocumentFieldMappers implements Iterable<FieldMapper> {
         for (FieldMapper fieldMapper : newMappers) {
             map = map.copyAndPut(fieldMapper.fieldType().names().fullName(), fieldMapper);
         }
-        FieldNameAnalyzer indexAnalyzer = this.indexAnalyzer.copyAndAddAll(Collections2.transform(newMappers, new Function<FieldMapper, Map.Entry<String, Analyzer>>() {
-            @Override
-            public Map.Entry<String, Analyzer> apply(FieldMapper input) {
-                return new AbstractMap.SimpleImmutableEntry<>(input.fieldType().names().indexName(), (Analyzer)input.fieldType().indexAnalyzer());
-            }
-        }));
-        FieldNameAnalyzer searchAnalyzer = this.searchAnalyzer.copyAndAddAll(Collections2.transform(newMappers, new Function<FieldMapper, Map.Entry<String, Analyzer>>() {
-            @Override
-            public Map.Entry<String, Analyzer> apply(FieldMapper input) {
-                return new AbstractMap.SimpleImmutableEntry<>(input.fieldType().names().indexName(), (Analyzer)input.fieldType().searchAnalyzer());
-            }
-        }));
-        FieldNameAnalyzer searchQuoteAnalyzer = this.searchQuoteAnalyzer.copyAndAddAll(Collections2.transform(newMappers, new Function<FieldMapper, Map.Entry<String, Analyzer>>() {
-            @Override
-            public Map.Entry<String, Analyzer> apply(FieldMapper input) {
-                return new AbstractMap.SimpleImmutableEntry<>(input.fieldType().names().indexName(), (Analyzer)input.fieldType().searchQuoteAnalyzer());
-            }
-        }));
-        return new DocumentFieldMappers(map, indexAnalyzer, searchAnalyzer, searchQuoteAnalyzer);
+        FieldNameAnalyzer indexAnalyzer = this.indexAnalyzer.copyAndAddAll(newMappers.stream().map((input) ->
+            new AbstractMap.SimpleImmutableEntry<>(input.fieldType().names().indexName(), (Analyzer)input.fieldType().indexAnalyzer())
+        ));
+        FieldNameAnalyzer searchAnalyzer = this.searchAnalyzer.copyAndAddAll(newMappers.stream().map((input) ->
+            new AbstractMap.SimpleImmutableEntry<>(input.fieldType().names().indexName(), (Analyzer)input.fieldType().searchAnalyzer())
+        ));
+        FieldNameAnalyzer searchQuoteAnalyzer = this.searchQuoteAnalyzer.copyAndAddAll(newMappers.stream().map((input) ->
+            new AbstractMap.SimpleImmutableEntry<>(input.fieldType().names().indexName(), (Analyzer) input.fieldType().searchQuoteAnalyzer())
+        ));
+        return new DocumentFieldMappers(map,indexAnalyzer,searchAnalyzer,searchQuoteAnalyzer);
     }
 
     /** Returns the mapper for the given field */
     public FieldMapper getMapper(String field) {
         return fieldMappers.get(field);
     }

@@ -21,8 +21,6 @@ package org.elasticsearch.index.mapper;
 
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
-import com.google.common.base.Function;
-import com.google.common.collect.Iterators;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.index.IndexOptions;
@@ -47,6 +45,7 @@ import java.util.Comparator;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Locale;
+import java.util.stream.StreamSupport;
 
 public abstract class FieldMapper extends Mapper {
 
@@ -658,12 +657,7 @@ public abstract class FieldMapper extends Mapper {
     }
 
     public Iterator<Mapper> iterator() {
-        return Iterators.transform(mappers.values().iterator(), new Function<ObjectCursor<FieldMapper>, Mapper>() {
-            @Override
-            public Mapper apply(@Nullable ObjectCursor<FieldMapper> cursor) {
-                return cursor.value;
-            }
-        });
+        return StreamSupport.stream(mappers.values().spliterator(), false).map((p) -> (Mapper)p.value).iterator();
     }
 
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {

@@ -19,8 +19,6 @@
 
 package org.elasticsearch.index.mapper;
 
-import com.google.common.base.Function;
-import com.google.common.collect.Iterators;
 import org.elasticsearch.common.collect.CopyOnWriteHashMap;
 import org.elasticsearch.common.regex.Regex;
 
@@ -35,12 +33,6 @@ import java.util.Set;
 * An immutable container for looking up {@link MappedFieldType}s by their name.
 */
 class FieldTypeLookup implements Iterable<MappedFieldType> {
-    private static final Function<MappedFieldTypeReference, MappedFieldType> UNWRAPPER = new Function<MappedFieldTypeReference, MappedFieldType>() {
-        @Override
-        public MappedFieldType apply(MappedFieldTypeReference ref) {
-            return ref.get();
-        }
-    };
 
     /** Full field name to field type */
     private final CopyOnWriteHashMap<String, MappedFieldTypeReference> fullNameToFieldType;
@@ -179,6 +171,6 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
     }
 
     public Iterator<MappedFieldType> iterator() {
-        return Iterators.transform(fullNameToFieldType.values().iterator(), UNWRAPPER);
+        return fullNameToFieldType.values().stream().map((p) -> p.get()).iterator();
     }
 }

@@ -20,7 +20,6 @@
 package org.elasticsearch.index.mapper;
 
 import com.carrotsearch.hppc.ObjectHashSet;
-import com.google.common.base.Function;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Iterators;
@@ -70,6 +69,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.concurrent.CopyOnWriteArrayList;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
+import java.util.function.Function;
 
 import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder;
 
@@ -84,22 +84,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
             "_size", "_timestamp", "_ttl"
     );
 
-    private static final Function<MappedFieldType, Analyzer> INDEX_ANALYZER_EXTRACTOR = new Function<MappedFieldType, Analyzer>() {
-        public Analyzer apply(MappedFieldType fieldType) {
-            return fieldType.indexAnalyzer();
-        }
-    };
-    private static final Function<MappedFieldType, Analyzer> SEARCH_ANALYZER_EXTRACTOR = new Function<MappedFieldType, Analyzer>() {
-        public Analyzer apply(MappedFieldType fieldType) {
-            return fieldType.searchAnalyzer();
-        }
-    };
-    private static final Function<MappedFieldType, Analyzer> SEARCH_QUOTE_ANALYZER_EXTRACTOR = new Function<MappedFieldType, Analyzer>() {
-        public Analyzer apply(MappedFieldType fieldType) {
-            return fieldType.searchQuoteAnalyzer();
-        }
-    };
-
     private final AnalysisService analysisService;
 
     /**
@@ -142,9 +126,9 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
         this.analysisService = analysisService;
         this.fieldTypes = new FieldTypeLookup();
         this.documentParser = new DocumentMapperParser(indexSettings, this, analysisService, similarityLookupService, scriptService);
-        this.indexAnalyzer = new MapperAnalyzerWrapper(analysisService.defaultIndexAnalyzer(), INDEX_ANALYZER_EXTRACTOR);
-        this.searchAnalyzer = new MapperAnalyzerWrapper(analysisService.defaultSearchAnalyzer(), SEARCH_ANALYZER_EXTRACTOR);
-        this.searchQuoteAnalyzer = new MapperAnalyzerWrapper(analysisService.defaultSearchQuoteAnalyzer(), SEARCH_QUOTE_ANALYZER_EXTRACTOR);
+        this.indexAnalyzer = new MapperAnalyzerWrapper(analysisService.defaultIndexAnalyzer(), p -> p.indexAnalyzer());
+        this.searchAnalyzer = new MapperAnalyzerWrapper(analysisService.defaultSearchAnalyzer(), p -> p.searchAnalyzer());
+        this.searchQuoteAnalyzer = new MapperAnalyzerWrapper(analysisService.defaultSearchQuoteAnalyzer(), p -> p.searchQuoteAnalyzer());
 
         this.dynamic = indexSettings.getAsBoolean("index.mapper.dynamic", true);
         defaultPercolatorMappingSource = "{\n" +
@@ -194,17 +178,14 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
      * As is this not really an active type, you would typically set this to false
     */
     public Iterable<DocumentMapper> docMappers(final boolean includingDefaultMapping) {
-        return new Iterable<DocumentMapper>() {
-            @Override
-            public Iterator<DocumentMapper> iterator() {
-                final Iterator<DocumentMapper> iterator;
-                if (includingDefaultMapping) {
-                    iterator = mappers.values().iterator();
-                } else {
-                    iterator = mappers.values().stream().filter(mapper -> !DEFAULT_MAPPING.equals(mapper.type())).iterator();
-                }
-                return Iterators.unmodifiableIterator(iterator);
-            }
+        return () -> {
+            final Iterator<DocumentMapper> iterator;
+            if (includingDefaultMapping) {
+                iterator = mappers.values().iterator();
+            } else {
+                iterator = mappers.values().stream().filter(mapper -> !DEFAULT_MAPPING.equals(mapper.type())).iterator();
+            }
+            return Iterators.unmodifiableIterator(iterator);
         };
     }
 
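
The MapperService hunks above show the other half of the migration: the import flips from com.google.common.base.Function to java.util.function.Function, and each multi-line anonymous extractor collapses to a lambda. A sketch of the shape (the FieldType stand-in and analyzer string are illustrative):

    import java.util.function.Function;

    public class AnalyzerExtractorSketch {
        // Stand-in for MappedFieldType; only the accessor we need.
        static final class FieldType {
            String indexAnalyzer() { return "standard"; }
        }

        // Before: a five-line anonymous com.google.common.base.Function subclass.
        // After: the same extractor as a one-line java.util.function.Function.
        static final Function<FieldType, String> INDEX_ANALYZER_EXTRACTOR = p -> p.indexAnalyzer();

        public static void main(String[] args) {
            System.out.println(INDEX_ANALYZER_EXTRACTOR.apply(new FieldType())); // standard
        }
    }
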
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.index.query;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 
@@ -49,7 +49,7 @@ public class WrapperQueryBuilder extends QueryBuilder {
      * Creates a query builder given a query provided as a string
     */
     public WrapperQueryBuilder(String source) {
-        this.source = source.getBytes(Charsets.UTF_8);
+        this.source = source.getBytes(StandardCharsets.UTF_8);
         this.offset = 0;
         this.length = this.source.length;
     }

@@ -19,7 +19,7 @@
 
 package org.elasticsearch.index.shard;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.apache.lucene.codecs.PostingsFormat;
 import org.apache.lucene.index.CheckIndex;
 import org.apache.lucene.search.QueryCachingPolicy;
@@ -1202,7 +1202,7 @@ public class IndexShard extends AbstractIndexShardComponent {
             return;
         }
         BytesStreamOutput os = new BytesStreamOutput();
-        PrintStream out = new PrintStream(os, false, Charsets.UTF_8.name());
+        PrintStream out = new PrintStream(os, false, StandardCharsets.UTF_8.name());
 
         if ("checksum".equalsIgnoreCase(checkIndexOnStartup)) {
             // physical verification only: verify all checksums for the latest commit
@@ -1220,7 +1220,7 @@ public class IndexShard extends AbstractIndexShardComponent {
             }
             out.flush();
             if (corrupt != null) {
-                logger.warn("check index [failure]\n{}", new String(os.bytes().toBytes(), Charsets.UTF_8));
+                logger.warn("check index [failure]\n{}", new String(os.bytes().toBytes(), StandardCharsets.UTF_8));
                 throw corrupt;
             }
         } else {
@@ -1235,7 +1235,7 @@ public class IndexShard extends AbstractIndexShardComponent {
                 // ignore if closed....
                 return;
             }
-            logger.warn("check index [failure]\n{}", new String(os.bytes().toBytes(), Charsets.UTF_8));
+            logger.warn("check index [failure]\n{}", new String(os.bytes().toBytes(), StandardCharsets.UTF_8));
             if ("fix".equalsIgnoreCase(checkIndexOnStartup)) {
                 if (logger.isDebugEnabled()) {
                     logger.debug("fixing index, writing new segments file ...");
@@ -1253,7 +1253,7 @@ public class IndexShard extends AbstractIndexShardComponent {
             }
 
             if (logger.isDebugEnabled()) {
-                logger.debug("check index [success]\n{}", new String(os.bytes().toBytes(), Charsets.UTF_8));
+                logger.debug("check index [success]\n{}", new String(os.bytes().toBytes(), StandardCharsets.UTF_8));
             }
 
             recoveryState.getVerifyIndex().checkIndexTime(Math.max(0, TimeValue.nsecToMSec(System.nanoTime() - timeNS)));

@@ -19,11 +19,8 @@
 
 package org.elasticsearch.indices;
 
-import com.google.common.base.Function;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.Iterables;
-import com.google.common.collect.Iterators;
 import org.apache.lucene.store.LockObtainFailedException;
 import org.apache.lucene.util.CollectionUtil;
 import org.apache.lucene.util.IOUtils;
@@ -97,6 +94,8 @@ import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
 
 import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
 import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
@@ -277,12 +276,7 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
 
     @Override
     public Iterator<IndexService> iterator() {
-        return Iterators.transform(indices.values().iterator(), new Function<IndexServiceInjectorPair, IndexService>() {
-            @Override
-            public IndexService apply(IndexServiceInjectorPair input) {
-                return input.getIndexService();
-            }
-        });
+        return indices.values().stream().map((p) -> p.getIndexService()).iterator();
     }
 
     public boolean hasIndex(String index) {
@@ -404,12 +398,8 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
         if (delete) {
             indicesLifecycle.beforeIndexDeleted(indexService);
         }
-        IOUtils.close(Iterables.transform(pluginsService.indexServices(), new Function<Class<? extends Closeable>, Closeable>() {
-            @Override
-            public Closeable apply(Class<? extends Closeable> input) {
-                return indexInjector.getInstance(input);
-            }
-        }));
+        Stream<Closeable> closeables = pluginsService.indexServices().stream().map(p -> indexInjector.getInstance(p));
+        IOUtils.close(closeables::iterator);
 
         logger.debug("[{}] closing index service (reason [{}])", index, reason);
         indexService.close(reason, delete);

@@ -18,17 +18,16 @@
  */
 package org.elasticsearch.indices.analysis;
 
-import com.google.common.cache.CacheBuilder;
-import com.google.common.cache.CacheLoader;
-import com.google.common.cache.LoadingCache;
 import com.google.common.util.concurrent.UncheckedExecutionException;
 
 import org.apache.lucene.analysis.hunspell.Dictionary;
 import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.common.collect.CopyOnWriteHashMap;
 import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.FileSystemUtils;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.concurrent.KeyedLock;
 import org.elasticsearch.env.Environment;
 
 import java.io.IOException;
@@ -72,32 +71,21 @@ public class HunspellService extends AbstractComponent {
     public final static String HUNSPELL_LAZY_LOAD = "indices.analysis.hunspell.dictionary.lazy";
     public final static String HUNSPELL_IGNORE_CASE = "indices.analysis.hunspell.dictionary.ignore_case";
     private final static String OLD_HUNSPELL_LOCATION = "indices.analysis.hunspell.dictionary.location";
-    private final LoadingCache<String, Dictionary> dictionaries;
+    private final Environment env;
+    private volatile CopyOnWriteHashMap<String, Dictionary> dictionaries = new CopyOnWriteHashMap<>();
+    private final Map<String, Dictionary> knownDictionaries;
+    private KeyedLock<String> keyedLock = new KeyedLock<>();
 
     private final boolean defaultIgnoreCase;
     private final Path hunspellDir;
 
     public HunspellService(final Settings settings, final Environment env) throws IOException {
         this(settings, env, Collections.<String, Dictionary>emptyMap());
     }
 
     @Inject
     public HunspellService(final Settings settings, final Environment env, final Map<String, Dictionary> knownDictionaries) throws IOException {
         super(settings);
+        this.knownDictionaries = knownDictionaries;
         this.hunspellDir = resolveHunspellDirectory(settings, env);
         this.defaultIgnoreCase = settings.getAsBoolean(HUNSPELL_IGNORE_CASE, false);
-        dictionaries = CacheBuilder.newBuilder().build(new CacheLoader<String, Dictionary>() {
-            @Override
-            public Dictionary load(String locale) throws Exception {
-                Dictionary dictionary = knownDictionaries.get(locale);
-                if (dictionary == null) {
-                    dictionary = loadDictionary(locale, settings, env);
-                }
-                return dictionary;
-            }
-        });
+        this.env = env;
         if (!settings.getAsBoolean(HUNSPELL_LAZY_LOAD, false)) {
             scanAndLoadDictionaries();
         }
@@ -109,7 +97,24 @@ public class HunspellService extends AbstractComponent {
     * @param locale The name of the locale
     */
     public Dictionary getDictionary(String locale) {
-        return dictionaries.getUnchecked(locale);
+        Dictionary dictionary = dictionaries.get(locale);
+        if (dictionary == null) {
+            dictionary = knownDictionaries.get(locale);
+            if (dictionary == null) {
+                keyedLock.acquire(locale);
+                dictionary = dictionaries.get(locale);
+                if (dictionary == null) {
+                    try {
+                        dictionary = loadDictionary(locale, settings, env);
+                    } catch (Exception e) {
+                        throw new IllegalStateException("failed to load hunspell dictionary for local: " + locale, e);
+                    }
+                    dictionaries = dictionaries.copyAndPut(locale, dictionary);
+                }
+                keyedLock.release(locale);
+            }
+        }
+        return dictionary;
     }
 
     private Path resolveHunspellDirectory(Settings settings, Environment env) {
@@ -131,7 +136,7 @@ public class HunspellService extends AbstractComponent {
             try (DirectoryStream<Path> inner = Files.newDirectoryStream(hunspellDir.resolve(file), "*.dic")) {
                 if (inner.iterator().hasNext()) { // just making sure it's indeed a dictionary dir
                     try {
-                        dictionaries.getUnchecked(file.getFileName().toString());
+                        getDictionary(file.getFileName().toString());
                     } catch (UncheckedExecutionException e) {
                         // The cache loader throws unchecked exception (see #loadDictionary()),
                         // here we simply report the exception and continue loading the dictionaries
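
The HunspellService rewrite above is the most substantial behavioral change in the commit: Guava's LoadingCache is replaced by a copy-on-write map guarded by a per-key lock, so each dictionary is still loaded at most once. A much-simplified sketch of that lazy-load contract using only JDK types — ConcurrentHashMap.computeIfAbsent stands in for the CopyOnWriteHashMap/KeyedLock pair, and the loader is a dummy:

    import java.util.concurrent.ConcurrentHashMap;

    public class LazyDictionaryCacheSketch {
        private final ConcurrentHashMap<String, String> dictionaries = new ConcurrentHashMap<>();

        // Dummy stand-in for HunspellService#loadDictionary(locale, settings, env).
        private String loadDictionary(String locale) {
            return "dictionary:" + locale;
        }

        // Same contract as the rewritten getDictionary(): return the cached entry,
        // loading it at most once per key. computeIfAbsent plays the role that the
        // copy-on-write map plus per-key lock play in the commit.
        public String getDictionary(String locale) {
            return dictionaries.computeIfAbsent(locale, this::loadDictionary);
        }

        public static void main(String[] args) {
            LazyDictionaryCacheSketch cache = new LazyDictionaryCacheSketch();
            System.out.println(cache.getDictionary("en_US")); // loads once
            System.out.println(cache.getDictionary("en_US")); // served from the map
        }
    }
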
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.node.internal;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.elasticsearch.cluster.ClusterName;
 import org.elasticsearch.common.Booleans;
 import org.elasticsearch.common.Strings;
@@ -221,7 +221,7 @@ public class InternalSettingsPreparer {
 
         try {
             List<String> names = new ArrayList<>();
-            try (BufferedReader reader = new BufferedReader(new InputStreamReader(input, Charsets.UTF_8))) {
+            try (BufferedReader reader = new BufferedReader(new InputStreamReader(input, StandardCharsets.UTF_8))) {
                 String name = reader.readLine();
                 while (name != null) {
                     names.add(name);

@@ -99,8 +99,9 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import java.util.stream.Collectors;
+import java.util.stream.StreamSupport;
 
-import static org.elasticsearch.common.util.CollectionUtils.eagerTransform;
 import static org.elasticsearch.index.mapper.SourceToParse.source;
 import static org.elasticsearch.percolator.QueryCollector.count;
 import static org.elasticsearch.percolator.QueryCollector.match;
@@ -866,7 +867,9 @@ public class PercolatorService extends AbstractComponent {
         if (aggregations != null) {
             List<SiblingPipelineAggregator> pipelineAggregators = shardResults.get(0).pipelineAggregators();
             if (pipelineAggregators != null) {
-                List<InternalAggregation> newAggs = new ArrayList<>(eagerTransform(aggregations.asList(), PipelineAggregator.AGGREGATION_TRANFORM_FUNCTION));
+                List<InternalAggregation> newAggs = StreamSupport.stream(aggregations.spliterator(), false).map((p) -> {
+                    return (InternalAggregation) p;
+                }).collect(Collectors.toList());
                 for (SiblingPipelineAggregator pipelineAggregator : pipelineAggregators) {
                     InternalAggregation newAgg = pipelineAggregator.doReduce(new InternalAggregations(newAggs), new ReduceContext(
                             bigArrays, scriptService, headersContext));

@@ -19,7 +19,7 @@
 
 package org.elasticsearch.rest.support;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.path.PathTrie;
 import org.elasticsearch.common.settings.Settings;
@@ -113,7 +113,7 @@ public class RestUtils {
      * escape sequence.
     */
     public static String decodeComponent(final String s) {
-        return decodeComponent(s, Charsets.UTF_8);
+        return decodeComponent(s, StandardCharsets.UTF_8);
     }
 
     /**
@@ -134,7 +134,7 @@ public class RestUtils {
     *
     * @param s The string to decode (can be empty).
     * @param charset The charset to use to decode the string (should really
-    *                be {@link Charsets#UTF_8}.
+    *                be {@link StandardCharsets#UTF_8}.
    * @return The decoded string, or {@code s} if there's nothing to decode.
    *         If the string to decode is {@code null}, returns an empty string.
    * @throws IllegalArgumentException if the string contains a malformed

@@ -19,7 +19,7 @@
 
 package org.elasticsearch.script;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import com.google.common.cache.Cache;
 import com.google.common.cache.CacheBuilder;
 import com.google.common.cache.RemovalListener;
@@ -543,7 +543,7 @@ public class ScriptService extends AbstractComponent implements Closeable {
                     // with file scripts are disabled, it makes no sense to even compile it and cache it.
                     if (isAnyScriptContextEnabled(engineService.types()[0], engineService, ScriptType.FILE)) {
                         logger.info("compiling script file [{}]", file.toAbsolutePath());
-                        try(InputStreamReader reader = new InputStreamReader(Files.newInputStream(file), Charsets.UTF_8)) {
+                        try(InputStreamReader reader = new InputStreamReader(Files.newInputStream(file), StandardCharsets.UTF_8)) {
                             String script = Streams.copyToString(reader);
                             String cacheKey = getCacheKey(engineService, scriptNameExt.v1(), null);
                             staticCache.put(cacheKey, new CompiledScript(ScriptType.FILE, scriptNameExt.v1(), engineService.types()[0], engineService.compile(script)));

@@ -19,7 +19,7 @@
 
 package org.elasticsearch.script.groovy;
 
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import com.google.common.hash.Hashing;
 import groovy.lang.Binding;
 import groovy.lang.GroovyClassLoader;
@@ -111,7 +111,7 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri
     @Override
     public Object compile(String script) {
         try {
-            return loader.parseClass(script, Hashing.sha1().hashString(script, Charsets.UTF_8).toString());
+            return loader.parseClass(script, Hashing.sha1().hashString(script, StandardCharsets.UTF_8).toString());
         } catch (Throwable e) {
             if (logger.isTraceEnabled()) {
                 logger.trace("exception compiling Groovy script:", e);

@@ -18,9 +18,7 @@
  */
 package org.elasticsearch.search.aggregations;
 
-import com.google.common.base.Function;
 import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Iterators;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -38,21 +36,13 @@ import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 
-import static org.elasticsearch.common.util.CollectionUtils.eagerTransform;
-
+import java.util.stream.Collectors;
 /**
 * An internal implementation of {@link Aggregations}.
 */
 public class InternalAggregations implements Aggregations, ToXContent, Streamable {
 
     public final static InternalAggregations EMPTY = new InternalAggregations();
-    private static final Function<InternalAggregation, Aggregation> SUPERTYPE_CAST = new Function<InternalAggregation, Aggregation>() {
-        @Override
-        public Aggregation apply(InternalAggregation input) {
-            return input;
-        }
-    };
 
     private List<InternalAggregation> aggregations = Collections.emptyList();
 
@@ -73,7 +63,7 @@ public class InternalAggregations implements Aggregations, ToXContent, Streamabl
     */
     @Override
     public Iterator<Aggregation> iterator() {
-        return Iterators.transform(aggregations.iterator(), SUPERTYPE_CAST);
+        return aggregations.stream().map((p) -> (Aggregation) p).iterator();
     }
 
     /**
@@ -81,7 +71,7 @@ public class InternalAggregations implements Aggregations, ToXContent, Streamabl
     */
     @Override
     public List<Aggregation> asList() {
-        return eagerTransform(aggregations, SUPERTYPE_CAST);
+        return aggregations.stream().map((p) -> (Aggregation) p).collect(Collectors.toList());
     }
 
     /**

@@ -19,14 +19,12 @@
 
 package org.elasticsearch.search.aggregations.pipeline;
 
-import com.google.common.base.Function;
-
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Streamable;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.search.aggregations.Aggregation;
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext;
 import org.elasticsearch.search.aggregations.InternalAggregation.Type;
@@ -73,13 +71,6 @@ public abstract class PipelineAggregator implements Streamable {
 
     }
 
-    public static final Function<Aggregation, InternalAggregation> AGGREGATION_TRANFORM_FUNCTION = new Function<Aggregation, InternalAggregation>() {
-        @Override
-        public InternalAggregation apply(Aggregation input) {
-            return (InternalAggregation) input;
-        }
-    };
-
     private String name;
     private String[] bucketsPaths;
     private Map<String, Object> metaData;

@@ -30,8 +30,8 @@ import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Buck
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
-
-import static org.elasticsearch.common.util.CollectionUtils.eagerTransform;
+import java.util.stream.Collectors;
+import java.util.stream.StreamSupport;
 
 public abstract class SiblingPipelineAggregator extends PipelineAggregator {
 
@@ -54,8 +54,9 @@ public abstract class SiblingPipelineAggregator extends PipelineAggregator {
                 for (int i = 0; i < buckets.size(); i++) {
                     InternalMultiBucketAggregation.InternalBucket bucket = (InternalMultiBucketAggregation.InternalBucket) buckets.get(i);
                     InternalAggregation aggToAdd = doReduce(bucket.getAggregations(), reduceContext);
-                    List<InternalAggregation> aggs = new ArrayList<>(eagerTransform(bucket.getAggregations().asList(),
-                            AGGREGATION_TRANFORM_FUNCTION));
+                    List<InternalAggregation> aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false).map((p) -> {
+                        return (InternalAggregation) p;
+                    }).collect(Collectors.toList());
                     aggs.add(aggToAdd);
                     InternalMultiBucketAggregation.InternalBucket newBucket = multiBucketsAgg.createBucket(new InternalAggregations(aggs),
                             bucket);
@@ -66,8 +67,9 @@ public abstract class SiblingPipelineAggregator extends PipelineAggregator {
         } else if (aggregation instanceof InternalSingleBucketAggregation) {
             InternalSingleBucketAggregation singleBucketAgg = (InternalSingleBucketAggregation) aggregation;
             InternalAggregation aggToAdd = doReduce(singleBucketAgg.getAggregations(), reduceContext);
-            List<InternalAggregation> aggs = new ArrayList<>(eagerTransform(singleBucketAgg.getAggregations().asList(),
-                    AGGREGATION_TRANFORM_FUNCTION));
+            List<InternalAggregation> aggs = StreamSupport.stream(singleBucketAgg.getAggregations().spliterator(), false).map((p) -> {
+                return (InternalAggregation) p;
+            }).collect(Collectors.toList());
             aggs.add(aggToAdd);
             return singleBucketAgg.create(new InternalAggregations(aggs));
         } else {

@@ -19,14 +19,12 @@
 
 package org.elasticsearch.search.aggregations.pipeline.bucketscript;
 
-import com.google.common.base.Function;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.script.CompiledScript;
 import org.elasticsearch.script.ExecutableScript;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.script.ScriptContext;
-import org.elasticsearch.search.aggregations.Aggregation;
 import org.elasticsearch.search.aggregations.AggregationExecutionException;
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext;
@@ -43,38 +41,26 @@ import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
 import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
 
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
+import java.util.stream.Collectors;
+import java.util.stream.StreamSupport;
 
-import static org.elasticsearch.common.util.CollectionUtils.eagerTransform;
 import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.resolveBucketValue;
 
 public class BucketScriptPipelineAggregator extends PipelineAggregator {
 
     public final static Type TYPE = new Type("bucket_script");
 
-    public final static PipelineAggregatorStreams.Stream STREAM = new PipelineAggregatorStreams.Stream() {
-        @Override
-        public BucketScriptPipelineAggregator readResult(StreamInput in) throws IOException {
-            BucketScriptPipelineAggregator result = new BucketScriptPipelineAggregator();
-            result.readFrom(in);
-            return result;
-        }
+    public final static PipelineAggregatorStreams.Stream STREAM = in -> {
+        BucketScriptPipelineAggregator result = new BucketScriptPipelineAggregator();
+        result.readFrom(in);
+        return result;
     };
 
     public static void registerStreams() {
         PipelineAggregatorStreams.registerStream(STREAM, TYPE.stream());
     }
 
-    private static final Function<Aggregation, InternalAggregation> FUNCTION = new Function<Aggregation, InternalAggregation>() {
-        @Override
-        public InternalAggregation apply(Aggregation input) {
-            return (InternalAggregation) input;
-        }
-    };
-
     private ValueFormatter formatter;
     private GapPolicy gapPolicy;
 
@@ -134,9 +120,11 @@ public class BucketScriptPipelineAggregator extends PipelineAggregator {
                     throw new AggregationExecutionException("series_arithmetic script for reducer [" + name()
                             + "] must return a Number");
                 }
-                List<InternalAggregation> aggs = new ArrayList<>(eagerTransform(bucket.getAggregations().asList(), FUNCTION));
+                final List<InternalAggregation> aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false).map((p) -> {
+                    return (InternalAggregation) p;
+                }).collect(Collectors.toList());
                 aggs.add(new InternalSimpleValue(name(), ((Number) returned).doubleValue(), formatter,
-                        new ArrayList<PipelineAggregator>(), metaData()));
+                        new ArrayList<>(), metaData()));
                 InternalMultiBucketAggregation.InternalBucket newBucket = originalAgg.createBucket(new InternalAggregations(aggs),
                         (InternalMultiBucketAggregation.InternalBucket) bucket);
                 newBuckets.add(newBucket);
|
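PipelineAggregatorStreams.Stream evidently has a single abstract method, readResult, which is why every anonymous implementation in these files collapses to a lambda. A sketch of the general rule with a hypothetical single-method interface:

    import java.io.DataInput;
    import java.io.IOException;

    // Hypothetical interface with the same shape as PipelineAggregatorStreams.Stream.
    interface ResultReader<T> {
        T readResult(DataInput in) throws IOException;
    }

    class LambdaSketch {
        // Before: new ResultReader<Integer>() { @Override public Integer readResult(DataInput in) {...} };
        // After: a lambda; the compiler targets the lone abstract method, and the
        // type of "in" is inferred from the interface.
        static final ResultReader<Integer> STREAM = in -> {
            return in.readInt();
        };
    }
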
@@ -40,21 +40,19 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import java.util.stream.Collectors;
+import java.util.stream.StreamSupport;

-import static org.elasticsearch.common.util.CollectionUtils.eagerTransform;
 import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.resolveBucketValue;

 public class CumulativeSumPipelineAggregator extends PipelineAggregator {

     public final static Type TYPE = new Type("cumulative_sum");

-    public final static PipelineAggregatorStreams.Stream STREAM = new PipelineAggregatorStreams.Stream() {
-        @Override
-        public CumulativeSumPipelineAggregator readResult(StreamInput in) throws IOException {
-            CumulativeSumPipelineAggregator result = new CumulativeSumPipelineAggregator();
-            result.readFrom(in);
-            return result;
-        }
+    public final static PipelineAggregatorStreams.Stream STREAM = in -> {
+        CumulativeSumPipelineAggregator result = new CumulativeSumPipelineAggregator();
+        result.readFrom(in);
+        return result;
     };

     public static void registerStreams() {

@@ -88,8 +86,9 @@ public class CumulativeSumPipelineAggregator extends PipelineAggregator {
         for (InternalHistogram.Bucket bucket : buckets) {
             Double thisBucketValue = resolveBucketValue(histo, bucket, bucketsPaths()[0], GapPolicy.INSERT_ZEROS);
             sum += thisBucketValue;
-            List<InternalAggregation> aggs = new ArrayList<>(eagerTransform(bucket.getAggregations().asList(),
-                    AGGREGATION_TRANFORM_FUNCTION));
+            List<InternalAggregation> aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false).map((p) -> {
+                return (InternalAggregation) p;
+            }).collect(Collectors.toList());
             aggs.add(new InternalSimpleValue(name(), sum, formatter, new ArrayList<PipelineAggregator>(), metaData()));
             InternalHistogram.Bucket newBucket = factory.createBucket(bucket.getKey(), bucket.getDocCount(),
                     new InternalAggregations(aggs), bucket.getKeyed(), bucket.getFormatter());

@@ -41,8 +41,9 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import java.util.stream.Collectors;
+import java.util.stream.StreamSupport;

-import static org.elasticsearch.common.util.CollectionUtils.eagerTransform;
 import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.resolveBucketValue;

 public class DerivativePipelineAggregator extends PipelineAggregator {

@@ -100,8 +101,9 @@ public class DerivativePipelineAggregator extends PipelineAggregator {
             if (xAxisUnits != null) {
                 xDiff = (thisBucketKey - lastBucketKey) / xAxisUnits;
             }
-            List<InternalAggregation> aggs = new ArrayList<>(eagerTransform(bucket.getAggregations().asList(),
-                    AGGREGATION_TRANFORM_FUNCTION));
+            final List<InternalAggregation> aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false).map((p) -> {
+                return (InternalAggregation) p;
+            }).collect(Collectors.toList());
             aggs.add(new InternalDerivative(name(), gradient, xDiff, formatter, new ArrayList<PipelineAggregator>(), metaData()));
             InternalHistogram.Bucket newBucket = factory.createBucket(bucket.getKey(), bucket.getDocCount(), new InternalAggregations(
                     aggs), bucket.getKeyed(), bucket.getFormatter());

@@ -19,7 +19,6 @@

 package org.elasticsearch.search.aggregations.pipeline.having;

-import com.google.common.base.Function;

 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;

@@ -28,7 +27,6 @@ import org.elasticsearch.script.ExecutableScript;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.script.ScriptContext;
 import org.elasticsearch.script.expression.ExpressionScriptEngineService;
-import org.elasticsearch.search.aggregations.Aggregation;
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext;
 import org.elasticsearch.search.aggregations.InternalAggregation.Type;

@@ -64,13 +62,6 @@ public class BucketSelectorPipelineAggregator extends PipelineAggregator {
         PipelineAggregatorStreams.registerStream(STREAM, TYPE.stream());
     }

-    private static final Function<Aggregation, InternalAggregation> FUNCTION = new Function<Aggregation, InternalAggregation>() {
-        @Override
-        public InternalAggregation apply(Aggregation input) {
-            return (InternalAggregation) input;
-        }
-    };
-
     private GapPolicy gapPolicy;

     private Script script;

@@ -19,7 +19,6 @@

 package org.elasticsearch.search.aggregations.pipeline.movavg;

-import com.google.common.base.Function;
 import com.google.common.collect.EvictingQueue;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;

@@ -48,8 +47,9 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.ListIterator;
 import java.util.Map;
+import java.util.stream.Collectors;
+import java.util.stream.StreamSupport;

-import static org.elasticsearch.common.util.CollectionUtils.eagerTransform;
 import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.resolveBucketValue;

 public class MovAvgPipelineAggregator extends PipelineAggregator {

@@ -69,13 +69,6 @@ public class MovAvgPipelineAggregator extends PipelineAggregator {
         PipelineAggregatorStreams.registerStream(STREAM, TYPE.stream());
     }

-    private static final Function<Aggregation, InternalAggregation> FUNCTION = new Function<Aggregation, InternalAggregation>() {
-        @Override
-        public InternalAggregation apply(Aggregation input) {
-            return (InternalAggregation) input;
-        }
-    };
-
     private ValueFormatter formatter;
     private GapPolicy gapPolicy;
     private int window;

@@ -134,7 +127,9 @@ public class MovAvgPipelineAggregator extends PipelineAggregator {
                 if (model.hasValue(values.size())) {
                     double movavg = model.next(values);

-                    List<InternalAggregation> aggs = new ArrayList<>(eagerTransform(bucket.getAggregations().asList(), AGGREGATION_TRANFORM_FUNCTION));
+                    List<InternalAggregation> aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false).map((p) -> {
+                        return (InternalAggregation) p;
+                    }).collect(Collectors.toList());
                     aggs.add(new InternalSimpleValue(name(), movavg, formatter, new ArrayList<PipelineAggregator>(), metaData()));
                     newBucket = factory.createBucket(bucket.getKey(), bucket.getDocCount(), new InternalAggregations(
                             aggs), bucket.getKeyed(), bucket.getFormatter());

@@ -175,7 +170,9 @@ public class MovAvgPipelineAggregator extends PipelineAggregator {
             InternalHistogram.Bucket bucket = (InternalHistogram.Bucket) newBuckets.get(lastValidPosition + i + 1);

             // Get the existing aggs in the bucket so we don't clobber data
-            aggs = new ArrayList<>(eagerTransform(bucket.getAggregations().asList(), AGGREGATION_TRANFORM_FUNCTION));
+            aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false).map((p) -> {
+                return (InternalAggregation) p;
+            }).collect(Collectors.toList());
             aggs.add(new InternalSimpleValue(name(), predictions[i], formatter, new ArrayList<PipelineAggregator>(), metaData()));

             InternalHistogram.Bucket newBucket = factory.createBucket(newKey, 0, new InternalAggregations(

@@ -39,8 +39,9 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import java.util.stream.Collectors;
+import java.util.stream.StreamSupport;

-import static org.elasticsearch.common.util.CollectionUtils.eagerTransform;
 import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
 import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.resolveBucketValue;

@@ -48,13 +49,10 @@ public class SerialDiffPipelineAggregator extends PipelineAggregator {

     public final static Type TYPE = new Type("serial_diff");

-    public final static PipelineAggregatorStreams.Stream STREAM = new PipelineAggregatorStreams.Stream() {
-        @Override
-        public SerialDiffPipelineAggregator readResult(StreamInput in) throws IOException {
-            SerialDiffPipelineAggregator result = new SerialDiffPipelineAggregator();
-            result.readFrom(in);
-            return result;
-        }
+    public final static PipelineAggregatorStreams.Stream STREAM = in -> {
+        SerialDiffPipelineAggregator result = new SerialDiffPipelineAggregator();
+        result.readFrom(in);
+        return result;
     };

     public static void registerStreams() {

@@ -114,7 +112,9 @@ public class SerialDiffPipelineAggregator extends PipelineAggregator {
             if (!Double.isNaN(thisBucketValue) && !Double.isNaN(lagValue)) {
                 double diff = thisBucketValue - lagValue;

-                List<InternalAggregation> aggs = new ArrayList<>(eagerTransform(bucket.getAggregations().asList(), AGGREGATION_TRANFORM_FUNCTION));
+                List<InternalAggregation> aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false).map((p) -> {
+                    return (InternalAggregation) p;
+                }).collect(Collectors.toList());
                 aggs.add(new InternalSimpleValue(name(), diff, formatter, new ArrayList<PipelineAggregator>(), metaData()));
                 newBucket = factory.createBucket(bucket.getKey(), bucket.getDocCount(), new InternalAggregations(
                         aggs), bucket.getKeyed(), bucket.getFormatter());

@@ -20,7 +20,7 @@
 package org.elasticsearch.search.builder;

 import com.carrotsearch.hppc.ObjectFloatHashMap;
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.elasticsearch.ElasticsearchGenerationException;
 import org.elasticsearch.action.support.QuerySourceBuilder;
 import org.elasticsearch.action.support.ToXContentToBytes;

@@ -175,7 +175,7 @@ public class SearchSourceBuilder extends ToXContentToBytes {
      * Constructs a new search source builder with a raw search query.
      */
     public SearchSourceBuilder query(String queryString) {
-        return query(queryString.getBytes(Charsets.UTF_8));
+        return query(queryString.getBytes(StandardCharsets.UTF_8));
     }

     /**

@@ -214,7 +214,7 @@ public class SearchSourceBuilder extends ToXContentToBytes {
      * (and not aggs for example).
      */
     public SearchSourceBuilder postFilter(String postFilterString) {
-        return postFilter(postFilterString.getBytes(Charsets.UTF_8));
+        return postFilter(postFilterString.getBytes(StandardCharsets.UTF_8));
     }

     /**

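Every Charsets.UTF_8 call site in this commit is a drop-in replacement: java.nio.charset.StandardCharsets has shipped the same constants since Java 7, so the Guava class adds nothing. A small sketch of the round trip:

    import java.nio.charset.StandardCharsets;

    class CharsetSketch {
        // Guava:  source.getBytes(Charsets.UTF_8)
        // JDK 7+: source.getBytes(StandardCharsets.UTF_8), same bytes, no dependency.
        static byte[] toUtf8(String source) {
            return source.getBytes(StandardCharsets.UTF_8);
        }

        static String fromUtf8(byte[] bytes) {
            return new String(bytes, StandardCharsets.UTF_8);
        }
    }

Passing the Charset constant rather than the string name "UTF-8" also sidesteps the checked UnsupportedEncodingException that the name-based overloads declare.
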
@@ -63,8 +63,8 @@ import java.util.Comparator;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-
-import static org.elasticsearch.common.util.CollectionUtils.eagerTransform;
+import java.util.stream.Collectors;
+import java.util.stream.StreamSupport;

 /**
  *

@@ -413,7 +413,9 @@ public class SearchPhaseController extends AbstractComponent {
         if (aggregations != null) {
             List<SiblingPipelineAggregator> pipelineAggregators = firstResult.pipelineAggregators();
             if (pipelineAggregators != null) {
-                List<InternalAggregation> newAggs = new ArrayList<>(eagerTransform(aggregations.asList(), PipelineAggregator.AGGREGATION_TRANFORM_FUNCTION));
+                List<InternalAggregation> newAggs = StreamSupport.stream(aggregations.spliterator(), false).map((p) -> {
+                    return (InternalAggregation) p;
+                }).collect(Collectors.toList());
                 for (SiblingPipelineAggregator pipelineAggregator : pipelineAggregators) {
                     InternalAggregation newAgg = pipelineAggregator.doReduce(new InternalAggregations(newAggs), new ReduceContext(
                             bigArrays, scriptService, headersContext));

@@ -19,7 +19,7 @@

 package org.elasticsearch.transport.netty;

-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import com.google.common.collect.ImmutableMap;
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.Version;

@@ -724,7 +724,7 @@ public class NettyTransport extends AbstractLifecycleComponent<Transport> implem
         } else if (e.getCause() instanceof SizeHeaderFrameDecoder.HttpOnTransportException) {
             // in case we are able to return data, serialize the exception content and sent it back to the client
             if (ctx.getChannel().isOpen()) {
-                ChannelBuffer buffer = ChannelBuffers.wrappedBuffer(e.getCause().getMessage().getBytes(Charsets.UTF_8));
+                ChannelBuffer buffer = ChannelBuffers.wrappedBuffer(e.getCause().getMessage().getBytes(StandardCharsets.UTF_8));
                 ChannelFuture channelFuture = ctx.getChannel().write(buffer);
                 channelFuture.addListener(new ChannelFutureListener() {
                     @Override

@@ -20,7 +20,7 @@

 package org.elasticsearch.action.bulk;

-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;

 import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
 import org.elasticsearch.test.ESIntegTestCase;

@@ -34,7 +34,7 @@ public class BulkIntegrationIT extends ESIntegTestCase {
     public void testBulkIndexCreatesMapping() throws Exception {
         String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/bulk-log.json");
         BulkRequestBuilder bulkBuilder = client().prepareBulk();
-        bulkBuilder.add(bulkAction.getBytes(Charsets.UTF_8), 0, bulkAction.length(), null, null);
+        bulkBuilder.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null);
         bulkBuilder.get();
         assertBusy(new Runnable() {
             @Override

@@ -19,7 +19,7 @@

 package org.elasticsearch.action.bulk;

-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;

 import org.apache.lucene.util.Constants;
 import org.elasticsearch.action.ActionRequest;

@@ -52,7 +52,7 @@ public class BulkRequestTests extends ESTestCase {
             bulkAction = Strings.replace(bulkAction, "\r\n", "\n");
         }
         BulkRequest bulkRequest = new BulkRequest();
-        bulkRequest.add(bulkAction.getBytes(Charsets.UTF_8), 0, bulkAction.length(), null, null);
+        bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null);
         assertThat(bulkRequest.numberOfActions(), equalTo(3));
         assertThat(((IndexRequest) bulkRequest.requests().get(0)).source().toBytes(), equalTo(new BytesArray("{ \"field1\" : \"value1\" }").toBytes()));
         assertThat(bulkRequest.requests().get(1), instanceOf(DeleteRequest.class));

@@ -63,7 +63,7 @@ public class BulkRequestTests extends ESTestCase {
     public void testSimpleBulk2() throws Exception {
         String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk2.json");
         BulkRequest bulkRequest = new BulkRequest();
-        bulkRequest.add(bulkAction.getBytes(Charsets.UTF_8), 0, bulkAction.length(), null, null);
+        bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null);
         assertThat(bulkRequest.numberOfActions(), equalTo(3));
     }

@@ -71,7 +71,7 @@ public class BulkRequestTests extends ESTestCase {
     public void testSimpleBulk3() throws Exception {
         String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk3.json");
         BulkRequest bulkRequest = new BulkRequest();
-        bulkRequest.add(bulkAction.getBytes(Charsets.UTF_8), 0, bulkAction.length(), null, null);
+        bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null);
         assertThat(bulkRequest.numberOfActions(), equalTo(3));
     }

@@ -79,7 +79,7 @@ public class BulkRequestTests extends ESTestCase {
     public void testSimpleBulk4() throws Exception {
         String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk4.json");
         BulkRequest bulkRequest = new BulkRequest();
-        bulkRequest.add(bulkAction.getBytes(Charsets.UTF_8), 0, bulkAction.length(), null, null);
+        bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null);
         assertThat(bulkRequest.numberOfActions(), equalTo(4));
         assertThat(((UpdateRequest) bulkRequest.requests().get(0)).id(), equalTo("1"));
         assertThat(((UpdateRequest) bulkRequest.requests().get(0)).retryOnConflict(), equalTo(2));

@@ -102,14 +102,14 @@ public class BulkRequestTests extends ESTestCase {
     public void testBulkAllowExplicitIndex() throws Exception {
         String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk.json");
         try {
-            new BulkRequest().add(new BytesArray(bulkAction.getBytes(Charsets.UTF_8)), null, null, false);
+            new BulkRequest().add(new BytesArray(bulkAction.getBytes(StandardCharsets.UTF_8)), null, null, false);
             fail();
         } catch (Exception e) {

         }

         bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk5.json");
-        new BulkRequest().add(new BytesArray(bulkAction.getBytes(Charsets.UTF_8)), "test", null, false);
+        new BulkRequest().add(new BytesArray(bulkAction.getBytes(StandardCharsets.UTF_8)), "test", null, false);
     }

     @Test

@@ -131,7 +131,7 @@ public class BulkRequestTests extends ESTestCase {
         String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk6.json");
         BulkRequest bulkRequest = new BulkRequest();
         try {
-            bulkRequest.add(bulkAction.getBytes(Charsets.UTF_8), 0, bulkAction.length(), null, null);
+            bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null);
             fail("should have thrown an exception about the wrong format of line 1");
         } catch (IllegalArgumentException e) {
             assertThat("message contains error about the wrong format of line 1: " + e.getMessage(),

@@ -144,7 +144,7 @@ public class BulkRequestTests extends ESTestCase {
         String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk7.json");
         BulkRequest bulkRequest = new BulkRequest();
         try {
-            bulkRequest.add(bulkAction.getBytes(Charsets.UTF_8), 0, bulkAction.length(), null, null);
+            bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null);
             fail("should have thrown an exception about the wrong format of line 5");
         } catch (IllegalArgumentException e) {
             assertThat("message contains error about the wrong format of line 5: " + e.getMessage(),

@@ -157,7 +157,7 @@ public class BulkRequestTests extends ESTestCase {
         String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk8.json");
         BulkRequest bulkRequest = new BulkRequest();
         try {
-            bulkRequest.add(bulkAction.getBytes(Charsets.UTF_8), 0, bulkAction.length(), null, null);
+            bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null);
             fail("should have thrown an exception about the unknown paramater _foo");
         } catch (IllegalArgumentException e) {
             assertThat("message contains error about the unknown paramater _foo: " + e.getMessage(),

@@ -170,7 +170,7 @@ public class BulkRequestTests extends ESTestCase {
         String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk9.json");
         BulkRequest bulkRequest = new BulkRequest();
         try {
-            bulkRequest.add(bulkAction.getBytes(Charsets.UTF_8), 0, bulkAction.length(), null, null);
+            bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null);
             fail("should have thrown an exception about the wrong format of line 3");
         } catch (IllegalArgumentException e) {
             assertThat("message contains error about the wrong format of line 3: " + e.getMessage(),

@@ -182,7 +182,7 @@ public class BulkRequestTests extends ESTestCase {
     public void testSimpleBulk10() throws Exception {
         String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk10.json");
         BulkRequest bulkRequest = new BulkRequest();
-        bulkRequest.add(bulkAction.getBytes(Charsets.UTF_8), 0, bulkAction.length(), null, null);
+        bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null);
         assertThat(bulkRequest.numberOfActions(), equalTo(9));
     }
 }

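A detail these test hunks leave unchanged: add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), ...) passes the character count where a byte length is expected. The two coincide only while the payload is pure ASCII, which holds for these fixtures; a sketch of the form that would also survive non-ASCII data (the add method is a hypothetical stand-in for BulkRequest#add):

    import java.nio.charset.StandardCharsets;

    class BulkLengthSketch {
        static void add(byte[] data, int from, int length) { /* stand-in */ }

        static void addUtf8(String bulkAction) {
            byte[] data = bulkAction.getBytes(StandardCharsets.UTF_8);
            add(data, 0, data.length); // data.length, not bulkAction.length()
        }
    }
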
@@ -19,7 +19,7 @@

 package org.elasticsearch.broadcast;

-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.elasticsearch.action.count.CountResponse;
 import org.elasticsearch.action.search.SearchPhaseExecutionException;
 import org.elasticsearch.common.xcontent.XContentBuilder;

@@ -72,7 +72,7 @@ public class BroadcastActionsIT extends ESIntegTestCase {
         for (int i = 0; i < 5; i++) {
             // test failed (simply query that can't be parsed)
             try {
-                client().count(countRequest("test").source("{ term : { _type : \"type1 } }".getBytes(Charsets.UTF_8))).actionGet();
+                client().count(countRequest("test").source("{ term : { _type : \"type1 } }".getBytes(StandardCharsets.UTF_8))).actionGet();
             } catch(SearchPhaseExecutionException e) {
                 assertThat(e.shardFailures().length, equalTo(numShards.numPrimaries));
             }

@@ -19,7 +19,7 @@

 package org.elasticsearch.cluster.routing.allocation;

-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.metadata.IndexMetaData;

@@ -28,7 +28,6 @@ import org.elasticsearch.cluster.node.DiscoveryNodes;
 import org.elasticsearch.cluster.routing.*;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.test.ESAllocationTestCase;
-import org.junit.Ignore;
 import org.junit.Test;

 import java.io.BufferedReader;

@@ -58,7 +57,7 @@ public abstract class CatAllocationTestCase extends ESAllocationTestCase {
     public void run() throws IOException {
         Set<String> nodes = new HashSet<>();
         Map<String, Idx> indices = new HashMap<>();
-        try (BufferedReader reader = Files.newBufferedReader(getCatPath(), Charsets.UTF_8)) {
+        try (BufferedReader reader = Files.newBufferedReader(getCatPath(), StandardCharsets.UTF_8)) {
            String line = null;
            // regexp FTW
            Pattern pattern = Pattern.compile("^(.+)\\s+(\\d)\\s+([rp])\\s+(STARTED|RELOCATING|INITIALIZING|UNASSIGNED)\\s+\\d+\\s+[0-9.a-z]+\\s+(\\d+\\.\\d+\\.\\d+\\.\\d+).*$");

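java.nio.file.Files accepts any Charset directly, so the reader side needs no Guava either. A sketch of line-by-line reading with the JDK constant (the input path is hypothetical):

    import java.io.BufferedReader;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    class ReadSketch {
        public static void main(String[] args) throws IOException {
            Path path = Paths.get("cat-allocation.txt"); // hypothetical input file
            try (BufferedReader reader = Files.newBufferedReader(path, StandardCharsets.UTF_8)) {
                String line;
                while ((line = reader.readLine()) != null) {
                    System.out.println(line);
                }
            }
        }
    }
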
@@ -18,7 +18,7 @@
 */
 package org.elasticsearch.common;

-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.elasticsearch.test.ESTestCase;
 import org.junit.Test;

@@ -35,7 +35,7 @@ public class Base64Tests extends ESTestCase {
     @Test // issue #6334
     public void testBase64DecodeWithExtraCharactersAfterPadding() throws Exception {
         String plain = randomAsciiOfLengthBetween(1, 20) + ":" + randomAsciiOfLengthBetween(1, 20);
-        String encoded = Base64.encodeBytes(plain.getBytes(Charsets.UTF_8));
+        String encoded = Base64.encodeBytes(plain.getBytes(StandardCharsets.UTF_8));
         assertValidBase64(encoded, plain);

         // lets append some trash here, if the encoded string has been padded

@@ -46,13 +46,13 @@ public class Base64Tests extends ESTestCase {
     }

     private void assertValidBase64(String base64, String expected) throws IOException {
-        String decoded = new String(Base64.decode(base64.getBytes(Charsets.UTF_8)), Charsets.UTF_8);
+        String decoded = new String(Base64.decode(base64.getBytes(StandardCharsets.UTF_8)), StandardCharsets.UTF_8);
         assertThat(decoded, is(expected));
     }

     private void assertInvalidBase64(String base64) {
         try {
-            Base64.decode(base64.getBytes(Charsets.UTF_8));
+            Base64.decode(base64.getBytes(StandardCharsets.UTF_8));
             fail(String.format(Locale.ROOT, "Expected IOException to be thrown for string %s (len %d)", base64, base64.length()));
         } catch (IOException e) {}
     }

@@ -19,7 +19,7 @@

 package org.elasticsearch.common;

-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.elasticsearch.test.ESTestCase;
 import org.junit.Test;

@@ -38,7 +38,7 @@ public class PidFileTests extends ESTestCase {
     public void testParentIsFile() throws IOException {
         Path dir = createTempDir();
         Path parent = dir.resolve("foo");
-        try(BufferedWriter stream = Files.newBufferedWriter(parent, Charsets.UTF_8, StandardOpenOption.CREATE_NEW)) {
+        try(BufferedWriter stream = Files.newBufferedWriter(parent, StandardCharsets.UTF_8, StandardOpenOption.CREATE_NEW)) {
             stream.write("foo");
         }

@@ -65,7 +65,7 @@ public class PidFileTests extends ESTestCase {
         Path pidFile = parent.resolve("foo.pid");
         long pid = randomLong();
         if (randomBoolean() && Files.exists(parent)) {
-            try (BufferedWriter stream = Files.newBufferedWriter(pidFile, Charsets.UTF_8, StandardOpenOption.CREATE_NEW)) {
+            try (BufferedWriter stream = Files.newBufferedWriter(pidFile, StandardCharsets.UTF_8, StandardOpenOption.CREATE_NEW)) {
                 stream.write("foo");
             }
         }

@@ -75,6 +75,6 @@ public class PidFileTests extends ESTestCase {
         assertEquals(pid, inst.getPid());
         assertFalse(inst.isDeleteOnExit());
         assertTrue(Files.exists(pidFile));
-        assertEquals(pid, Long.parseLong(new String(Files.readAllBytes(pidFile), Charsets.UTF_8)));
+        assertEquals(pid, Long.parseLong(new String(Files.readAllBytes(pidFile), StandardCharsets.UTF_8)));
     }
 }

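The writer side is symmetric; a sketch of the UTF-8 write/read round trip these tests perform, using only java.nio.file (the file name is hypothetical):

    import java.io.BufferedWriter;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.nio.file.StandardOpenOption;

    class RoundTripSketch {
        public static void main(String[] args) throws IOException {
            Path pidFile = Paths.get("foo.pid"); // hypothetical path
            try (BufferedWriter out = Files.newBufferedWriter(pidFile, StandardCharsets.UTF_8,
                    StandardOpenOption.CREATE_NEW)) {
                out.write("12345");
            }
            long pid = Long.parseLong(new String(Files.readAllBytes(pidFile), StandardCharsets.UTF_8));
            System.out.println(pid); // 12345
        }
    }
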
@@ -19,7 +19,7 @@

 package org.elasticsearch.common.bytes;

-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.io.Channels;
 import org.elasticsearch.common.io.stream.ByteBufferStreamInput;

@@ -148,7 +148,7 @@ public class ByteBufferBytesReference implements BytesReference {
         if (!buffer.hasRemaining()) {
             return "";
         }
-        final CharsetDecoder decoder = CharsetUtil.getDecoder(Charsets.UTF_8);
+        final CharsetDecoder decoder = CharsetUtil.getDecoder(StandardCharsets.UTF_8);
         final CharBuffer dst = CharBuffer.allocate(
                 (int) ((double) buffer.remaining() * decoder.maxCharsPerByte()));
         try {

@@ -19,7 +19,7 @@

 package org.elasticsearch.common.cli;

-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import com.google.common.jimfs.Configuration;
 import com.google.common.jimfs.Jimfs;
 import org.elasticsearch.common.settings.Settings;

@@ -153,7 +153,7 @@ public class CheckFileCommandTests extends ESTestCase {

         try (FileSystem fs = Jimfs.newFileSystem(configuration)) {
             Path path = fs.getPath(randomAsciiOfLength(10));
-            Files.write(path, "anything".getBytes(Charsets.UTF_8));
+            Files.write(path, "anything".getBytes(StandardCharsets.UTF_8));

             Settings settings = Settings.builder()
                     .put("path.home", createTempDir().toString())

@@ -195,7 +195,7 @@ public class CheckFileCommandTests extends ESTestCase {

     private Path writePath(FileSystem fs, String name, String content) throws IOException {
         Path path = fs.getPath(name);
-        Files.write(path, content.getBytes(Charsets.UTF_8));
+        Files.write(path, content.getBytes(StandardCharsets.UTF_8));
         return path;
     }

@@ -220,11 +220,11 @@ public class CheckFileCommandTests extends ESTestCase {
             Path randomPath = paths[randomInt];
             switch (mode) {
                 case CHANGE:
-                    Files.write(randomPath, randomAsciiOfLength(10).getBytes(Charsets.UTF_8));
+                    Files.write(randomPath, randomAsciiOfLength(10).getBytes(StandardCharsets.UTF_8));
                     Files.setPosixFilePermissions(randomPath, Sets.newHashSet(PosixFilePermission.OWNER_EXECUTE, PosixFilePermission.OTHERS_EXECUTE, PosixFilePermission.GROUP_EXECUTE));
                     break;
                 case KEEP:
-                    Files.write(randomPath, randomAsciiOfLength(10).getBytes(Charsets.UTF_8));
+                    Files.write(randomPath, randomAsciiOfLength(10).getBytes(StandardCharsets.UTF_8));
                     Set<PosixFilePermission> posixFilePermissions = Files.getPosixFilePermissions(randomPath);
                     Files.setPosixFilePermissions(randomPath, posixFilePermissions);
                     break;

@@ -249,12 +249,12 @@ public class CheckFileCommandTests extends ESTestCase {
             Path randomPath = paths[randomInt];
             switch (mode) {
                 case CHANGE:
-                    Files.write(randomPath, randomAsciiOfLength(10).getBytes(Charsets.UTF_8));
+                    Files.write(randomPath, randomAsciiOfLength(10).getBytes(StandardCharsets.UTF_8));
                     UserPrincipal randomOwner = fs.getUserPrincipalLookupService().lookupPrincipalByName(randomAsciiOfLength(10));
                     Files.setOwner(randomPath, randomOwner);
                     break;
                 case KEEP:
-                    Files.write(randomPath, randomAsciiOfLength(10).getBytes(Charsets.UTF_8));
+                    Files.write(randomPath, randomAsciiOfLength(10).getBytes(StandardCharsets.UTF_8));
                     UserPrincipal originalOwner = Files.getOwner(randomPath);
                     Files.setOwner(randomPath, originalOwner);
                     break;

@@ -279,12 +279,12 @@ public class CheckFileCommandTests extends ESTestCase {
             Path randomPath = paths[randomInt];
             switch (mode) {
                 case CHANGE:
-                    Files.write(randomPath, randomAsciiOfLength(10).getBytes(Charsets.UTF_8));
+                    Files.write(randomPath, randomAsciiOfLength(10).getBytes(StandardCharsets.UTF_8));
                     GroupPrincipal randomPrincipal = fs.getUserPrincipalLookupService().lookupPrincipalByGroupName(randomAsciiOfLength(10));
                     Files.getFileAttributeView(randomPath, PosixFileAttributeView.class).setGroup(randomPrincipal);
                     break;
                 case KEEP:
-                    Files.write(randomPath, randomAsciiOfLength(10).getBytes(Charsets.UTF_8));
+                    Files.write(randomPath, randomAsciiOfLength(10).getBytes(StandardCharsets.UTF_8));
                     GroupPrincipal groupPrincipal = Files.readAttributes(randomPath, PosixFileAttributes.class).group();
                     Files.getFileAttributeView(randomPath, PosixFileAttributeView.class).setGroup(groupPrincipal);
                     break;

@@ -308,7 +308,7 @@ public class CheckFileCommandTests extends ESTestCase {

         @Override
         public CliTool.ExitStatus doExecute(Settings settings, Environment env) throws Exception {
-            Files.write(pathToCreate, "anything".getBytes(Charsets.UTF_8));
+            Files.write(pathToCreate, "anything".getBytes(StandardCharsets.UTF_8));
             return CliTool.ExitStatus.OK;
         }

@@ -19,7 +19,7 @@

 package org.elasticsearch.common.io;

-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;

 import org.elasticsearch.test.ESTestCase;
 import org.apache.lucene.util.LuceneTestCase.SuppressFileSystems;

@@ -29,7 +29,6 @@ import org.junit.Test;

 import java.io.IOException;
 import java.net.URISyntaxException;
-import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.Arrays;

@@ -120,9 +119,9 @@ public class FileSystemUtilsTests extends ESTestCase {
     public void testMoveFilesDoesNotCreateSameFileWithSuffix() throws Exception {
         Path[] dirs = new Path[] { createTempDir(), createTempDir(), createTempDir()};
         for (Path dir : dirs) {
-            Files.write(dir.resolve("file1.txt"), "file1".getBytes(Charsets.UTF_8));
+            Files.write(dir.resolve("file1.txt"), "file1".getBytes(StandardCharsets.UTF_8));
             Files.createDirectory(dir.resolve("dir"));
-            Files.write(dir.resolve("dir").resolve("file2.txt"), "file2".getBytes(Charsets.UTF_8));
+            Files.write(dir.resolve("dir").resolve("file2.txt"), "file2".getBytes(StandardCharsets.UTF_8));
         }

         FileSystemUtils.moveFilesWithoutOverwriting(dirs[0], dst, ".new");

@@ -137,7 +136,7 @@ public class FileSystemUtilsTests extends ESTestCase {
         assertFileNotExists(dst.resolve("dir").resolve("file2.txt.new"));

         // change file content, make sure it gets updated
-        Files.write(dirs[2].resolve("dir").resolve("file2.txt"), "UPDATED".getBytes(Charsets.UTF_8));
+        Files.write(dirs[2].resolve("dir").resolve("file2.txt"), "UPDATED".getBytes(StandardCharsets.UTF_8));
         FileSystemUtils.moveFilesWithoutOverwriting(dirs[2], dst, ".new");
         assertFileContent(dst, "file1.txt", "file1");
         assertFileContent(dst, "dir/file2.txt", "file2");

@@ -157,7 +156,7 @@ public class FileSystemUtilsTests extends ESTestCase {
             Assert.assertThat("file [" + file + "] should not exist.", Files.exists(file), is(false));
         } else {
             assertFileExists(file);
-            String fileContent = new String(Files.readAllBytes(file), StandardCharsets.UTF_8);
+            String fileContent = new String(Files.readAllBytes(file), java.nio.charset.StandardCharsets.UTF_8);
             // trim the string content to prevent different handling on windows vs. unix and CR chars...
             Assert.assertThat(fileContent.trim(), equalTo(expected.trim()));
         }

@@ -19,7 +19,7 @@

 package org.elasticsearch.common.io;

-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.io.stream.StreamInput;

@@ -39,7 +39,7 @@ public class StreamsTests extends ESTestCase {

     @Test
     public void testCopyFromInputStream() throws IOException {
-        byte[] content = "content".getBytes(Charsets.UTF_8);
+        byte[] content = "content".getBytes(StandardCharsets.UTF_8);
         ByteArrayInputStream in = new ByteArrayInputStream(content);
         ByteArrayOutputStream out = new ByteArrayOutputStream(content.length);
         long count = copy(in, out);

@@ -50,7 +50,7 @@ public class StreamsTests extends ESTestCase {

     @Test
     public void testCopyFromByteArray() throws IOException {
-        byte[] content = "content".getBytes(Charsets.UTF_8);
+        byte[] content = "content".getBytes(StandardCharsets.UTF_8);
         ByteArrayOutputStream out = new ByteArrayOutputStream(content.length);
         copy(content, out);
         assertThat(Arrays.equals(content, out.toByteArray()), equalTo(true));

@@ -19,7 +19,7 @@

 package org.elasticsearch.common.lucene.store;

-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.apache.lucene.store.IndexInput;
 import org.elasticsearch.test.ESTestCase;
 import org.junit.Test;

@@ -33,7 +33,7 @@ public class ByteArrayIndexInputTests extends ESTestCase {
     @Test
     public void testRandomReads() throws IOException {
         for (int i = 0; i < 100; i++) {
-            byte[] input = randomUnicodeOfLength(randomIntBetween(1, 1000)).getBytes(Charsets.UTF_8);
+            byte[] input = randomUnicodeOfLength(randomIntBetween(1, 1000)).getBytes(StandardCharsets.UTF_8);
             ByteArrayIndexInput indexInput = new ByteArrayIndexInput("test", input);
             assertEquals(input.length, indexInput.length());
             assertEquals(0, indexInput.getFilePointer());

@@ -45,7 +45,7 @@ public class ByteArrayIndexInputTests extends ESTestCase {
     @Test
     public void testRandomOverflow() throws IOException {
         for (int i = 0; i < 100; i++) {
-            byte[] input = randomUnicodeOfLength(randomIntBetween(1, 1000)).getBytes(Charsets.UTF_8);
+            byte[] input = randomUnicodeOfLength(randomIntBetween(1, 1000)).getBytes(StandardCharsets.UTF_8);
             ByteArrayIndexInput indexInput = new ByteArrayIndexInput("test", input);
             int firstReadLen = randomIntBetween(0, input.length - 1);
             randomReadAndSlice(indexInput, firstReadLen);

@@ -64,7 +64,7 @@ public class ByteArrayIndexInputTests extends ESTestCase {
     @Test
     public void testSeekOverflow() throws IOException {
         for (int i = 0; i < 100; i++) {
-            byte[] input = randomUnicodeOfLength(randomIntBetween(1, 1000)).getBytes(Charsets.UTF_8);
+            byte[] input = randomUnicodeOfLength(randomIntBetween(1, 1000)).getBytes(StandardCharsets.UTF_8);
             ByteArrayIndexInput indexInput = new ByteArrayIndexInput("test", input);
             int firstReadLen = randomIntBetween(0, input.length - 1);
             randomReadAndSlice(indexInput, firstReadLen);

@@ -18,7 +18,7 @@
 */
 package org.elasticsearch.common.util;

-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.apache.lucene.util.CollectionUtil;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.TestUtil;

@@ -76,14 +76,14 @@ public class MultiDataPathUpgraderTests extends ESTestCase {
             int numFiles = randomIntBetween(1, 10);
             for (int i = 0; i < numFiles; i++, numIdxFiles++) {
                 String filename = Integer.toString(numIdxFiles);
-                try (BufferedWriter w = Files.newBufferedWriter(idx.resolve(filename + ".tst"), Charsets.UTF_8)) {
+                try (BufferedWriter w = Files.newBufferedWriter(idx.resolve(filename + ".tst"), StandardCharsets.UTF_8)) {
                     w.write(filename);
                 }
             }
             numFiles = randomIntBetween(1, 10);
             for (int i = 0; i < numFiles; i++, numTranslogFiles++) {
                 String filename = Integer.toString(numTranslogFiles);
-                try (BufferedWriter w = Files.newBufferedWriter(translog.resolve(filename + ".translog"), Charsets.UTF_8)) {
+                try (BufferedWriter w = Files.newBufferedWriter(translog.resolve(filename + ".translog"), StandardCharsets.UTF_8)) {
                     w.write(filename);
                 }
             }

@@ -124,14 +124,14 @@ public class MultiDataPathUpgraderTests extends ESTestCase {
             final String name = Integer.toString(i);
             translogFiles.contains(translog.resolve(name + ".translog"));
             byte[] content = Files.readAllBytes(translog.resolve(name + ".translog"));
-            assertEquals(name , new String(content, Charsets.UTF_8));
+            assertEquals(name , new String(content, StandardCharsets.UTF_8));
         }
         final HashSet<Path> idxFiles = Sets.newHashSet(FileSystemUtils.files(idx));
         for (int i = 0; i < numIdxFiles; i++) {
             final String name = Integer.toString(i);
             idxFiles.contains(idx.resolve(name + ".tst"));
             byte[] content = Files.readAllBytes(idx.resolve(name + ".tst"));
-            assertEquals(name , new String(content, Charsets.UTF_8));
+            assertEquals(name , new String(content, StandardCharsets.UTF_8));
         }
     }
 }

@@ -19,7 +19,7 @@

 package org.elasticsearch.document;

-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;

 import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.indices.alias.Alias;

@@ -578,7 +578,7 @@ public class BulkIT extends ESIntegTestCase {
                 "{\"index\": {\"_id\": \"2\"}}\n" +
                 "{\"name\": \"Good\", \"last_modified\" : \"2013-04-05\"}\n";

-        BulkResponse bulkResponse = client().prepareBulk().add(brokenBuildRequestData.getBytes(Charsets.UTF_8), 0, brokenBuildRequestData.length(), "test", "type").setRefresh(true).get();
+        BulkResponse bulkResponse = client().prepareBulk().add(brokenBuildRequestData.getBytes(StandardCharsets.UTF_8), 0, brokenBuildRequestData.length(), "test", "type").setRefresh(true).get();
         assertThat(bulkResponse.getItems().length, is(2));
         assertThat(bulkResponse.getItems()[0].isFailed(), is(true));
         assertThat(bulkResponse.getItems()[1].isFailed(), is(false));

@@ -605,7 +605,7 @@ public class BulkIT extends ESIntegTestCase {
                 "{\"index\": { \"_id\" : \"24000\" } }\n" +
                 "{\"name\": \"Good\", \"my_routing\" : \"48000\"}\n";

-        BulkResponse bulkResponse = client().prepareBulk().add(brokenBuildRequestData.getBytes(Charsets.UTF_8), 0, brokenBuildRequestData.length(), "test", "type").setRefresh(true).get();
+        BulkResponse bulkResponse = client().prepareBulk().add(brokenBuildRequestData.getBytes(StandardCharsets.UTF_8), 0, brokenBuildRequestData.length(), "test", "type").setRefresh(true).get();
         assertThat(bulkResponse.getItems().length, is(2));
         assertThat(bulkResponse.getItems()[0].isFailed(), is(true));
         assertThat(bulkResponse.getItems()[1].isFailed(), is(false));

@@ -632,7 +632,7 @@ public class BulkIT extends ESIntegTestCase {
                 "{\"index\": {} }\n" +
                 "{\"name\": \"Good\", \"my_id\" : \"48\"}\n";

-        BulkResponse bulkResponse = client().prepareBulk().add(brokenBuildRequestData.getBytes(Charsets.UTF_8), 0, brokenBuildRequestData.length(), "test", "type").setRefresh(true).get();
+        BulkResponse bulkResponse = client().prepareBulk().add(brokenBuildRequestData.getBytes(StandardCharsets.UTF_8), 0, brokenBuildRequestData.length(), "test", "type").setRefresh(true).get();
         assertThat(bulkResponse.getItems().length, is(2));
         assertThat(bulkResponse.getItems()[0].isFailed(), is(true));
         assertThat(bulkResponse.getItems()[1].isFailed(), is(false));

@@ -18,14 +18,10 @@
 */
 package org.elasticsearch.env;

-import com.google.common.base.Charsets;
 import org.elasticsearch.common.io.FileSystemUtils;
 import org.elasticsearch.common.io.Streams;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.test.ESTestCase;
 import org.junit.Test;

 import java.io.BufferedReader;
 import java.io.IOException;
 import java.net.URL;

@@ -18,9 +18,7 @@
 */
 package org.elasticsearch.http.netty;

-import com.google.common.base.Charsets;
-import com.google.common.base.Function;
-import com.google.common.collect.Collections2;
+import java.nio.charset.StandardCharsets;
 import org.elasticsearch.common.unit.ByteSizeUnit;
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.jboss.netty.bootstrap.ClientBootstrap;

@@ -33,11 +31,9 @@ import java.net.SocketAddress;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
 import java.util.concurrent.CountDownLatch;

 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.lessThan;
 import static org.jboss.netty.handler.codec.http.HttpHeaders.Names.HOST;
 import static org.jboss.netty.handler.codec.http.HttpVersion.HTTP_1_1;

@@ -46,26 +42,20 @@ import static org.jboss.netty.handler.codec.http.HttpVersion.HTTP_1_1;
 */
 public class NettyHttpClient implements Closeable {

-    private static final Function<? super HttpResponse, String> FUNCTION_RESPONSE_TO_CONTENT = new Function<HttpResponse, String>() {
-        @Override
-        public String apply(HttpResponse response) {
-            return response.getContent().toString(Charsets.UTF_8);
-        }
-    };
-
-    private static final Function<? super HttpResponse, String> FUNCTION_RESPONSE_OPAQUE_ID = new Function<HttpResponse, String>() {
-        @Override
-        public String apply(HttpResponse response) {
-            return response.headers().get("X-Opaque-Id");
-        }
-    };
-
     public static Collection<String> returnHttpResponseBodies(Collection<HttpResponse> responses) {
-        return Collections2.transform(responses, FUNCTION_RESPONSE_TO_CONTENT);
+        List<String> list = new ArrayList<>(responses.size());
+        for (HttpResponse response : responses) {
+            list.add(response.getContent().toString(StandardCharsets.UTF_8));
+        }
+        return list;
     }

     public static Collection<String> returnOpaqueIds(Collection<HttpResponse> responses) {
-        return Collections2.transform(responses, FUNCTION_RESPONSE_OPAQUE_ID);
+        List<String> list = new ArrayList<>(responses.size());
+        for (HttpResponse response : responses) {
+            list.add(response.headers().get("X-Opaque-Id"));
+        }
+        return list;
     }

     private final ClientBootstrap clientBootstrap;

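Collections2.transform returned a lazy view that re-applied the function on each traversal; the commit replaces it with an eager loop. Under JDK 8 a collected stream is equivalent, a sketch with a minimal stand-in for the Netty response type:

    import java.util.Collection;
    import java.util.stream.Collectors;

    class TransformLoopSketch {
        // Minimal stand-in for the Netty HttpResponse used above.
        static class HttpResponse {
            final String content;
            HttpResponse(String content) { this.content = content; }
            String getContent() { return content; }
        }

        // Eager equivalent of Collections2.transform(responses, FUNCTION_RESPONSE_TO_CONTENT).
        static Collection<String> bodies(Collection<HttpResponse> responses) {
            return responses.stream()
                    .map(HttpResponse::getContent)
                    .collect(Collectors.toList());
        }
    }

Both the loop and the collected stream evaluate the mapping exactly once per element, which avoids a subtle behavior change the lazy Guava view could introduce when a collection was traversed repeatedly.
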
@@ -18,7 +18,7 @@
 */
 package org.elasticsearch.http.netty;

-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.elasticsearch.cache.recycler.MockPageCacheRecycler;
 import org.elasticsearch.common.network.NetworkService;
 import org.elasticsearch.common.settings.Settings;

@@ -202,7 +202,7 @@ public class NettyHttpServerPipeliningTests extends ESTestCase {
                 request = (HttpRequest) e.getMessage();
             }

-            ChannelBuffer buffer = ChannelBuffers.copiedBuffer(request.getUri(), Charsets.UTF_8);
+            ChannelBuffer buffer = ChannelBuffers.copiedBuffer(request.getUri(), StandardCharsets.UTF_8);

             DefaultHttpResponse httpResponse = new DefaultHttpResponse(HTTP_1_1, OK);
             httpResponse.headers().add(CONTENT_LENGTH, buffer.readableBytes());

@@ -19,7 +19,7 @@

 package org.elasticsearch.index.mapper.simple;

-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.settings.Settings;

@@ -128,7 +128,7 @@ public class SimpleMapperTests extends ESSingleNodeTestCase {
                 .add(object("name").add(stringField("first").store(true).index(false))),
                 indexService.mapperService()).build(indexService.mapperService(), mapperParser);

-        BytesReference json = new BytesArray("".getBytes(Charsets.UTF_8));
+        BytesReference json = new BytesArray("".getBytes(StandardCharsets.UTF_8));
         try {
             docMapper.parse("test", "person", "1", json).rootDoc();
             fail("this point is never reached");

@@ -19,7 +19,7 @@

 package org.elasticsearch.index.shard;

-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.test.ESTestCase;

@@ -50,7 +50,7 @@ public class CommitPointsTests extends ESTestCase {
         CommitPoint commitPoint = new CommitPoint(1, "test", CommitPoint.Type.GENERATED, indexFiles, translogFiles);

         byte[] serialized = CommitPoints.toXContent(commitPoint);
-        logger.info("serialized commit_point {}", new String(serialized, Charsets.UTF_8));
+        logger.info("serialized commit_point {}", new String(serialized, StandardCharsets.UTF_8));

         CommitPoint desCp = CommitPoints.fromXContent(serialized);
         assertThat(desCp.version(), equalTo(commitPoint.version()));

@@ -19,7 +19,7 @@
 package org.elasticsearch.index.store;

 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.apache.lucene.codecs.CodecUtil;
 import org.apache.lucene.index.CheckIndex;
 import org.apache.lucene.index.IndexFileNames;

@@ -217,12 +217,12 @@ public class CorruptedFileIT extends ESIntegTestCase {
         }
         try (CheckIndex checkIndex = new CheckIndex(store.directory())) {
             BytesStreamOutput os = new BytesStreamOutput();
-            PrintStream out = new PrintStream(os, false, Charsets.UTF_8.name());
+            PrintStream out = new PrintStream(os, false, StandardCharsets.UTF_8.name());
             checkIndex.setInfoStream(out);
             out.flush();
             CheckIndex.Status status = checkIndex.checkIndex();
             if (!status.clean) {
-                logger.warn("check index [failure]\n{}", new String(os.bytes().toBytes(), Charsets.UTF_8));
+                logger.warn("check index [failure]\n{}", new String(os.bytes().toBytes(), StandardCharsets.UTF_8));
                 throw new IOException("index check failure");
             }
         }

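The .name() call survives here because, before a Charset overload was added in later JDKs, PrintStream accepted a charset only by name; that name-based constructor declares a checked UnsupportedEncodingException, which a literal UTF-8 can never actually trigger. A sketch:

    import java.io.ByteArrayOutputStream;
    import java.io.PrintStream;
    import java.io.UnsupportedEncodingException;
    import java.nio.charset.StandardCharsets;

    class PrintStreamSketch {
        public static void main(String[] args) throws UnsupportedEncodingException {
            ByteArrayOutputStream os = new ByteArrayOutputStream();
            // The String overload is the only charset-aware constructor on Java 8.
            PrintStream out = new PrintStream(os, false, StandardCharsets.UTF_8.name());
            out.print("check index output");
            out.flush();
            System.out.println(new String(os.toByteArray(), StandardCharsets.UTF_8));
        }
    }
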
@@ -19,8 +19,6 @@

 package org.elasticsearch.indexlifecycle;

-import com.google.common.base.Function;
-import com.google.common.collect.Iterables;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthStatus;
 import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;

@@ -30,7 +28,6 @@ import org.elasticsearch.cluster.routing.RoutingNode;
 import org.elasticsearch.cluster.routing.RoutingNodes;
 import org.elasticsearch.common.Priority;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.util.set.Sets;
 import org.elasticsearch.discovery.Discovery;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.elasticsearch.test.ESIntegTestCase.ClusterScope;

@@ -38,6 +35,8 @@ import org.elasticsearch.test.InternalTestCluster;
 import org.junit.Test;

 import java.util.Set;
+import java.util.stream.Collectors;
+import java.util.stream.StreamSupport;

 import static org.elasticsearch.client.Requests.clusterHealthRequest;
 import static org.elasticsearch.client.Requests.createIndexRequest;

@@ -222,12 +221,7 @@ public class IndexLifecycleActionIT extends ESIntegTestCase {
     }

     private void assertNodesPresent(RoutingNodes routingNodes, String... nodes) {
-        final Set<String> keySet = Sets.newHashSet(Iterables.transform(routingNodes, new Function<RoutingNode, String>() {
-            @Override
-            public String apply(RoutingNode input) {
-                return input.nodeId();
-            }
-        }));
+        final Set<String> keySet = StreamSupport.stream(routingNodes.spliterator(), false).map((p) -> (p.nodeId())).collect(Collectors.toSet());
         assertThat(keySet, containsInAnyOrder(nodes));
     }
 }

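Guava's Sets.newHashSet(Iterables.transform(...)) becomes one stream pipeline ending in Collectors.toSet(). A self-contained sketch with a hypothetical node type:

    import java.util.Arrays;
    import java.util.Set;
    import java.util.stream.Collectors;
    import java.util.stream.StreamSupport;

    class NodeIdSketch {
        static class RoutingNode {
            final String id;
            RoutingNode(String id) { this.id = id; }
            String nodeId() { return id; }
        }

        // Sets.newHashSet(Iterables.transform(nodes, fn)) becomes:
        static Set<String> nodeIds(Iterable<RoutingNode> nodes) {
            return StreamSupport.stream(nodes.spliterator(), false)
                    .map(RoutingNode::nodeId)
                    .collect(Collectors.toSet());
        }

        public static void main(String[] args) {
            System.out.println(nodeIds(Arrays.asList(new RoutingNode("n1"), new RoutingNode("n2"))));
        }
    }
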
@@ -19,7 +19,6 @@

 package org.elasticsearch.plugins;

-import com.google.common.base.Function;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.cluster.node.info.PluginsInfo;
 import org.elasticsearch.test.ESTestCase;

@@ -30,8 +29,8 @@ import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.List;
 import java.util.Properties;
+import java.util.stream.Collectors;

-import static org.elasticsearch.common.util.CollectionUtils.eagerTransform;
 import static org.hamcrest.Matchers.contains;

 public class PluginInfoTests extends ESTestCase {

@@ -260,10 +259,10 @@ public class PluginInfoTests extends ESTestCase {
     public void testReadFromPropertiesSitePluginWithoutSite() throws Exception {
         Path pluginDir = createTempDir().resolve("fake-plugin");
         writeProperties(pluginDir,
-            "description", "fake desc",
-            "name", "my_plugin",
-            "version", "1.0",
-            "site", "true");
+                "description", "fake desc",
+                "name", "my_plugin",
+                "version", "1.0",
+                "site", "true");
         try {
             PluginInfo.readFromProperties(pluginDir);
             fail("didn't get expected exception");

@@ -281,12 +280,7 @@ public class PluginInfoTests extends ESTestCase {
         pluginsInfo.add(new PluginInfo("d", "foo", true, "dummy", true, "dummyclass", true));

         final List<PluginInfo> infos = pluginsInfo.getInfos();
-        List<String> names = eagerTransform(infos, new Function<PluginInfo, String>() {
-            @Override
-            public String apply(PluginInfo input) {
-                return input.getName();
-            }
-        });
+        List<String> names = infos.stream().map((input) -> input.getName()).collect(Collectors.toList());
         assertThat(names, contains("a", "b", "c", "d", "e"));
     }
 }

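When the source is already a java.util.Collection, as with the List<PluginInfo> above, no spliterator is needed: stream() is available directly, and the lambda could shrink further to a method reference. A sketch:

    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;

    class MethodRefSketch {
        static class PluginInfo {
            final String name;
            PluginInfo(String name) { this.name = name; }
            String getName() { return name; }
        }

        public static void main(String[] args) {
            List<PluginInfo> infos = Arrays.asList(new PluginInfo("a"), new PluginInfo("b"));
            // (input) -> input.getName() and PluginInfo::getName are interchangeable here.
            List<String> names = infos.stream().map(PluginInfo::getName).collect(Collectors.toList());
            System.out.println(names); // [a, b]
        }
    }
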
@@ -18,7 +18,7 @@
  */
 package org.elasticsearch.plugins;

-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import com.google.common.hash.Hashing;

 import org.apache.http.impl.client.HttpClients;

@@ -28,7 +28,6 @@ import org.elasticsearch.common.Base64;
 import org.elasticsearch.common.cli.CliTool;
 import org.elasticsearch.common.cli.CliTool.ExitStatus;
 import org.elasticsearch.common.cli.CliToolTestCase.CaptureOutputTerminal;
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.node.internal.InternalSettingsPreparer;

@@ -57,7 +56,6 @@ import java.io.BufferedWriter;
 import java.io.IOException;
 import java.net.InetAddress;
 import java.net.InetSocketAddress;
-import java.nio.charset.StandardCharsets;
 import java.nio.file.FileVisitResult;
 import java.nio.file.Files;
 import java.nio.file.Path;

@@ -112,7 +110,7 @@ public class PluginManagerIT extends ESIntegTestCase {

     private void writeSha1(Path file, boolean corrupt) throws IOException {
         String sha1Hex = Hashing.sha1().hashBytes(Files.readAllBytes(file)).toString();
-        try (BufferedWriter out = Files.newBufferedWriter(file.resolveSibling(file.getFileName() + ".sha1"), Charsets.UTF_8)) {
+        try (BufferedWriter out = Files.newBufferedWriter(file.resolveSibling(file.getFileName() + ".sha1"), StandardCharsets.UTF_8)) {
             out.write(sha1Hex);
             if (corrupt) {
                 out.write("bad");

@@ -122,7 +120,7 @@ public class PluginManagerIT extends ESIntegTestCase {

     private void writeMd5(Path file, boolean corrupt) throws IOException {
         String md5Hex = Hashing.md5().hashBytes(Files.readAllBytes(file)).toString();
-        try (BufferedWriter out = Files.newBufferedWriter(file.resolveSibling(file.getFileName() + ".md5"), Charsets.UTF_8)) {
+        try (BufferedWriter out = Files.newBufferedWriter(file.resolveSibling(file.getFileName() + ".md5"), StandardCharsets.UTF_8)) {
             out.write(md5Hex);
             if (corrupt) {
                 out.write("bad");

@@ -618,7 +616,7 @@ public class PluginManagerIT extends ESIntegTestCase {
            assertThat(requests, hasSize(1));
            String msg = String.format(Locale.ROOT, "Request header did not contain Authorization header, terminal output was: %s", terminal.getTerminalOutput());
            assertThat(msg, requests.get(0).headers().contains("Authorization"), is(true));
-           assertThat(msg, requests.get(0).headers().get("Authorization"), is("Basic " + Base64.encodeBytes("user:pass".getBytes(Charsets.UTF_8))));
+           assertThat(msg, requests.get(0).headers().get("Authorization"), is("Basic " + Base64.encodeBytes("user:pass".getBytes(StandardCharsets.UTF_8))));
        } finally {
            HttpsURLConnection.setDefaultSSLSocketFactory(defaultSocketFactory);
            serverBootstrap.releaseExternalResources();
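Everywhere else in this commit the change is mechanical: java.nio.charset.StandardCharsets (Java 7+) exposes the same Charset constants as Guava's Charsets, so it drops straight into APIs such as Files.newBufferedWriter. A runnable sketch of the checksum-sidecar pattern above, with a temp file and a hypothetical digest value standing in for the real plugin artifact:

import java.io.BufferedWriter;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;

public class ChecksumSidecarExample {
    public static void main(String[] args) throws IOException {
        Path file = Files.createTempFile("plugin", ".zip"); // stand-in artifact
        String sha1Hex = "da39a3ee5e6b4b0d3255bfef95601890afd80709"; // hypothetical digest

        // Write "<artifact>.sha1" next to the artifact, UTF-8 encoded.
        Path sidecar = file.resolveSibling(file.getFileName() + ".sha1");
        try (BufferedWriter out = Files.newBufferedWriter(sidecar, StandardCharsets.UTF_8)) {
            out.write(sha1Hex);
        }
        System.out.println("wrote " + sidecar);
    }
}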
@@ -19,7 +19,6 @@

 package org.elasticsearch.search.basic;

-import com.google.common.base.Charsets;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.WriteConsistencyLevel;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
@@ -20,7 +20,6 @@
 package org.elasticsearch.search.basic;


-import com.google.common.base.Charsets;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.search.MultiSearchResponse;
 import org.elasticsearch.action.search.SearchPhaseExecutionException;
@@ -19,7 +19,7 @@

 package org.elasticsearch.search.suggest;

-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import com.google.common.io.Resources;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;

@@ -506,7 +506,7 @@ public class SuggestSearchIT extends ESIntegTestCase {
         assertAcked(builder.addMapping("type1", mapping));
         ensureGreen();

-        for (String line: Resources.readLines(SuggestSearchIT.class.getResource("/config/names.txt"), Charsets.UTF_8)) {
+        for (String line: Resources.readLines(SuggestSearchIT.class.getResource("/config/names.txt"), StandardCharsets.UTF_8)) {
             index("test", "type1", line, "body", line, "body_reverse", line, "bigram", line);
         }
         refresh();

@@ -698,7 +698,7 @@ public class SuggestSearchIT extends ESIntegTestCase {
         assertAcked(builder.addMapping("type1", mapping));
         ensureGreen();

-        for (String line: Resources.readLines(SuggestSearchIT.class.getResource("/config/names.txt"), Charsets.UTF_8)) {
+        for (String line: Resources.readLines(SuggestSearchIT.class.getResource("/config/names.txt"), StandardCharsets.UTF_8)) {
             index("test", "type1", line, "body", line, "bigram", line, "ngram", line);
         }
         refresh();
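Note that Guava's Resources.readLines takes any java.nio.charset.Charset, which is why swapping the constant is the entire change here, with the Resources import left in place. A sketch, assuming the /config/names.txt test resource is on the classpath:

import com.google.common.io.Resources;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.List;

public class ReadLinesExample {
    public static void main(String[] args) throws IOException {
        // Guava utility, JDK charset constant: the two interoperate freely.
        List<String> names = Resources.readLines(
                ReadLinesExample.class.getResource("/config/names.txt"),
                StandardCharsets.UTF_8);
        System.out.println(names.size() + " names loaded");
    }
}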
@@ -18,7 +18,7 @@
  */
 package org.elasticsearch.search.suggest.phrase;

-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.TokenFilter;
 import org.apache.lucene.analysis.Tokenizer;

@@ -86,7 +86,7 @@ public class NoisyChannelSpellCheckerTests extends ESTestCase {

         IndexWriterConfig conf = new IndexWriterConfig(wrapper);
         IndexWriter writer = new IndexWriter(dir, conf);
-        BufferedReader reader = new BufferedReader(new InputStreamReader(NoisyChannelSpellCheckerTests.class.getResourceAsStream("/config/names.txt"), Charsets.UTF_8));
+        BufferedReader reader = new BufferedReader(new InputStreamReader(NoisyChannelSpellCheckerTests.class.getResourceAsStream("/config/names.txt"), StandardCharsets.UTF_8));
         String line = null;
         while ((line = reader.readLine()) != null) {
             Document doc = new Document();

@@ -228,7 +228,7 @@ public class NoisyChannelSpellCheckerTests extends ESTestCase {

         IndexWriterConfig conf = new IndexWriterConfig(wrapper);
         IndexWriter writer = new IndexWriter(dir, conf);
-        BufferedReader reader = new BufferedReader(new InputStreamReader(NoisyChannelSpellCheckerTests.class.getResourceAsStream("/config/names.txt"), Charsets.UTF_8));
+        BufferedReader reader = new BufferedReader(new InputStreamReader(NoisyChannelSpellCheckerTests.class.getResourceAsStream("/config/names.txt"), StandardCharsets.UTF_8));
         String line = null;
         while ((line = reader.readLine()) != null) {
             Document doc = new Document();

@@ -315,7 +315,7 @@ public class NoisyChannelSpellCheckerTests extends ESTestCase {

         IndexWriterConfig conf = new IndexWriterConfig(wrapper);
         IndexWriter writer = new IndexWriter(dir, conf);
-        BufferedReader reader = new BufferedReader(new InputStreamReader(NoisyChannelSpellCheckerTests.class.getResourceAsStream("/config/names.txt"), Charsets.UTF_8));
+        BufferedReader reader = new BufferedReader(new InputStreamReader(NoisyChannelSpellCheckerTests.class.getResourceAsStream("/config/names.txt"), StandardCharsets.UTF_8));
         String line = null;
         while ((line = reader.readLine()) != null) {
             Document doc = new Document();
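One observation on the surrounding test code, not on the charset change itself: the reader is never visibly closed. A sketch of the same loop in try-with-resources form, assuming the resource exists on the classpath (the original test presumably relies on teardown or process exit):

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

public class ReadNamesExample {
    public static void main(String[] args) throws IOException {
        // Closes the reader even if the loop body throws.
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(
                ReadNamesExample.class.getResourceAsStream("/config/names.txt"),
                StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                System.out.println(line); // stand-in for building a Lucene Document
            }
        }
    }
}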
@@ -19,7 +19,7 @@

 package org.elasticsearch.test;

-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.elasticsearch.common.io.Streams;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;


@@ -35,7 +35,7 @@ public class StreamsUtils {
         if (is == null) {
             throw new FileNotFoundException("Resource [" + path + "] not found in classpath with class loader [" + classLoader + "]");
         }
-        return Streams.copyToString(new InputStreamReader(is, Charsets.UTF_8));
+        return Streams.copyToString(new InputStreamReader(is, StandardCharsets.UTF_8));
     }

     public static String copyToStringFromClasspath(String path) throws IOException {

@@ -43,7 +43,7 @@ public class StreamsUtils {
         if (is == null) {
             throw new FileNotFoundException("Resource [" + path + "] not found in classpath");
         }
-        return Streams.copyToString(new InputStreamReader(is, Charsets.UTF_8));
+        return Streams.copyToString(new InputStreamReader(is, StandardCharsets.UTF_8));
     }

     public static byte[] copyToBytesFromClasspath(String path) throws IOException {
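For context, Streams.copyToString here is Elasticsearch's own helper, so the only Guava piece in this class was the charset constant. A rough JDK-only equivalent of what such a helper does (buffering details of the real implementation may differ):

import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.StandardCharsets;

public final class CopyToString {
    // Drain an InputStream into a String, decoding as UTF-8.
    public static String copyToString(InputStream is) throws IOException {
        StringBuilder sb = new StringBuilder();
        char[] buffer = new char[1024];
        try (Reader reader = new InputStreamReader(is, StandardCharsets.UTF_8)) {
            int read;
            while ((read = reader.read(buffer)) != -1) {
                sb.append(buffer, 0, read);
            }
        }
        return sb.toString();
    }
}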
@@ -21,7 +21,7 @@ package org.elasticsearch.test.store;

 import com.carrotsearch.randomizedtesting.SeedUtils;
 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.apache.lucene.index.CheckIndex;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.store.*;

@@ -149,7 +149,7 @@ public class MockFSDirectoryService extends FsDirectoryService {
         }
         try (CheckIndex checkIndex = new CheckIndex(dir)) {
             BytesStreamOutput os = new BytesStreamOutput();
-            PrintStream out = new PrintStream(os, false, Charsets.UTF_8.name());
+            PrintStream out = new PrintStream(os, false, StandardCharsets.UTF_8.name());
             checkIndex.setInfoStream(out);
             out.flush();
             CheckIndex.Status status = checkIndex.checkIndex();

@@ -157,11 +157,11 @@ public class MockFSDirectoryService extends FsDirectoryService {
                 ESTestCase.checkIndexFailed = true;
                 logger.warn("check index [failure] index files={}\n{}",
                     Arrays.toString(dir.listAll()),
-                    new String(os.bytes().toBytes(), Charsets.UTF_8));
+                    new String(os.bytes().toBytes(), StandardCharsets.UTF_8));
                 throw new IOException("index check failure");
             } else {
                 if (logger.isDebugEnabled()) {
-                    logger.debug("check index [success]\n{}", new String(os.bytes().toBytes(), Charsets.UTF_8));
+                    logger.debug("check index [success]\n{}", new String(os.bytes().toBytes(), StandardCharsets.UTF_8));
                 }
             }
         }
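The .name() call survives this change for a reason: on Java 7/8 the PrintStream constructor used here takes a charset name as a String, not a Charset (a Charset-typed overload only arrived in Java 10). A small sketch of that constructor in isolation:

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;

public class PrintStreamCharsetExample {
    public static void main(String[] args) throws UnsupportedEncodingException {
        ByteArrayOutputStream os = new ByteArrayOutputStream();
        // The String-typed encoding parameter is why UTF_8.name() is needed.
        PrintStream out = new PrintStream(os, false, StandardCharsets.UTF_8.name());
        out.println("check index [success]");
        out.flush();
        System.out.println(new String(os.toByteArray(), StandardCharsets.UTF_8));
    }
}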
@@ -19,7 +19,7 @@

 package org.elasticsearch.transport;

-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;

 import org.elasticsearch.Version;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;

@@ -88,10 +88,10 @@ public class NettySizeHeaderFrameDecoderTests extends ESTestCase {
         String data = randomMethod + " / HTTP/1.1";

         try (Socket socket = new Socket(host, port)) {
-            socket.getOutputStream().write(data.getBytes(Charsets.UTF_8));
+            socket.getOutputStream().write(data.getBytes(StandardCharsets.UTF_8));
             socket.getOutputStream().flush();

-            try (BufferedReader reader = new BufferedReader(new InputStreamReader(socket.getInputStream(), Charsets.UTF_8))) {
+            try (BufferedReader reader = new BufferedReader(new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8))) {
                 assertThat(reader.readLine(), is("This is not a HTTP port"));
             }
         }

@@ -100,7 +100,7 @@ public class NettySizeHeaderFrameDecoderTests extends ESTestCase {
     @Test
     public void testThatNothingIsReturnedForOtherInvalidPackets() throws Exception {
         try (Socket socket = new Socket(host, port)) {
-            socket.getOutputStream().write("FOOBAR".getBytes(Charsets.UTF_8));
+            socket.getOutputStream().write("FOOBAR".getBytes(StandardCharsets.UTF_8));
             socket.getOutputStream().flush();

             // end of stream
@@ -19,7 +19,7 @@
 package org.elasticsearch.transport.netty;

 import com.carrotsearch.hppc.IntHashSet;
-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
 import org.elasticsearch.Version;
 import org.elasticsearch.cache.recycler.PageCacheRecycler;
 import org.elasticsearch.common.component.Lifecycle;

@@ -238,7 +238,7 @@ public class NettyTransportMultiPortTests extends ESTestCase {

         assertThat(socket.isConnected(), is(true));
         try (OutputStream os = socket.getOutputStream()) {
-            os.write("foo".getBytes(Charsets.UTF_8));
+            os.write("foo".getBytes(StandardCharsets.UTF_8));
             os.flush();
         }
     }
@@ -18,7 +18,7 @@
  */
 package org.elasticsearch.validate;

-import com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;

 import org.elasticsearch.action.admin.indices.alias.Alias;
 import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse;

@@ -68,7 +68,7 @@ public class SimpleValidateQueryIT extends ESIntegTestCase {

         refresh();

-        assertThat(client().admin().indices().prepareValidateQuery("test").setSource("foo".getBytes(Charsets.UTF_8)).execute().actionGet().isValid(), equalTo(false));
+        assertThat(client().admin().indices().prepareValidateQuery("test").setSource("foo".getBytes(StandardCharsets.UTF_8)).execute().actionGet().isValid(), equalTo(false));
         assertThat(client().admin().indices().prepareValidateQuery("test").setQuery(QueryBuilders.queryStringQuery("_id:1")).execute().actionGet().isValid(), equalTo(true));
         assertThat(client().admin().indices().prepareValidateQuery("test").setQuery(QueryBuilders.queryStringQuery("_i:d:1")).execute().actionGet().isValid(), equalTo(false));


@@ -97,7 +97,7 @@ public class SimpleValidateQueryIT extends ESIntegTestCase {

         for (Client client : internalCluster()) {
             ValidateQueryResponse response = client.admin().indices().prepareValidateQuery("test")
-                    .setSource("foo".getBytes(Charsets.UTF_8))
+                    .setSource("foo".getBytes(StandardCharsets.UTF_8))
                     .setExplain(true)
                     .execute().actionGet();
             assertThat(response.isValid(), equalTo(false));
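A side benefit of the Charset-typed constants visible in this hunk: String.getBytes(Charset) declares no checked exception, while the String-named overload does. A sketch contrasting the two:

import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;

public class GetBytesExample {
    public static void main(String[] args) throws UnsupportedEncodingException {
        byte[] a = "foo".getBytes(StandardCharsets.UTF_8); // no checked exception
        byte[] b = "foo".getBytes("UTF-8"); // declares UnsupportedEncodingException
        System.out.println(Arrays.equals(a, b)); // true
    }
}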
@@ -107,7 +107,10 @@ com.google.common.util.concurrent.SettableFuture
 com.google.common.util.concurrent.Futures
 com.google.common.util.concurrent.MoreExecutors
 com.google.common.collect.ImmutableSortedMap
+com.google.common.base.Charsets
+com.google.common.base.Function
+com.google.common.collect.Collections2

 @defaultMessage Do not violate java's access system
 java.lang.reflect.AccessibleObject#setAccessible(boolean)
 java.lang.reflect.AccessibleObject#setAccessible(java.lang.reflect.AccessibleObject[], boolean)
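These three entries extend the forbidden-apis signatures file, so any future compile-time reference to the banned classes fails the build. A sketch of the kind of replacement the ban pushes code toward, here for Collections2.filter (the plugin names are made up):

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class NoCollections2Example {
    public static void main(String[] args) {
        List<String> plugins = Arrays.asList("analysis-icu", "cloud-gce", "lang-groovy");

        // Previously: Collections2.filter(plugins, predicate) with a Guava Predicate.
        List<String> cloudPlugins = plugins.stream()
                .filter(name -> name.startsWith("cloud-"))
                .collect(Collectors.toList());

        System.out.println(cloudPlugins); // [cloud-gce]
    }
}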
@@ -27,23 +27,15 @@ import com.google.api.client.json.jackson2.JacksonFactory;
 import com.google.api.services.compute.Compute;
 import com.google.api.services.compute.model.Instance;
 import com.google.api.services.compute.model.InstanceList;
-import com.google.common.base.Function;
-import com.google.common.collect.Iterables;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.common.component.AbstractLifecycleComponent;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.common.util.CollectionUtils;

 import java.io.IOException;
 import java.security.GeneralSecurityException;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-
-import static org.elasticsearch.common.util.CollectionUtils.eagerTransform;
+import java.util.*;

 /**
  *
@@ -61,37 +53,30 @@ public class GceComputeServiceImpl extends AbstractLifecycleComponent<GceCompute

     @Override
     public Collection<Instance> instances() {
-
-        logger.debug("get instances for project [{}], zones [{}]", project, zones);
-
-        List<List<Instance>> instanceListByZone = eagerTransform(zones, new Function<String, List<Instance>>() {
-            @Override
-            public List<Instance> apply(String zoneId) {
-                try {
-                    Compute.Instances.List list = client().instances().list(project, zoneId);
-                    InstanceList instanceList = list.execute();
-                    if (instanceList.isEmpty()) {
-                        return Collections.EMPTY_LIST;
-                    }
-
-                    return instanceList.getItems();
-                } catch (IOException e) {
-                    logger.warn("Problem fetching instance list for zone {}", zoneId);
-                    logger.debug("Full exception:", e);
-
-                    return Collections.EMPTY_LIST;
-                }
-            }
-        });
-
-        // Collapse instances from all zones into one neat list
-        List<Instance> instanceList = CollectionUtils.iterableAsArrayList(Iterables.concat(instanceListByZone));
-
-        if (instanceList.size() == 0) {
-            logger.warn("disabling GCE discovery. Can not get list of nodes");
-        }
-
-        return instanceList;
+        logger.debug("get instances for project [{}], zones [{}]", project, zones);
+        final List<Instance> instances = zones.stream().map((zoneId) -> {
+            try {
+                Compute.Instances.List list = client().instances().list(project, zoneId);
+                InstanceList instanceList = list.execute();
+                if (instanceList.isEmpty()) {
+                    return Collections.EMPTY_LIST;
+                }
+                return instanceList.getItems();
+            } catch (IOException e) {
+                logger.warn("Problem fetching instance list for zone {}", zoneId);
+                logger.debug("Full exception:", e);
+                return Collections.EMPTY_LIST;
+            }
+        }).reduce(new ArrayList<>(), (a, b) -> {
+            a.addAll(b);
+            return a;
+        });
+
+        if (instances.isEmpty()) {
+            logger.warn("disabling GCE discovery. Can not get list of nodes");
+        }
+
+        return instances;
     }

     private Compute client;
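One design note on the new code: reduce(new ArrayList<>(), ...) mutates its identity value, which works for this sequential stream but violates reduce's contract and would break on a parallel stream; Collections.EMPTY_LIST is also the raw pre-generics constant, where Collections.emptyList() would be the typed form. The conventional flattening idiom is flatMap plus collect. A sketch with a hypothetical fetchZone standing in for the per-zone GCE call:

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class FlattenZonesExample {
    // Hypothetical stand-in for client().instances().list(project, zoneId).execute().
    static List<String> fetchZone(String zoneId) {
        return Arrays.asList(zoneId + "-instance-1", zoneId + "-instance-2");
    }

    public static void main(String[] args) {
        List<String> zones = Arrays.asList("europe-west1-a", "us-central1-a");

        // Flatten List<List<...>> to List<...> without a mutable reduce identity.
        List<String> instances = zones.stream()
                .map(FlattenZonesExample::fetchZone)
                .flatMap(List::stream)
                .collect(Collectors.toList());

        System.out.println(instances);
    }
}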