Remove and forbid use of com.google.common.collect.Iterables
This commit removes and now forbids all uses of com.google.common.collect.Iterables across the codebase. This is one of many steps in the eventual removal of Guava as a dependency. Relates #13224
parent 6e3a4e21a1 · commit 527ab95c39
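Most of the changes below follow a few mechanical substitution patterns: Guava's Iterables.addAll becomes Iterable.forEach with a method-reference add, Iterables.toArray becomes a StreamSupport stream collected into a typed array, and Iterables.concat/elementsEqual move to the in-house helper class this commit introduces. The following is a minimal, illustrative sketch of those patterns (not part of the diff; class name is hypothetical, and it assumes the new org.elasticsearch.common.util.iterable.Iterables class added further down is on the classpath):

    import org.elasticsearch.common.util.iterable.Iterables;

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.StreamSupport;

    public class GuavaIterablesReplacements {
        public static void main(String[] args) {
            Iterable<String> source = Arrays.asList("a", "b", "c");

            // Guava: Iterables.addAll(target, source)
            List<String> target = new ArrayList<>();
            source.forEach(target::add);

            // Guava: Iterables.toArray(source, String.class)
            String[] array = StreamSupport.stream(source.spliterator(), false)
                    .toArray(size -> new String[size]);

            // Guava: Iterables.concat(a, b) -> the new in-house helper
            Iterable<String> joined = Iterables.concat(Arrays.asList("x"), Arrays.asList("y"));

            System.out.println(target + " " + Arrays.toString(array) + " " + joined.iterator().next());
        }
    }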
@@ -21,7 +21,6 @@ package org.elasticsearch.cluster.routing;
 
 import com.carrotsearch.hppc.ObjectIntHashMap;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
-import com.google.common.collect.Iterables;
 import com.google.common.collect.Iterators;
 import org.apache.lucene.util.CollectionUtil;
 import org.elasticsearch.cluster.ClusterState;
@@ -31,15 +30,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
 import org.elasticsearch.index.shard.ShardId;
 
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
+import java.util.*;
 import java.util.function.Predicate;
 
 /**
@@ -308,7 +299,7 @@ public class RoutingNodes implements Iterable<RoutingNode> {
         }
         for (ShardRoutingState s : state) {
             if (s == ShardRoutingState.UNASSIGNED) {
-                Iterables.addAll(shards, unassigned());
+                unassigned().forEach(shards::add);
                 break;
             }
         }

@@ -21,7 +21,6 @@ package org.elasticsearch.cluster.routing;
 
 import com.carrotsearch.hppc.IntSet;
 import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Iterables;
 import com.google.common.collect.UnmodifiableIterator;
 import org.elasticsearch.cluster.Diff;
 import org.elasticsearch.cluster.Diffable;
@@ -30,14 +29,11 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.metadata.MetaData;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.util.iterable.Iterables;
 import org.elasticsearch.index.IndexNotFoundException;
 
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 import java.util.function.Predicate;
 
 /**
@@ -379,7 +375,10 @@ public class RoutingTable implements Iterable<IndexRoutingTable>, Diffable<Routi
                     indexBuilder.addShard(refData, shardRoutingEntry);
                 }
             }
-            for (ShardRouting shardRoutingEntry : Iterables.concat(routingNodes.unassigned(), routingNodes.unassigned().ignored())) {
+
+            Iterable<ShardRouting> shardRoutingEntries = Iterables.concat(routingNodes.unassigned(), routingNodes.unassigned().ignored());
+
+            for (ShardRouting shardRoutingEntry : shardRoutingEntries) {
                 String index = shardRoutingEntry.index();
                 IndexRoutingTable.Builder indexBuilder = indexRoutingTableBuilders.get(index);
                 if (indexBuilder == null) {

@@ -19,7 +19,6 @@
 
 package org.elasticsearch.cluster.service;
 
-import com.google.common.collect.Iterables;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.*;
 import org.elasticsearch.cluster.ClusterState.Builder;
@@ -42,6 +41,7 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.text.StringText;
 import org.elasticsearch.common.transport.TransportAddress;
 import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.util.iterable.Iterables;
 import org.elasticsearch.common.util.concurrent.*;
 import org.elasticsearch.discovery.Discovery;
 import org.elasticsearch.discovery.DiscoveryService;
@@ -89,10 +89,7 @@ public class InternalClusterService extends AbstractLifecycleComponent<ClusterSe
     private final Collection<ClusterStateListener> lastClusterStateListeners = new CopyOnWriteArrayList<>();
     // TODO this is rather frequently changing I guess a Synced Set would be better here and a dedicated remove API
     private final Collection<ClusterStateListener> postAppliedListeners = new CopyOnWriteArrayList<>();
-    private final Iterable<ClusterStateListener> preAppliedListeners = Iterables.concat(
-            priorityClusterStateListeners,
-            clusterStateListeners,
-            lastClusterStateListeners);
+    private final Iterable<ClusterStateListener> preAppliedListeners = Iterables.concat(priorityClusterStateListeners, clusterStateListeners, lastClusterStateListeners);
 
     private final LocalNodeMasterListeners localNodeMasterListeners;
 

@@ -20,8 +20,6 @@
 package org.elasticsearch.common;
 
 import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.Iterables;
-
 import org.apache.lucene.util.BytesRefBuilder;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.FastStringReader;
@@ -32,18 +30,7 @@ import org.elasticsearch.common.xcontent.json.JsonXContent;
 
 import java.io.BufferedReader;
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Properties;
-import java.util.Random;
-import java.util.Set;
-import java.util.StringTokenizer;
-import java.util.TreeSet;
+import java.util.*;
 
 /**
  *
@@ -840,9 +827,6 @@ public class Strings {
     }
 
     public static String collectionToDelimitedString(Iterable<?> coll, String delim, String prefix, String suffix, StringBuilder sb) {
-        if (Iterables.isEmpty(coll)) {
-            return "";
-        }
         Iterator<?> it = coll.iterator();
         while (it.hasNext()) {
             sb.append(prefix).append(it.next()).append(suffix);

@@ -17,9 +17,9 @@
 package org.elasticsearch.common.inject;
 
 import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.Iterables;
 import org.elasticsearch.common.inject.internal.*;
 import org.elasticsearch.common.inject.spi.Dependency;
+import org.elasticsearch.common.util.iterable.Iterables;
 
 import java.util.Collection;
 import java.util.List;

@@ -17,7 +17,6 @@
 package org.elasticsearch.common.inject.assistedinject;
 
 import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Iterables;
 import org.elasticsearch.common.inject.*;
 import org.elasticsearch.common.inject.internal.Errors;
 import org.elasticsearch.common.inject.internal.ErrorsException;
@@ -227,7 +226,7 @@ public final class FactoryProvider2<F> implements InvocationHandler, Provider<F>
         } catch (ProvisionException e) {
             // if this is an exception declared by the factory method, throw it as-is
             if (e.getErrorMessages().size() == 1) {
-                Message onlyError = Iterables.getOnlyElement(e.getErrorMessages());
+                Message onlyError = e.getErrorMessages().iterator().next();
                 Throwable cause = onlyError.getCause();
                 if (cause != null && canRethrow(method, cause)) {
                     throw cause;

@@ -17,7 +17,7 @@
 package org.elasticsearch.common.inject.internal;
 
 import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.Iterables;
+import org.elasticsearch.common.util.iterable.Iterables;
 
 import java.util.ArrayList;
 import java.util.List;

@@ -19,7 +19,6 @@
 
 package org.elasticsearch.common.lucene;
 
-import com.google.common.collect.Iterables;
 import org.apache.lucene.analysis.core.KeywordAnalyzer;
 import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.codecs.CodecUtil;
@@ -27,7 +26,10 @@ import org.apache.lucene.codecs.DocValuesFormat;
 import org.apache.lucene.codecs.PostingsFormat;
 import org.apache.lucene.index.*;
 import org.apache.lucene.search.*;
-import org.apache.lucene.store.*;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
+import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.store.Lock;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.Counter;
@@ -40,6 +42,7 @@ import org.elasticsearch.common.SuppressForbidden;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.logging.ESLogger;
+import org.elasticsearch.common.util.iterable.Iterables;
 import org.elasticsearch.index.analysis.AnalyzerScope;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.index.fielddata.IndexFieldData;
@@ -107,7 +110,7 @@ public class Lucene {
         for (SegmentCommitInfo info : infos) {
             list.add(info.files());
         }
-        return Iterables.concat(list);
+        return Iterables.flatten(list);
     }
 
     /**

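The Lucene.files() change above is a rename rather than a behavior change: Guava's Iterables.concat(Iterable<? extends Iterable<T>>) flattens a collection of iterables, while the in-house helper introduced by this commit splits that into a varargs concat(Iterable<T>...) and a flatten(Iterable<? extends Iterable<T>>) for the nested case. A minimal sketch of the distinction (illustrative only, hypothetical class name, assuming the new helper class from this commit):

    import org.elasticsearch.common.util.iterable.Iterables;

    import java.util.Arrays;
    import java.util.List;

    public class ConcatVsFlattenExample {
        public static void main(String[] args) {
            List<String> first = Arrays.asList("a", "b");
            List<String> second = Arrays.asList("c");

            // varargs form: joins the iterables given as arguments
            for (String s : Iterables.concat(first, second)) {
                System.out.print(s);   // prints: abc
            }
            System.out.println();

            // nested form: flattens one level of an Iterable of Iterables,
            // which is what Lucene.files() needs for its list of file collections
            List<List<String>> nested = Arrays.asList(first, second);
            for (String s : Iterables.flatten(nested)) {
                System.out.print(s);   // prints: abc
            }
            System.out.println();
        }
    }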
@@ -452,4 +452,5 @@ public enum CollectionUtils {
 
         return result;
     }
+
 }

@@ -0,0 +1,142 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.util.iterable;
+
+import org.elasticsearch.common.lucene.store.IndexOutputOutputStream;
+
+import java.util.*;
+import java.util.stream.Stream;
+import java.util.stream.StreamSupport;
+
+public class Iterables {
+    public Iterables() {
+    }
+
+    public static <T> Iterable<T> concat(Iterable<T>... inputs) {
+        Objects.requireNonNull(inputs);
+        return new ConcatenatedIterable(inputs);
+    }
+
+    static class ConcatenatedIterable<T> implements Iterable<T> {
+        private final Iterable<T>[] inputs;
+
+        ConcatenatedIterable(Iterable<T>[] inputs) {
+            this.inputs = Arrays.copyOf(inputs, inputs.length);
+        }
+
+        @Override
+        public Iterator<T> iterator() {
+            return Stream
+                    .of(inputs)
+                    .map(it -> StreamSupport.stream(it.spliterator(), false))
+                    .reduce(Stream::concat)
+                    .orElseGet(Stream::empty).iterator();
+        }
+    }
+
+    public static <T> Iterable<T> flatten(Iterable<? extends Iterable<T>> inputs) {
+        Objects.requireNonNull(inputs);
+        return new FlattenedIterables<>(inputs);
+    }
+
+    static class FlattenedIterables<T> implements Iterable<T> {
+        private final Iterable<? extends Iterable<T>> inputs;
+
+        FlattenedIterables(Iterable<? extends Iterable<T>> inputs) {
+            List<Iterable<T>> list = new ArrayList<>();
+            for (Iterable<T> iterable : inputs) {
+                list.add(iterable);
+            }
+            this.inputs = list;
+        }
+
+        @Override
+        public Iterator<T> iterator() {
+            return StreamSupport
+                    .stream(inputs.spliterator(), false)
+                    .flatMap(s -> StreamSupport.stream(s.spliterator(), false)).iterator();
+        }
+    }
+
+    public static boolean allElementsAreEqual(Iterable<?> left, Iterable<?> right) {
+        Objects.requireNonNull(left);
+        Objects.requireNonNull(right);
+        if (left instanceof Collection && right instanceof Collection) {
+            Collection collection1 = (Collection) left;
+            Collection collection2 = (Collection) right;
+            if (collection1.size() != collection2.size()) {
+                return false;
+            }
+        }
+
+        Iterator<?> leftIt = left.iterator();
+        Iterator<?> rightIt = right.iterator();
+
+        while (true) {
+            if (leftIt.hasNext()) {
+                if (!rightIt.hasNext()) {
+                    return false;
+                }
+
+                Object o1 = leftIt.next();
+                Object o2 = rightIt.next();
+                if (Objects.equals(o1, o2)) {
+                    continue;
+                }
+
+                return false;
+            }
+
+            return !rightIt.hasNext();
+        }
+    }
+
+    public static <T> T getFirst(Iterable<T> collection, T defaultValue) {
+        Objects.requireNonNull(collection);
+        Iterator<T> iterator = collection.iterator();
+        return iterator.hasNext() ? iterator.next() : defaultValue;
+    }
+
+    public static <T> T get(Iterable<T> iterable, int position) {
+        Objects.requireNonNull(iterable);
+        if (position < 0) {
+            throw new IllegalArgumentException("position >= 0");
+        }
+        if (iterable instanceof List) {
+            List<T> list = (List<T>)iterable;
+            if (position >= list.size()) {
+                throw new IndexOutOfBoundsException(Integer.toString(position));
+            }
+            return list.get(position);
+        } else {
+            Iterator<T> it = iterable.iterator();
+            for (int index = 0; index < position; index++) {
+                if (!it.hasNext()) {
+                    throw new IndexOutOfBoundsException(Integer.toString(position));
+                }
+                it.next();
+            }
+            if (!it.hasNext()) {
+                throw new IndexOutOfBoundsException(Integer.toString(position));
+            }
+            return it.next();
+        }
+    }
+}

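The new file above is the helper that replaces the forbidden Guava class across the codebase. A minimal usage sketch of its API (illustrative only, not part of the commit; hypothetical class name):

    import org.elasticsearch.common.util.iterable.Iterables;

    import java.util.Arrays;
    import java.util.List;

    public class IterablesUsageExample {
        public static void main(String[] args) {
            List<String> letters = Arrays.asList("a", "b", "c");

            // positional access and first-element access over any Iterable
            String first = Iterables.getFirst(letters, "none");   // "a"
            String third = Iterables.get(letters, 2);              // "c"

            // order-sensitive element-by-element comparison
            boolean same = Iterables.allElementsAreEqual(letters, Arrays.asList("a", "b", "c")); // true

            System.out.println(first + " " + third + " " + same);
        }
    }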
@@ -19,18 +19,12 @@
 
 package org.elasticsearch.index.mapper.internal;
 
-import com.google.common.collect.Iterables;
 import org.apache.lucene.document.BinaryDocValuesField;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.queries.TermsQuery;
-import org.apache.lucene.search.BooleanClause;
-import org.apache.lucene.search.BooleanQuery;
-import org.apache.lucene.search.MultiTermQuery;
-import org.apache.lucene.search.PrefixQuery;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.search.RegexpQuery;
+import org.apache.lucene.search.*;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.Version;
 import org.elasticsearch.common.Nullable;
@@ -38,17 +32,11 @@ import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.lucene.BytesRefs;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.iterable.Iterables;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.fielddata.FieldDataType;
-import org.elasticsearch.index.mapper.MappedFieldType;
-import org.elasticsearch.index.mapper.Mapper;
-import org.elasticsearch.index.mapper.MapperParsingException;
-import org.elasticsearch.index.mapper.MergeMappingException;
-import org.elasticsearch.index.mapper.MergeResult;
-import org.elasticsearch.index.mapper.MetadataFieldMapper;
-import org.elasticsearch.index.mapper.ParseContext;
-import org.elasticsearch.index.mapper.Uid;
+import org.elasticsearch.index.mapper.*;
 import org.elasticsearch.index.query.QueryParseContext;
 
 import java.io.IOException;

@@ -19,7 +19,6 @@
 
 package org.elasticsearch.index.mapper.object;
 
-import com.google.common.collect.Iterables;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.QueryWrapperFilter;
@@ -33,30 +32,13 @@ import org.elasticsearch.common.collect.CopyOnWriteHashMap;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.index.mapper.ContentPath;
-import org.elasticsearch.index.mapper.DocumentMapper;
-import org.elasticsearch.index.mapper.DocumentMapperParser;
-import org.elasticsearch.index.mapper.FieldMapper;
-import org.elasticsearch.index.mapper.Mapper;
-import org.elasticsearch.index.mapper.MapperParsingException;
-import org.elasticsearch.index.mapper.MapperUtils;
-import org.elasticsearch.index.mapper.MergeMappingException;
-import org.elasticsearch.index.mapper.MergeResult;
-import org.elasticsearch.index.mapper.MetadataFieldMapper;
+import org.elasticsearch.index.mapper.*;
 import org.elasticsearch.index.mapper.internal.AllFieldMapper;
 import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
 import org.elasticsearch.index.settings.IndexSettings;
 
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Locale;
-import java.util.Map;
+import java.util.*;
 
 import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
 import static org.elasticsearch.index.mapper.MapperBuilders.object;
@@ -583,7 +565,7 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll,
         doXContent(builder, params);
 
         // sort the mappers so we get consistent serialization format
-        Mapper[] sortedMappers = Iterables.toArray(mappers.values(), Mapper.class);
+        Mapper[] sortedMappers = mappers.values().stream().toArray(size -> new Mapper[size]);
         Arrays.sort(sortedMappers, new Comparator<Mapper>() {
             @Override
             public int compare(Mapper o1, Mapper o2) {

@@ -19,13 +19,12 @@
 
 package org.elasticsearch.index.query;
 
-import com.google.common.collect.Iterables;
-
 import org.apache.lucene.queries.TermsQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.lucene.search.Queries;
+import org.elasticsearch.common.util.iterable.Iterables;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.mapper.Uid;
 import org.elasticsearch.index.mapper.internal.UidFieldMapper;

@@ -19,7 +19,6 @@
 
 package org.elasticsearch.index.snapshots.blobstore;
 
-import com.google.common.collect.Iterables;
 import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.IndexFormatTooNewException;
 import org.apache.lucene.index.IndexFormatTooOldException;
@@ -48,14 +47,11 @@ import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.lucene.store.InputStreamIndexInput;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.ByteSizeValue;
+import org.elasticsearch.common.util.iterable.Iterables;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.deletionpolicy.SnapshotIndexCommit;
 import org.elasticsearch.index.shard.ShardId;
-import org.elasticsearch.index.snapshots.IndexShardRepository;
-import org.elasticsearch.index.snapshots.IndexShardRestoreFailedException;
-import org.elasticsearch.index.snapshots.IndexShardSnapshotException;
-import org.elasticsearch.index.snapshots.IndexShardSnapshotFailedException;
-import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus;
+import org.elasticsearch.index.snapshots.*;
 import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshot.FileInfo;
 import org.elasticsearch.index.store.Store;
 import org.elasticsearch.index.store.StoreFileMetaData;
@@ -71,11 +67,7 @@ import org.elasticsearch.repositories.blobstore.LegacyBlobStoreFormat;
 import java.io.FilterInputStream;
 import java.io.IOException;
 import java.io.InputStream;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 
 import static org.elasticsearch.repositories.blobstore.BlobStoreRepository.testBlobPrefix;
 

@@ -20,7 +20,6 @@
 package org.elasticsearch.index.store;
 
 import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Iterables;
 import org.apache.lucene.codecs.CodecUtil;
 import org.apache.lucene.index.*;
 import org.apache.lucene.store.*;
@@ -42,6 +41,7 @@ import org.elasticsearch.common.lucene.store.InputStreamIndexInput;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.Callback;
+import org.elasticsearch.common.util.iterable.Iterables;
 import org.elasticsearch.common.util.SingleObjectCache;
 import org.elasticsearch.common.util.concurrent.AbstractRefCounted;
 import org.elasticsearch.common.util.concurrent.RefCounted;

@@ -19,8 +19,8 @@
 package org.elasticsearch.indices.flush;
 
 import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Iterables;
 import org.elasticsearch.cluster.routing.ShardRouting;
+import org.elasticsearch.common.util.iterable.Iterables;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentBuilderString;
@@ -41,7 +41,7 @@ public class IndicesSyncedFlushResult implements ToXContent {
 
     public IndicesSyncedFlushResult(Map<String, List<ShardsSyncedFlushResult>> shardsResultPerIndex) {
         this.shardsResultPerIndex = ImmutableMap.copyOf(shardsResultPerIndex);
-        this.shardCounts = calculateShardCounts(Iterables.concat(shardsResultPerIndex.values()));
+        this.shardCounts = calculateShardCounts(Iterables.flatten(shardsResultPerIndex.values()));
     }
 
     /** total number shards, including replicas, both assigned and unassigned */

@@ -19,7 +19,6 @@
 
 package org.elasticsearch.indices.recovery;
 
-import com.google.common.collect.Iterables;
 import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.IndexFormatTooNewException;
 import org.apache.lucene.index.IndexFormatTooOldException;
@@ -39,6 +38,7 @@ import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.util.CancellableThreads;
 import org.elasticsearch.common.util.CancellableThreads.Interruptable;
+import org.elasticsearch.common.util.iterable.Iterables;
 import org.elasticsearch.common.util.concurrent.AbstractRunnable;
 import org.elasticsearch.index.deletionpolicy.SnapshotIndexCommit;
 import org.elasticsearch.index.engine.Engine;
@@ -64,6 +64,7 @@ import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.atomic.AtomicReference;
+import java.util.stream.StreamSupport;
 
 /**
  * RecoverySourceHandler handles the three phases of shard recovery, which is
@@ -411,7 +412,8 @@ public class RecoverySourceHandler {
                 if ((corruptIndexException = ExceptionsHelper.unwrapCorruption(remoteException)) != null) {
                     try {
                         final Store.MetadataSnapshot recoverySourceMetadata = store.getMetadata(snapshot);
-                        StoreFileMetaData[] metadata = Iterables.toArray(recoverySourceMetadata, StoreFileMetaData.class);
+                        StoreFileMetaData[] metadata =
+                                StreamSupport.stream(recoverySourceMetadata.spliterator(), false).toArray(size -> new StoreFileMetaData[size]);
                         ArrayUtil.timSort(metadata, new Comparator<StoreFileMetaData>() {
                             @Override
                             public int compare(StoreFileMetaData o1, StoreFileMetaData o2) {

@@ -20,14 +20,13 @@
 package org.elasticsearch.search.aggregations;
 
 
-import com.google.common.collect.Iterables;
-
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.search.Collector;
 
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.stream.StreamSupport;
 
 /**
  * A Collector that can collect data in separate buckets.
@@ -58,7 +57,8 @@ public abstract class BucketCollector implements Collector {
      * Wrap the given collectors into a single instance.
      */
    public static BucketCollector wrap(Iterable<? extends BucketCollector> collectorList) {
-        final BucketCollector[] collectors = Iterables.toArray(collectorList, BucketCollector.class);
+        final BucketCollector[] collectors =
+                StreamSupport.stream(collectorList.spliterator(), false).toArray(size -> new BucketCollector[size]);
         switch (collectors.length) {
             case 0:
                 return NO_OP_COLLECTOR;

@@ -19,12 +19,11 @@
 
 package org.elasticsearch.search.aggregations;
 
-import com.google.common.collect.Iterables;
-
 import org.apache.lucene.search.LeafCollector;
 import org.apache.lucene.search.Scorer;
 
 import java.io.IOException;
+import java.util.stream.Stream;
 import java.util.stream.StreamSupport;
 
 /**
@@ -44,9 +43,9 @@ public abstract class LeafBucketCollector implements LeafCollector {
     };
 
     public static LeafBucketCollector wrap(Iterable<LeafBucketCollector> collectors) {
-        final Iterable<LeafBucketCollector> actualCollectors =
-                StreamSupport.stream(collectors.spliterator(), false).filter(c -> c != NO_OP_COLLECTOR)::iterator;
-        final LeafBucketCollector[] colls = Iterables.toArray(actualCollectors, LeafBucketCollector.class);
+        final Stream<LeafBucketCollector> actualCollectors =
+                StreamSupport.stream(collectors.spliterator(), false).filter(c -> c != NO_OP_COLLECTOR);
+        final LeafBucketCollector[] colls = actualCollectors.toArray(size -> new LeafBucketCollector[size]);
         switch (colls.length) {
             case 0:
                 return NO_OP_COLLECTOR;

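Both collector changes above rely on the same JDK idiom to replace Guava's Iterables.toArray: stream an arbitrary Iterable via its spliterator and collect it into a typed array. A minimal standalone sketch (not from the commit; hypothetical class name):

    import java.util.Arrays;
    import java.util.stream.StreamSupport;

    public class IterableToArrayExample {
        public static void main(String[] args) {
            Iterable<String> iterable = Arrays.asList("a", "b", "c");

            // equivalent of Guava's Iterables.toArray(iterable, String.class)
            String[] array = StreamSupport.stream(iterable.spliterator(), false)
                    .toArray(size -> new String[size]);

            System.out.println(Arrays.toString(array)); // [a, b, c]
        }
    }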
@@ -18,7 +18,6 @@
  */
 package org.elasticsearch.search.internal;
 
-import com.google.common.collect.Iterables;
 import com.google.common.collect.Multimap;
 import com.google.common.collect.MultimapBuilder;
 
@@ -35,6 +34,7 @@ import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.lease.Releasable;
 import org.elasticsearch.common.lease.Releasables;
 import org.elasticsearch.common.util.BigArrays;
+import org.elasticsearch.common.util.iterable.Iterables;
 import org.elasticsearch.index.analysis.AnalysisService;
 import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
 import org.elasticsearch.index.fielddata.IndexFieldDataService;
@@ -330,7 +330,7 @@ public abstract class SearchContext extends DelegatingHasContextAndHeaders imple
                 }
                 releasables.add(clearables.removeAll(lc));
             }
-            Releasables.close(Iterables.concat(releasables));
+            Releasables.close(Iterables.flatten(releasables));
         }
     }
 

@@ -20,7 +20,6 @@
 package org.elasticsearch.search.suggest.context;
 
 import com.google.common.base.Joiner;
-import com.google.common.collect.Iterables;
 import org.apache.lucene.analysis.PrefixAnalyzer;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.index.IndexableField;
@@ -28,6 +27,7 @@ import org.apache.lucene.util.automaton.Automata;
 import org.apache.lucene.util.automaton.Automaton;
 import org.apache.lucene.util.automaton.Operations;
 import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.common.util.iterable.Iterables;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentParser.Token;
@@ -35,11 +35,7 @@ import org.elasticsearch.index.mapper.ParseContext;
 import org.elasticsearch.index.mapper.ParseContext.Document;
 
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 
 /**
  * The {@link CategoryContextMapping} is used to define a {@link ContextMapping} that
@@ -213,7 +209,7 @@ public class CategoryContextMapping extends ContextMapping {
         if (obj instanceof CategoryContextMapping) {
             CategoryContextMapping other = (CategoryContextMapping) obj;
             if (this.fieldName.equals(other.fieldName)) {
-                return Iterables.elementsEqual(this.defaultValues, other.defaultValues);
+                return Iterables.allElementsAreEqual(this.defaultValues, other.defaultValues);
             }
         }
         return false;

@@ -19,7 +19,6 @@
 
 package org.elasticsearch.search.suggest.context;
 
-import com.google.common.collect.Iterables;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.search.suggest.analyzing.XAnalyzingSuggester;
 import org.apache.lucene.util.automaton.Automata;
@@ -36,13 +35,7 @@ import org.elasticsearch.index.mapper.ParseContext;
 import org.elasticsearch.index.mapper.ParseContext.Document;
 
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.SortedMap;
-import java.util.TreeMap;
+import java.util.*;
 
 /**
  * A {@link ContextMapping} is used t define a context that may used
@@ -157,7 +150,7 @@ public abstract class ContextMapping implements ToXContent {
      * @return true if both arguments are equal
      */
    public static boolean mappingsAreEqual(SortedMap<String, ? extends ContextMapping> thisMappings, SortedMap<String, ? extends ContextMapping> otherMappings) {
-        return Iterables.elementsEqual(thisMappings.entrySet(), otherMappings.entrySet());
+        return thisMappings.entrySet().equals(otherMappings.entrySet());
     }
 
     @Override

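The two suggest-context replacements above are not identical in general: Guava's Iterables.elementsEqual and the new allElementsAreEqual compare element by element in iteration order, whereas Set.equals ignores order. For the entry sets of two SortedMaps that share an ordering, as in mappingsAreEqual, the two notions coincide. A minimal sketch of the nuance (illustrative only, hypothetical class name, assuming the new helper class from this commit):

    import java.util.Arrays;
    import java.util.LinkedHashSet;
    import java.util.Set;

    import org.elasticsearch.common.util.iterable.Iterables;

    public class ElementEqualityExample {
        public static void main(String[] args) {
            Set<String> ab = new LinkedHashSet<>(Arrays.asList("a", "b"));
            Set<String> ba = new LinkedHashSet<>(Arrays.asList("b", "a"));

            System.out.println(ab.equals(ba));                          // true: set equality ignores order
            System.out.println(Iterables.allElementsAreEqual(ab, ba));  // false: compares in iteration order
        }
    }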
@@ -19,7 +19,6 @@
 
 package org.elasticsearch.common.util;
 
-import com.google.common.collect.Iterables;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefArray;
 import org.apache.lucene.util.BytesRefBuilder;
@@ -27,14 +26,7 @@ import org.apache.lucene.util.Counter;
 import org.elasticsearch.test.ESTestCase;
 import org.junit.Test;
 
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.SortedSet;
-import java.util.TreeSet;
+import java.util.*;
 
 import static org.elasticsearch.common.util.CollectionUtils.eagerPartition;
 import static org.hamcrest.Matchers.equalTo;
@@ -60,7 +52,7 @@ public class CollectionUtilsTests extends ESTestCase {
            final List<Object> rotated = CollectionUtils.rotate(list, distance);
            // check content is the same
            assertEquals(rotated.size(), list.size());
-           assertEquals(Iterables.size(rotated), list.size());
+           assertEquals(rotated.size(), list.size());
            assertEquals(new HashSet<>(rotated), new HashSet<>(list));
            // check stability
            for (int j = randomInt(4); j >= 0; --j) {

@@ -0,0 +1,81 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.util.iterable;
+
+import org.elasticsearch.test.ESTestCase;
+import org.hamcrest.Matchers;
+
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.List;
+import java.util.NoSuchElementException;
+
+import static org.hamcrest.Matchers.contains;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.object.HasToString.hasToString;
+import static org.junit.Assert.*;
+
+public class IterablesTests extends ESTestCase {
+    public void testGetOverList() {
+        test(Arrays.asList("a", "b", "c"));
+    }
+
+    public void testGetOverIterable() {
+        Iterable<String> iterable = () ->
+                new Iterator<String>() {
+                    private int position = 0;
+
+                    @Override
+                    public boolean hasNext() {
+                        return position < 3;
+                    }
+
+                    @Override
+                    public String next() {
+                        if (position < 3) {
+                            String s = position == 0 ? "a" : position == 1 ? "b" : "c";
+                            position++;
+                            return s;
+                        } else {
+                            throw new NoSuchElementException();
+                        }
+                    }
+                };
+        test(iterable);
+    }
+
+    private void test(Iterable<String> iterable) {
+        try {
+            Iterables.get(iterable, -1);
+            fail("expected IllegalArgumentException");
+        } catch (IllegalArgumentException e) {
+            assertThat(e, hasToString("java.lang.IllegalArgumentException: position >= 0"));
+        }
+        assertEquals("a", Iterables.get(iterable, 0));
+        assertEquals("b", Iterables.get(iterable, 1));
+        assertEquals("c", Iterables.get(iterable, 2));
+        try {
+            Iterables.get(iterable, 3);
+            fail("expected IndexOutOfBoundsException");
+        } catch (IndexOutOfBoundsException e) {
+            assertThat(e, hasToString("java.lang.IndexOutOfBoundsException: 3"));
+        }
+    }
+}

@@ -19,22 +19,15 @@
 package org.elasticsearch.search.highlight;
 
 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
-import com.google.common.base.Joiner;
-import com.google.common.collect.Iterables;
 import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.action.search.SearchRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.common.settings.Settings.Builder;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.index.query.BoostableQueryBuilder;
-import org.elasticsearch.index.query.IdsQueryBuilder;
-import org.elasticsearch.index.query.MatchQueryBuilder;
+import org.elasticsearch.index.query.*;
 import org.elasticsearch.index.query.MatchQueryBuilder.Operator;
 import org.elasticsearch.index.query.MatchQueryBuilder.Type;
-import org.elasticsearch.index.query.MultiMatchQueryBuilder;
-import org.elasticsearch.index.query.QueryBuilder;
-import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
@@ -51,38 +44,12 @@ import java.util.Map;
 import static org.elasticsearch.client.Requests.searchRequest;
 import static org.elasticsearch.common.settings.Settings.settingsBuilder;
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
-import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
-import static org.elasticsearch.index.query.QueryBuilders.boostingQuery;
-import static org.elasticsearch.index.query.QueryBuilders.commonTermsQuery;
-import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
-import static org.elasticsearch.index.query.QueryBuilders.fuzzyQuery;
-import static org.elasticsearch.index.query.QueryBuilders.matchPhrasePrefixQuery;
-import static org.elasticsearch.index.query.QueryBuilders.matchPhraseQuery;
-import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
-import static org.elasticsearch.index.query.QueryBuilders.missingQuery;
-import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery;
-import static org.elasticsearch.index.query.QueryBuilders.prefixQuery;
-import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery;
-import static org.elasticsearch.index.query.QueryBuilders.rangeQuery;
-import static org.elasticsearch.index.query.QueryBuilders.regexpQuery;
-import static org.elasticsearch.index.query.QueryBuilders.termQuery;
-import static org.elasticsearch.index.query.QueryBuilders.typeQuery;
-import static org.elasticsearch.index.query.QueryBuilders.wildcardQuery;
+import static org.elasticsearch.index.query.QueryBuilders.*;
 import static org.elasticsearch.search.builder.SearchSourceBuilder.highlight;
 import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFailures;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHighlight;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNotHighlighted;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*;
 import static org.elasticsearch.test.hamcrest.RegexMatcher.matches;
-import static org.hamcrest.Matchers.anyOf;
-import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.hasKey;
-import static org.hamcrest.Matchers.not;
-import static org.hamcrest.Matchers.startsWith;
+import static org.hamcrest.Matchers.*;
 
 public class HighlighterSearchIT extends ESIntegTestCase {
 
@@ -845,8 +812,10 @@ public class HighlighterSearchIT extends ESIntegTestCase {
         ensureGreen();
 
         // Index one megabyte of "t " over and over and over again
+        String pattern = "t ";
+        String value = new String(new char[1024 * 256 / pattern.length()]).replace("\0", pattern);
         client().prepareIndex("test", "type1")
-                .setSource("field1", Joiner.on("").join(Iterables.limit(Iterables.cycle("t "), 1024*256))).get();
+                .setSource("field1", value).get();
         refresh();
 
         logger.info("--> highlighting and searching on field1");

@@ -110,7 +110,8 @@ com.google.common.base.Function
 com.google.common.collect.Collections2
 com.google.common.cache.LoadingCache
 com.google.common.cache.CacheLoader
+com.google.common.collect.Iterables
 
 @defaultMessage Do not violate java's access system
 java.lang.reflect.AccessibleObject#setAccessible(boolean)
 java.lang.reflect.AccessibleObject#setAccessible(java.lang.reflect.AccessibleObject[], boolean)
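The final hunk is what makes the removal stick: adding com.google.common.collect.Iterables to the forbidden-apis signatures file causes the build-time check to flag any future use of the class. A hypothetical sketch of the kind of code the entry is meant to reject (not from the commit; it compiles and runs with Guava on the classpath, but the check would fail the build, steering callers to the new in-house helper or plain JDK streams instead):

    import com.google.common.collect.Iterables;

    import java.util.Arrays;
    import java.util.List;

    public class ForbiddenUsageExample {
        public static void main(String[] args) {
            List<String> a = Arrays.asList("a", "b");
            List<String> b = Arrays.asList("c");

            // compiles and runs, but the forbidden-apis check flags this call
            for (String s : Iterables.concat(a, b)) {
                System.out.println(s);
            }
        }
    }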