Remove methods from code review

Nik Everett 2015-09-24 13:43:05 -04:00
parent ffa5b27c75
commit 63da3b1a7a
8 changed files with 42 additions and 101 deletions

View File

@@ -25,6 +25,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.rest.RestStatus;
import java.io.IOException;
+import java.util.HashSet;
import java.util.Set;
import static java.util.Collections.unmodifiableSet;
@@ -42,7 +43,12 @@ public class ClusterBlockException extends ElasticsearchException {
public ClusterBlockException(StreamInput in) throws IOException {
super(in);
-blocks = unmodifiableSet(in.readSet(ClusterBlock::readClusterBlock));
+int totalBlocks = in.readVInt();
+Set<ClusterBlock> blocks = new HashSet<>(totalBlocks);
+for (int i = 0; i < totalBlocks;i++) {
+blocks.add(ClusterBlock.readClusterBlock(in));
+}
+this.blocks = unmodifiableSet(blocks);
}
@Override

View File

@@ -91,7 +91,7 @@ public class ClusterBlocks extends AbstractDiffable<ClusterBlocks> {
return levelHolders[level.id()].indices();
}
-public Set<ClusterBlock> blocksForIndex(ClusterBlockLevel level, String index) {
+private Set<ClusterBlock> blocksForIndex(ClusterBlockLevel level, String index) {
return indices(level).getOrDefault(index, emptySet());
}
@@ -220,7 +220,12 @@ public class ClusterBlocks extends AbstractDiffable<ClusterBlocks> {
}
private static Set<ClusterBlock> readBlockSet(StreamInput in) throws IOException {
-return unmodifiableSet(in.readSet(ClusterBlock::readClusterBlock));
+int totalBlocks = in.readVInt();
+Set<ClusterBlock> blocks = new HashSet<>(totalBlocks);
+for (int i = 0; i < totalBlocks;i++) {
+blocks.add(ClusterBlock.readClusterBlock(in));
+}
+return unmodifiableSet(blocks);
}
static class ImmutableLevelHolder {

View File

@@ -18,11 +18,11 @@ package org.elasticsearch.common.inject.spi;
import org.elasticsearch.common.inject.Key;
+import java.util.HashSet;
import java.util.Objects;
import java.util.Set;
import static java.util.Collections.unmodifiableSet;
-import static org.elasticsearch.common.util.set.Sets.toFlatSet;
/**
* A variable that can be resolved by an injector.
@@ -60,8 +60,11 @@ public final class Dependency<T> {
* Returns the dependencies from the given injection points.
*/
public static Set<Dependency<?>> forInjectionPoints(Set<InjectionPoint> injectionPoints) {
-return unmodifiableSet(
-injectionPoints.stream().map(InjectionPoint::getDependencies).collect(toFlatSet()));
+Set<Dependency<?>> dependencies = new HashSet<>();
+for (InjectionPoint injectionPoint : injectionPoints) {
+dependencies.addAll(injectionPoint.getDependencies());
+}
+return unmodifiableSet(dependencies);
}
/**

View File

@@ -46,12 +46,9 @@ import java.nio.file.NoSuchFileException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
-import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
-import java.util.Set;
-import java.util.function.Function;
import static org.elasticsearch.ElasticsearchException.readException;
import static org.elasticsearch.ElasticsearchException.readStackTrace;
@@ -483,18 +480,6 @@
return values;
}
-/**
-* Reads a set using reader to decode the set elements.
-*/
-public <T> Set<T> readSet(ObjectReader<T> reader) throws IOException {
-int num = readVInt();
-Set<T> builder = new HashSet<>(num);
-for (int i = 0; i < num; i++) {
-builder.add(reader.read(this));
-}
-return builder;
-}
/**
* Serializes a potential null value.
*/
@@ -600,12 +585,4 @@
public static StreamInput wrap(byte[] bytes, int offset, int length) {
return new InputStreamStreamInput(new ByteArrayInputStream(bytes, offset, length));
}
-/**
-* FunctionalInterface for methods that read an object from the stream.
-*/
-@FunctionalInterface
-public static interface ObjectReader<T> {
-public T read(StreamInput in) throws IOException;
-}
}
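With readSet and ObjectReader gone, each caller inlines the same size-prefixed read loop. The following is a minimal self-contained sketch of that pattern using only the JDK; DataInput/DataOutput and a fixed-width int length stand in for Elasticsearch's StreamInput and variable-length vInt, and readElement is a hypothetical stand-in for a per-type reader such as ClusterBlock::readClusterBlock.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;

import static java.util.Collections.unmodifiableSet;

public class ReadSetSketch {
    // Hypothetical per-element reader; plays the role the removed ObjectReader<T> played.
    static String readElement(DataInput in) throws IOException {
        return in.readUTF();
    }

    // The inlined pattern: read a count, then read that many elements into a sized HashSet.
    static Set<String> readStringSet(DataInput in) throws IOException {
        int totalElements = in.readInt();          // StreamInput uses readVInt() here
        Set<String> elements = new HashSet<>(totalElements);
        for (int i = 0; i < totalElements; i++) {
            elements.add(readElement(in));
        }
        return unmodifiableSet(elements);
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(bytes)) {
            out.writeInt(2);                        // count first, mirroring the read side
            out.writeUTF("read");
            out.writeUTF("write");
        }
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
        System.out.println(readStringSet(in));      // e.g. [read, write]
    }
}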

View File

@@ -21,17 +21,11 @@ package org.elasticsearch.common.util.set;
import java.util.Collection;
import java.util.Collections;
-import java.util.EnumSet;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
-import java.util.function.BiConsumer;
-import java.util.function.BinaryOperator;
-import java.util.function.Function;
-import java.util.function.Supplier;
-import java.util.stream.Collector;
import java.util.stream.Collectors;
public final class Sets {
@@ -59,27 +53,6 @@
return set;
}
-/**
-* Create a new HashSet copying the original set with elements added. Useful
-* for initializing constants without static blocks.
-*/
-public static <T> HashSet<T> newHashSetCopyWith(Set<T> original, T... elements) {
-Objects.requireNonNull(original);
-Objects.requireNonNull(elements);
-HashSet<T> set = new HashSet<>(original.size() + elements.length);
-set.addAll(original);
-Collections.addAll(set, elements);
-return set;
-}
-/**
-* Collects {@code Stream<Collection<T>>} into {@code Set<T>}.
-*/
-@SuppressWarnings("unchecked")
-public static <T> Collector<Collection<T>, Set<T>, Set<T>> toFlatSet() {
-return (Collector<Collection<T>, Set<T>, Set<T>>) (Object) ToFlatSetCollector.INSTANCE;
-}
public static <T> Set<T> newConcurrentHashSet() {
return Collections.newSetFromMap(new ConcurrentHashMap<>());
}
@@ -103,39 +76,4 @@
union.addAll(right);
return union;
}
-/**
-* Collects {@code Stream<Collection<T>>} into {@code Set<T>}.
-*/
-private static enum ToFlatSetCollector implements Collector<Collection<Object>, Set<Object>, Set<Object>> {
-INSTANCE;
-@Override
-public Supplier<Set<Object>> supplier() {
-return HashSet::new;
-}
-@Override
-public BiConsumer<Set<Object>, Collection<Object>> accumulator() {
-return Collection::addAll;
-}
-@Override
-public BinaryOperator<Set<Object>> combiner() {
-return (lhs, rhs) -> {
-lhs.addAll(rhs);
-return lhs;
-};
-}
-@Override
-public Function<Set<Object>, Set<Object>> finisher() {
-return Function.identity();
-}
-@Override
-public Set<java.util.stream.Collector.Characteristics> characteristics() {
-return EnumSet.of(Collector.Characteristics.IDENTITY_FINISH, Collector.Characteristics.UNORDERED);
-}
-}
}
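The two removed helpers are straightforward to reproduce at call sites or with stock JDK stream machinery. The following self-contained sketch is illustrative only; copyWith is a hypothetical name, not something added by this commit, and unlike newHashSetCopyWith it also wraps the result as unmodifiable.

import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

import static java.util.Collections.unmodifiableSet;

public class SetsSketch {
    // What newHashSetCopyWith(original, extras...) did: size the HashSet, copy, then add.
    @SafeVarargs
    static <T> Set<T> copyWith(Set<T> original, T... extras) {
        Set<T> copy = new HashSet<>(original.size() + extras.length);
        copy.addAll(original);
        Collections.addAll(copy, extras);
        return unmodifiableSet(copy);
    }

    public static void main(String[] args) {
        Set<String> base = new HashSet<>(Arrays.asList("a", "b"));
        System.out.println(copyWith(base, "c"));   // [a, b, c] in some order

        // What toFlatSet() collected: a Stream<Collection<T>> flattened into a single Set<T>.
        List<List<String>> groups = Arrays.asList(Arrays.asList("a", "b"), Arrays.asList("b", "c"));
        Set<String> flat = groups.stream()
                .flatMap(Collection::stream)
                .collect(Collectors.toSet());
        System.out.println(flat);                  // [a, b, c] in some order
    }
}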

View File

@@ -64,6 +64,7 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
+import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@@ -75,7 +76,6 @@ import java.util.function.Function;
import static java.util.Collections.emptySet;
import static java.util.Collections.unmodifiableSet;
import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder;
-import static org.elasticsearch.common.util.set.Sets.newHashSetCopyWith;
/**
*
@@ -288,7 +288,10 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
}
mappers = newMapBuilder(mappers).put(mapper.type(), mapper).map();
if (mapper.parentFieldMapper().active()) {
-parentTypes = unmodifiableSet(newHashSetCopyWith(parentTypes, mapper.parentFieldMapper().type()));
+Set<String> newParentTypes = new HashSet<>(parentTypes.size() + 1);
+newParentTypes.addAll(parentTypes);
+newParentTypes.add(mapper.parentFieldMapper().type());
+parentTypes = unmodifiableSet(newParentTypes);
}
assert assertSerialization(mapper);
return mapper;

View File

@@ -29,13 +29,14 @@ import org.elasticsearch.rest.support.RestUtils;
import java.io.IOException;
import java.util.Arrays;
+import java.util.Collections;
import java.util.Comparator;
+import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import static java.util.Collections.emptySet;
import static java.util.Collections.unmodifiableSet;
-import static org.elasticsearch.common.util.set.Sets.newHashSetCopyWith;
import static org.elasticsearch.rest.RestStatus.BAD_REQUEST;
import static org.elasticsearch.rest.RestStatus.OK;
@@ -84,7 +85,10 @@ public class RestController extends AbstractLifecycleComponent<RestController> {
* By default no headers get copied but it is possible to extend this behaviour via plugins by calling this method.
*/
public synchronized void registerRelevantHeaders(String... headers) {
-relevantHeaders = unmodifiableSet(newHashSetCopyWith(relevantHeaders, headers));
+Set<String> newRelevantHeaders = new HashSet<>(relevantHeaders.size() + headers.length);
+newRelevantHeaders.addAll(relevantHeaders);
+Collections.addAll(newRelevantHeaders, headers);
+relevantHeaders = unmodifiableSet(newRelevantHeaders);
}
/**

View File

@@ -102,7 +102,6 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_VERSION_M
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_VERSION_UPGRADED;
import static org.elasticsearch.cluster.metadata.MetaDataIndexStateService.INDEX_CLOSED_BLOCK;
import static org.elasticsearch.common.util.set.Sets.newHashSet;
-import static org.elasticsearch.common.util.set.Sets.newHashSetCopyWith;
/**
* Service responsible for restoring snapshots
@@ -139,11 +138,17 @@ public class RestoreService extends AbstractComponent implements ClusterStateLis
SETTING_CREATION_DATE));
// It's OK to change some settings, but we shouldn't allow simply removing them
-private static final Set<String> UNREMOVABLE_SETTINGS = unmodifiableSet(newHashSetCopyWith(UNMODIFIABLE_SETTINGS,
-SETTING_NUMBER_OF_REPLICAS,
-SETTING_AUTO_EXPAND_REPLICAS,
-SETTING_VERSION_UPGRADED,
-SETTING_VERSION_MINIMUM_COMPATIBLE));
+private static final Set<String> UNREMOVABLE_SETTINGS;
+static {
+Set<String> unremovable = new HashSet<>(UNMODIFIABLE_SETTINGS.size() + 4);
+unremovable.addAll(UNMODIFIABLE_SETTINGS);
+unremovable.add(SETTING_NUMBER_OF_REPLICAS);
+unremovable.add(SETTING_AUTO_EXPAND_REPLICAS);
+unremovable.add(SETTING_VERSION_UPGRADED);
+unremovable.add(SETTING_VERSION_MINIMUM_COMPATIBLE);
+UNREMOVABLE_SETTINGS = unmodifiableSet(unremovable);
+}
private final ClusterService clusterService;
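The static-initializer idiom above is the usual pre-Java-9 way to derive one immutable constant set from another (factories such as Set.of did not exist yet). A tiny self-contained sketch of the same idiom, with purely illustrative names and values:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

import static java.util.Collections.unmodifiableSet;

public class ConstantSets {
    static final Set<String> BASE =
            unmodifiableSet(new HashSet<>(Arrays.asList("alpha", "beta")));

    // Derived constant: copy the base set once, add the extras, publish as unmodifiable.
    static final Set<String> EXTENDED;
    static {
        Set<String> extended = new HashSet<>(BASE.size() + 1);
        extended.addAll(BASE);
        extended.add("gamma");
        EXTENDED = unmodifiableSet(extended);
    }

    public static void main(String[] args) {
        System.out.println(EXTENDED); // [alpha, beta, gamma] in some order
    }
}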