Drop commons-lang dependency

commons-lang is really only used by some core classes to join strings or modify arrays.
It's not worth carrying the dependency for that. This commit removes the dependency on
commons-lang entirely.
Simon Willnauer 2015-08-18 21:36:04 +02:00
parent e07f039659
commit 0ffd99cca3
16 changed files with 75 additions and 67 deletions
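Every change below follows the same pattern: a commons-lang3 ArrayUtils or StringUtils call is replaced by a few lines of plain JDK code. As a minimal standalone sketch of the recurring append case (the class name and main method here are illustrative, not part of the commit), ArrayUtils.add boils down to Arrays.copyOf plus one assignment:

import java.util.Arrays;

public final class ArrayAppendSketch {

    // Plain-JDK stand-in for ArrayUtils.add(array, element): copy into an
    // array that is one slot longer, then fill the last slot.
    static <T> T[] appendElement(final T[] array, final T element) {
        final T[] newArray = Arrays.copyOf(array, array.length + 1);
        newArray[newArray.length - 1] = element;
        return newArray;
    }

    public static void main(String[] args) {
        String[] values = {"a", "b"};
        System.out.println(Arrays.toString(appendElement(values, "c"))); // [a, b, c]
    }
}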

View File

@@ -161,10 +161,6 @@
             <groupId>org.hdrhistogram</groupId>
             <artifactId>HdrHistogram</artifactId>
         </dependency>
-        <dependency>
-            <groupId>org.apache.commons</groupId>
-            <artifactId>commons-lang3</artifactId>
-        </dependency>
         <dependency>
             <groupId>commons-cli</groupId>
             <artifactId>commons-cli</artifactId>

View File

@@ -23,7 +23,6 @@
                 <include>com.ning:compress-lzf</include>
                 <include>com.github.spullara.mustache.java:compiler</include>
                 <include>com.tdunning:t-digest</include>
-                <include>org.apache.commons:commons-lang3</include>
                 <include>commons-cli:commons-cli</include>
                 <include>com.twitter:jsr166e</include>
                 <include>org.hdrhistogram:HdrHistogram</include>

View File

@@ -23,7 +23,6 @@ import com.google.common.base.Preconditions;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
 import org.apache.commons.cli.DefaultParser;
-import org.apache.commons.cli.GnuParser;
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;

View File

@@ -22,9 +22,9 @@ package org.elasticsearch.common.collect;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Maps;
 import com.google.common.collect.UnmodifiableIterator;
-import org.apache.commons.lang3.ArrayUtils;
 import org.apache.lucene.util.mutable.MutableValueInt;
 
+import java.lang.reflect.Array;
 import java.util.*;
 
 /**
@@ -134,12 +134,13 @@ public final class CopyOnWriteHashMap<K, V> extends AbstractMap<K, V> {
         @Override
         V get(Object key, int hash) {
-            final int slot = ArrayUtils.indexOf(keys, key);
-            if (slot < 0) {
-                return null;
-            } else {
-                return values[slot];
-            }
+            for (int i = 0; i < keys.length; i++) {
+                if (key.equals(keys[i])) {
+                    return values[i];
+                }
+            }
+            return null;
         }
 
         private static <T> T[] replace(T[] array, int index, T value) {
@@ -151,14 +152,20 @@ public final class CopyOnWriteHashMap<K, V> extends AbstractMap<K, V> {
         @Override
         Leaf<K, V> put(K key, int hash, int hashBits, V value, MutableValueInt newValue) {
             assert hashBits <= 0 : hashBits;
-            final int slot = ArrayUtils.indexOf(keys, key);
+            int slot = -1;
+            for (int i = 0; i < keys.length; i++) {
+                if (key.equals(keys[i])) {
+                    slot = i;
+                    break;
+                }
+            }
 
             final K[] keys2;
             final V[] values2;
 
             if (slot < 0) {
-                keys2 = ArrayUtils.add(keys, key);
-                values2 = ArrayUtils.add(values, value);
+                keys2 = appendElement(keys, key);
+                values2 = appendElement(values, value);
                 newValue.value = 1;
             } else {
                 keys2 = replace(keys, slot, key);
@@ -170,16 +177,49 @@ public final class CopyOnWriteHashMap<K, V> extends AbstractMap<K, V> {
         @Override
         Leaf<K, V> remove(Object key, int hash) {
-            final int slot = ArrayUtils.indexOf(keys, key);
+            int slot = -1;
+            for (int i = 0; i < keys.length; i++) {
+                if (key.equals(keys[i])) {
+                    slot = i;
+                    break;
+                }
+            }
             if (slot < 0) {
                 return this;
             }
-            final K[] keys2 = ArrayUtils.remove(keys, slot);
-            final V[] values2 = ArrayUtils.remove(values, slot);
+            final K[] keys2 = removeArrayElement(keys, slot);
+            final V[] values2 = removeArrayElement(values, slot);
             return new Leaf<>(keys2, values2);
         }
     }
 
+    private static <T> T[] removeArrayElement(T[] array, int index) {
+        final Object result = Array.newInstance(array.getClass().getComponentType(), array.length - 1);
+        System.arraycopy(array, 0, result, 0, index);
+        if (index < array.length - 1) {
+            System.arraycopy(array, index + 1, result, index, array.length - index - 1);
+        }
+        return (T[]) result;
+    }
+
+    public static <T> T[] appendElement(final T[] array, final T element) {
+        final T[] newArray = Arrays.copyOf(array, array.length + 1);
+        newArray[newArray.length - 1] = element;
+        return newArray;
+    }
+
+    public static <T> T[] insertElement(final T[] array, final T element, final int index) {
+        final T[] result = Arrays.copyOf(array, array.length + 1);
+        System.arraycopy(array, 0, result, 0, index);
+        result[index] = element;
+        if (index < array.length) {
+            System.arraycopy(array, index, result, index + 1, array.length - index);
+        }
+        return result;
+    }
+
     /**
      * An inner node in this trie. Inner nodes store up to 64 key-value pairs
      * and use a bitmap in order to associate hashes to them. For example, if
@@ -320,8 +360,8 @@ public final class CopyOnWriteHashMap<K, V> extends AbstractMap<K, V> {
         private InnerNode<K, V> putNew(K key, int hash6, int slot, V value) {
             final long mask2 = mask | (1L << hash6);
-            final K[] keys2 = ArrayUtils.add(keys, slot, key);
-            final Object[] subNodes2 = ArrayUtils.add(subNodes, slot, value);
+            final K[] keys2 = insertElement(keys, key, slot);
+            final Object[] subNodes2 = insertElement(subNodes, value, slot);
             return new InnerNode<>(mask2, keys2, subNodes2);
         }
@@ -342,8 +382,8 @@ public final class CopyOnWriteHashMap<K, V> extends AbstractMap<K, V> {
         private InnerNode<K, V> removeSlot(int hash6, int slot) {
             final long mask2 = mask & ~(1L << hash6);
-            final K[] keys2 = ArrayUtils.remove(keys, slot);
-            final Object[] subNodes2 = ArrayUtils.remove(subNodes, slot);
+            final K[] keys2 = removeArrayElement(keys, slot);
+            final Object[] subNodes2 = removeArrayElement(subNodes, slot);
             return new InnerNode<>(mask2, keys2, subNodes2);
         }

View File

@@ -23,7 +23,7 @@ import com.google.common.collect.Sets;
 import com.spatial4j.core.exception.InvalidShapeException;
 import com.spatial4j.core.shape.Shape;
 import com.vividsolutions.jts.geom.*;
-import org.apache.commons.lang3.tuple.Pair;
+import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 
 import java.io.IOException;
@@ -98,7 +98,6 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
     /**
      * build new hole to the polygon
-     * @param hole linear ring defining the hole
      * @return this
      */
     public Ring<E> hole() {
@@ -285,7 +284,7 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
         Edge current = edge;
         Edge prev = edge;
         // bookkeep the source and sink of each visited coordinate
-        HashMap<Coordinate, Pair<Edge, Edge>> visitedEdge = new HashMap<>();
+        HashMap<Coordinate, Tuple<Edge, Edge>> visitedEdge = new HashMap<>();
         do {
             current.coordinate = shift(current.coordinate, shiftOffset);
             current.component = id;
@@ -301,7 +300,7 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
                 // since we're splitting connected components, we want the edges method to visit
                 // the newly separated component
                 final int visitID = -id;
-                Edge firstAppearance = visitedEdge.get(current.coordinate).getRight();
+                Edge firstAppearance = visitedEdge.get(current.coordinate).v2();
                 // correct the graph pointers by correcting the 'next' pointer for both the
                 // first appearance and this appearance of the edge
                 Edge temp = firstAppearance.next;
@@ -312,12 +311,12 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
                 // a non-visited value (anything positive)
                 do {
                     prev.component = visitID;
-                    prev = visitedEdge.get(prev.coordinate).getLeft();
+                    prev = visitedEdge.get(prev.coordinate).v1();
                     ++splitIndex;
                 } while (!current.coordinate.equals(prev.coordinate));
                 ++connectedComponents;
             } else {
-                visitedEdge.put(current.coordinate, Pair.of(prev, current));
+                visitedEdge.put(current.coordinate, new Tuple<Edge, Edge>(prev, current));
             }
             edges.add(current);
             prev = current;

View File

@@ -26,9 +26,8 @@ import com.spatial4j.core.shape.jts.JtsGeometry;
 import com.vividsolutions.jts.geom.Coordinate;
 import com.vividsolutions.jts.geom.Geometry;
 import com.vividsolutions.jts.geom.GeometryFactory;
-import org.apache.commons.lang3.tuple.Pair;
 import org.elasticsearch.ElasticsearchParseException;
-import org.elasticsearch.common.Explicit;
+import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.logging.ESLoggerFactory;
 import org.elasticsearch.common.unit.DistanceUnit.Distance;
@@ -487,7 +486,7 @@ public abstract class ShapeBuilder implements ToXContent {
         return top;
     }
 
-    private static final Pair range(Coordinate[] points, int offset, int length) {
+    private static final double[] range(Coordinate[] points, int offset, int length) {
         double minX = points[0].x;
         double maxX = points[0].x;
         double minY = points[0].y;
@@ -507,7 +506,7 @@
                 maxY = points[offset + i].y;
             }
         }
-        return Pair.of(Pair.of(minX, maxX), Pair.of(minY, maxY));
+        return new double[] {minX, maxX, minY, maxY};
     }
 
     /**
@@ -585,8 +584,8 @@
         // and convert to a right handed system
 
         // compute the bounding box and calculate range
-        Pair<Pair, Pair> range = range(points, offset, length);
-        final double rng = (Double)range.getLeft().getRight() - (Double)range.getLeft().getLeft();
+        double[] range = range(points, offset, length);
+        final double rng = range[1] - range[0];
         // translate the points if the following is true
         //   1. shell orientation is cw and range is greater than a hemisphere (180 degrees) but not spanning 2 hemispheres
         //      (translation would result in a collapsed poly)
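The nested Pair<Pair, Pair> bounding box becomes a flat four-element array. By the {minX, maxX, minY, maxY} convention established in range(...), indices 0 and 1 hold the longitude bounds and 2 and 3 the latitude bounds, which is why the caller's rng is now simply range[1] - range[0]. A small sketch of reading that layout (the values are made up for illustration):

public final class RangeLayoutSketch {
    public static void main(String[] args) {
        // {minX, maxX, minY, maxY}, as returned by the new range(...) above
        double[] range = {-170.0, 160.0, -10.0, 20.0};
        final double rng = range[1] - range[0];       // longitude extent: 330.0
        final double latExtent = range[3] - range[2]; // latitude extent: 30.0
        System.out.println(rng + " / " + latExtent);
    }
}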

View File

@@ -18,7 +18,6 @@
  */
 package org.elasticsearch.common;
 
-import org.apache.commons.lang3.ArrayUtils;
 import org.elasticsearch.test.ESTestCase;
 import org.junit.Test;
@@ -77,7 +76,10 @@ public class ParseFieldTests extends ESTestCase {
         String[] deprecated = new String[]{"text", "same_as_text"};
         String[] allValues = values;
         if (withDeprecatedNames) {
-            allValues = ArrayUtils.addAll(values, deprecated);
+            String[] newArray = new String[allValues.length + deprecated.length];
+            System.arraycopy(allValues, 0, newArray, 0, allValues.length);
+            System.arraycopy(deprecated, 0, newArray, allValues.length, deprecated.length);
+            allValues = newArray;
         }
 
         ParseField field = new ParseField(randomFrom(values));

View File

@@ -19,7 +19,6 @@
 package org.elasticsearch.test;
 
 import com.carrotsearch.randomizedtesting.annotations.TestGroup;
-import org.apache.commons.lang3.ArrayUtils;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.routing.IndexRoutingTable;
@@ -32,13 +31,8 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.indices.recovery.RecoverySettings;
 import org.elasticsearch.test.junit.annotations.TestLogging;
 import org.elasticsearch.test.junit.listeners.LoggingListener;
-import org.elasticsearch.test.transport.AssertingLocalTransport;
-import org.elasticsearch.test.transport.MockTransportService;
 import org.elasticsearch.transport.Transport;
 import org.elasticsearch.transport.TransportModule;
-import org.elasticsearch.transport.TransportService;
-import org.elasticsearch.transport.netty.NettyTransport;
-import org.junit.Ignore;
 
 import java.io.IOException;
 import java.lang.annotation.ElementType;

View File

@@ -28,12 +28,10 @@ import com.google.common.base.Joiner;
 import com.google.common.base.Predicate;
 import com.google.common.collect.Lists;
 
-import org.apache.commons.lang3.StringUtils;
 import org.apache.http.impl.client.HttpClients;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.routing.UnassignedInfo;
 import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider;
-import org.elasticsearch.env.NodeEnvironment;
 import org.elasticsearch.index.shard.MergeSchedulerConfig;
 import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.LuceneTestCase;
@@ -80,7 +78,6 @@ import org.elasticsearch.common.Priority;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
 import org.elasticsearch.common.collect.Tuple;
-import org.elasticsearch.common.io.PathUtils;
 import org.elasticsearch.common.regex.Regex;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.transport.InetSocketTransportAddress;
@@ -1920,7 +1917,11 @@ public abstract class ESIntegTestCase extends ESTestCase {
         }
         if (list.length != 1) {
-            throw new IllegalStateException("Backwards index must contain exactly one cluster\n" + StringUtils.join(list, "\n"));
+            StringBuilder builder = new StringBuilder("Backwards index must contain exactly one cluster\n");
+            for (Path line : list) {
+                builder.append(line.toString()).append('\n');
+            }
+            throw new IllegalStateException(builder.toString());
         }
         Path src = list[0];
         Path dest = dataDir.resolve(internalCluster().getClusterName());

View File

@@ -177,11 +177,6 @@
             <artifactId>t-digest</artifactId>
             <scope>provided</scope>
         </dependency>
-        <dependency>
-            <groupId>org.apache.commons</groupId>
-            <artifactId>commons-lang3</artifactId>
-            <scope>provided</scope>
-        </dependency>
         <dependency>
             <groupId>commons-cli</groupId>
             <artifactId>commons-cli</artifactId>

View File

@@ -409,12 +409,6 @@
             <version>2.1.6</version>
         </dependency>
 
-        <dependency>
-            <groupId>org.apache.commons</groupId>
-            <artifactId>commons-lang3</artifactId>
-            <version>3.3.2</version>
-        </dependency>
-
         <dependency>
             <groupId>commons-cli</groupId>
             <artifactId>commons-cli</artifactId>

View File

@@ -165,11 +165,6 @@
             <artifactId>t-digest</artifactId>
             <scope>provided</scope>
         </dependency>
-        <dependency>
-            <groupId>org.apache.commons</groupId>
-            <artifactId>commons-lang3</artifactId>
-            <scope>provided</scope>
-        </dependency>
         <dependency>
             <groupId>commons-cli</groupId>
             <artifactId>commons-cli</artifactId>

View File

@@ -170,11 +170,6 @@
             <artifactId>t-digest</artifactId>
             <scope>provided</scope>
         </dependency>
-        <dependency>
-            <groupId>org.apache.commons</groupId>
-            <artifactId>commons-lang3</artifactId>
-            <scope>provided</scope>
-        </dependency>
         <dependency>
             <groupId>commons-cli</groupId>
             <artifactId>commons-cli</artifactId>