Remove and forbid use of com.google.common.hash.*

This commit removes all uses of com.google.common.hash.HashCode,
com.google.common.hash.HashFunction, and com.google.common.hash.Hashing
across the codebase and adds them to the forbidden APIs signatures so
they cannot be reintroduced. Checksum call sites now use the new
org.elasticsearch.common.hash.MessageDigests utility, which is backed
by java.security.MessageDigest. This is one of the few remaining steps
in the eventual removal of Guava as a dependency.

Relates #13224
Jason Tedor 2015-10-02 17:18:26 +02:00
parent fcdd8a29a9
commit 67d1c70c2d
8 changed files with 213 additions and 58 deletions
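The change applies one recurring pattern: Guava's fluent hashers (for example Hashing.sha1().hashBytes(bytes).toString()) are replaced by the new MessageDigests helper plus its hex formatter. Below is a minimal, self-contained sketch of the new call shape; the class name Sha1HexExample is illustrative and not part of the commit, and the expected output is the SHA-1 test vector used in MessageDigestsTests further down.

import org.elasticsearch.common.hash.MessageDigests;

import java.nio.charset.StandardCharsets;

public class Sha1HexExample {
    public static void main(String[] args) {
        byte[] bytes = "The quick brown fox jumps over the lazy dog".getBytes(StandardCharsets.UTF_8);
        // before this commit: Hashing.sha1().hashBytes(bytes).toString()
        String sha1Hex = MessageDigests.toHexString(MessageDigests.sha1().digest(bytes));
        System.out.println(sha1Hex); // 2fd4e1c67a2d28fced849ee1bb76e7391b93eb12
    }
}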

View File

@@ -0,0 +1,80 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.hash;

import org.elasticsearch.ElasticsearchException;

import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public class MessageDigests {

    private static final MessageDigest MD5_DIGEST;
    private static final MessageDigest SHA_1_DIGEST;
    private static final MessageDigest SHA_256_DIGEST;

    static {
        try {
            // MD5, SHA-1 and SHA-256 are required of every Java platform implementation,
            // so this initialization is not expected to fail.
            MD5_DIGEST = MessageDigest.getInstance("MD5");
            SHA_1_DIGEST = MessageDigest.getInstance("SHA-1");
            SHA_256_DIGEST = MessageDigest.getInstance("SHA-256");
        } catch (NoSuchAlgorithmException e) {
            throw new ElasticsearchException("Unexpected exception creating MessageDigest instance", e);
        }
    }

    public static MessageDigest md5() {
        return cloneAndReset(MD5_DIGEST);
    }

    public static MessageDigest sha1() {
        return cloneAndReset(SHA_1_DIGEST);
    }

    public static MessageDigest sha256() {
        return cloneAndReset(SHA_256_DIGEST);
    }

    // MessageDigest instances are stateful and not thread-safe; each accessor therefore
    // returns a reset clone of the shared prototype rather than the prototype itself.
    private static MessageDigest cloneAndReset(MessageDigest messageDigest) {
        try {
            MessageDigest clone = (MessageDigest) messageDigest.clone();
            clone.reset();
            return clone;
        } catch (CloneNotSupportedException e) {
            throw new ElasticsearchException("Unexpected exception cloning MessageDigest instance", e);
        }
    }

    private static final char[] HEX_DIGITS = "0123456789abcdef".toCharArray();

    public static String toHexString(byte[] bytes) {
        if (bytes == null) {
            throw new NullPointerException("bytes");
        }
        StringBuilder sb = new StringBuilder(2 * bytes.length);
        for (int i = 0; i < bytes.length; i++) {
            byte b = bytes[i];
            // high nibble first, then low nibble, as lowercase hex
            sb.append(HEX_DIGITS[b >> 4 & 0xf]).append(HEX_DIGITS[b & 0xf]);
        }
        return sb.toString();
    }
}
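Since md5(), sha1() and sha256() each return a reset clone, callers can also feed data incrementally through the standard MessageDigest update/digest API rather than buffering a whole payload (the checksum call sites below read entire files with Files.readAllBytes). A hypothetical streaming sketch; the class, method and buffer size are illustrative and not part of the commit:

import org.elasticsearch.common.hash.MessageDigests;

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.MessageDigest;

public class StreamingChecksum {
    // Computes a SHA-256 hex digest of a file without loading it fully into memory.
    static String sha256Hex(Path path) throws IOException {
        MessageDigest digest = MessageDigests.sha256(); // fresh, reset instance per call
        try (InputStream in = Files.newInputStream(path)) {
            byte[] buffer = new byte[8192];
            int read;
            while ((read = in.read(buffer)) != -1) {
                digest.update(buffer, 0, read);
            }
        }
        return MessageDigests.toHexString(digest.digest());
    }
}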

View File

@@ -19,19 +19,22 @@
package org.elasticsearch.common.http.client;
import java.nio.charset.StandardCharsets;
import com.google.common.hash.Hashing;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.*;
import org.elasticsearch.Build;
import org.elasticsearch.ElasticsearchCorruptionException;
import org.elasticsearch.ElasticsearchTimeoutException;
import org.elasticsearch.Version;
import org.elasticsearch.common.Base64;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.hash.MessageDigests;
import org.elasticsearch.common.unit.TimeValue;
import java.io.*;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLConnection;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
@@ -96,7 +99,7 @@ public class HttpDownloadHelper {
public static Checksummer SHA1_CHECKSUM = new Checksummer() {
@Override
public String checksum(byte[] filebytes) {
return Hashing.sha1().hashBytes(filebytes).toString();
return MessageDigests.toHexString(MessageDigests.sha1().digest(filebytes));
}
@Override
@@ -109,7 +112,7 @@ public class HttpDownloadHelper {
public static Checksummer MD5_CHECKSUM = new Checksummer() {
@Override
public String checksum(byte[] filebytes) {
return Hashing.md5().hashBytes(filebytes).toString();
return MessageDigests.toHexString(MessageDigests.md5().digest(filebytes));
}
@Override

View File

@@ -19,26 +19,24 @@
package org.elasticsearch.cluster.routing.operation.hash.murmur3;
import com.carrotsearch.randomizedtesting.generators.RandomInts;
import com.carrotsearch.randomizedtesting.generators.RandomStrings;
import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;
import org.elasticsearch.cluster.routing.Murmur3HashFunction;
import org.elasticsearch.test.ESTestCase;
public class Murmur3HashFunctionTests extends ESTestCase {
public void test() {
// Make sure that we agree with guava
Murmur3HashFunction murmur3 = new Murmur3HashFunction();
HashFunction guavaMurmur3 = Hashing.murmur3_32();
for (int i = 0; i < 100; ++i) {
final String id = RandomStrings.randomRealisticUnicodeOfCodepointLength(getRandom(), RandomInts.randomIntBetween(getRandom(), 1, 20));
//final String id = "0";
final int hash1 = guavaMurmur3.newHasher().putUnencodedChars(id).hash().asInt();
final int hash2 = murmur3.hash(id);
assertEquals(hash1, hash2);
}
private static Murmur3HashFunction HASH = new Murmur3HashFunction();
public void testKnownValues() {
assertHash(0x5a0cb7c3, "hell");
assertHash(0xd7c31989, "hello");
assertHash(0x22ab2984, "hello w");
assertHash(0xdf0ca123, "hello wo");
assertHash(0xe7744d61, "hello wor");
assertHash(0xe07db09c, "The quick brown fox jumps over the lazy dog");
assertHash(0x4e63d2ad, "The quick brown fox jumps over the lazy cog");
}
private static void assertHash(int expected, String stringInput) {
assertEquals(expected, HASH.hash(stringInput));
}
}

View File

@@ -0,0 +1,81 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.hash;
import org.elasticsearch.test.ESTestCase;
import org.junit.Test;
import java.math.BigInteger;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import static org.junit.Assert.*;
public class MessageDigestsTests extends ESTestCase {
private void assertHash(String expected, String test, MessageDigest messageDigest) {
String actual = MessageDigests.toHexString(messageDigest.digest(test.getBytes(StandardCharsets.UTF_8)));
assertEquals(expected, actual);
}
@Test
public void testMd5() throws Exception {
assertHash("d41d8cd98f00b204e9800998ecf8427e", "", MessageDigests.md5());
assertHash("900150983cd24fb0d6963f7d28e17f72", "abc", MessageDigests.md5());
assertHash("8215ef0796a20bcaaae116d3876c664a", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq", MessageDigests.md5());
assertHash("7707d6ae4e027c70eea2a935c2296f21", new String(new char[1000000]).replace("\0", "a"), MessageDigests.md5());
assertHash("9e107d9d372bb6826bd81d3542a419d6", "The quick brown fox jumps over the lazy dog", MessageDigests.md5());
assertHash("1055d3e698d289f2af8663725127bd4b", "The quick brown fox jumps over the lazy cog", MessageDigests.md5());
}
@Test
public void testSha1() throws Exception {
assertHash("da39a3ee5e6b4b0d3255bfef95601890afd80709", "", MessageDigests.sha1());
assertHash("a9993e364706816aba3e25717850c26c9cd0d89d", "abc", MessageDigests.sha1());
assertHash("84983e441c3bd26ebaae4aa1f95129e5e54670f1", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq", MessageDigests.sha1());
assertHash("34aa973cd4c4daa4f61eeb2bdbad27316534016f", new String(new char[1000000]).replace("\0", "a"), MessageDigests.sha1());
assertHash("2fd4e1c67a2d28fced849ee1bb76e7391b93eb12", "The quick brown fox jumps over the lazy dog", MessageDigests.sha1());
assertHash("de9f2c7fd25e1b3afad3e85a0bd17d9b100db4b3", "The quick brown fox jumps over the lazy cog", MessageDigests.sha1());
}
@Test
public void testSha256() throws Exception {
assertHash("e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", "", MessageDigests.sha256());
assertHash("ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad", "abc", MessageDigests.sha256());
assertHash("248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq", MessageDigests.sha256());
assertHash("cdc76e5c9914fb9281a1c7e284d73e67f1809a48a497200e046d39ccc7112cd0", new String(new char[1000000]).replace("\0", "a"), MessageDigests.sha256());
assertHash("d7a8fbb307d7809469ca9abcb0082e4f8d5651e46d3cdb762d02d0bf37c9e592", "The quick brown fox jumps over the lazy dog", MessageDigests.sha256());
assertHash("e4c4d8f3bf76b692de791a173e05321150f7a345b46484fe427f6acc7ecc81be", "The quick brown fox jumps over the lazy cog", MessageDigests.sha256());
}
@Test
public void testToHexString() throws Exception {
for (int i = 0; i < 1024; i++) {
BigInteger expected = BigInteger.probablePrime(256, random());
byte[] bytes = expected.toByteArray();
String hex = MessageDigests.toHexString(bytes);
String zeros = new String(new char[bytes.length * 2]).replace("\0", "0");
String expectedAsString = expected.toString(16);
String expectedHex = zeros.substring(expectedAsString.length()) + expectedAsString;
assertEquals(expectedHex, hex);
BigInteger actual = new BigInteger(hex, 16);
assertEquals(expected, actual);
}
}
}

View File

@@ -19,37 +19,34 @@
package org.elasticsearch.common.hashing;
import com.google.common.hash.HashCode;
import com.google.common.hash.Hashing;
import org.elasticsearch.common.hash.MurmurHash3;
import org.elasticsearch.test.ESTestCase;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.LongBuffer;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
public class MurmurHash3Tests extends ESTestCase {
public void testHash128() {
final int iters = scaledRandomIntBetween(100, 5000);
for (int i = 0; i < iters; ++i) {
final int seed = randomInt();
final int offset = randomInt(20);
final int len = randomInt(randomBoolean() ? 20 : 200);
final byte[] bytes = new byte[len + offset + randomInt(3)];
getRandom().nextBytes(bytes);
HashCode h1 = Hashing.murmur3_128(seed).hashBytes(bytes, offset, len);
MurmurHash3.Hash128 h2 = MurmurHash3.hash128(bytes, offset, len, seed, new MurmurHash3.Hash128());
assertEquals(h1, h2);
}
public void testKnownValues() throws UnsupportedEncodingException {
assertHash(0x629942693e10f867L, 0x92db0b82baeb5347L, "hell", 0);
assertHash(0xa78ddff5adae8d10L, 0x128900ef20900135L, "hello", 1);
assertHash(0x8a486b23f422e826L, 0xf962a2c58947765fL, "hello ", 2);
assertHash(0x2ea59f466f6bed8cL, 0xc610990acc428a17L, "hello w", 3);
assertHash(0x79f6305a386c572cL, 0x46305aed3483b94eL, "hello wo", 4);
assertHash(0xc2219d213ec1f1b5L, 0xa1d8e2e0a52785bdL, "hello wor", 5);
assertHash(0xe34bbc7bbc071b6cL, 0x7a433ca9c49a9347L, "The quick brown fox jumps over the lazy dog", 0);
assertHash(0x658ca970ff85269aL, 0x43fee3eaa68e5c3eL, "The quick brown fox jumps over the lazy cog", 0);
}
private void assertEquals(HashCode h1, MurmurHash3.Hash128 h2) {
final LongBuffer longs = ByteBuffer.wrap(h1.asBytes()).order(ByteOrder.LITTLE_ENDIAN).asLongBuffer();
assertEquals(2, longs.limit());
assertEquals(h1.asLong(), h2.h1);
assertEquals(longs.get(), h2.h1);
assertEquals(longs.get(), h2.h2);
private static void assertHash(long lower, long upper, String inputString, long seed) {
byte[] bytes = inputString.getBytes(StandardCharsets.UTF_8);
MurmurHash3.Hash128 expected = new MurmurHash3.Hash128();
expected.h1 = lower;
expected.h2 = upper;
assertHash(expected, MurmurHash3.hash128(bytes, 0, bytes.length, seed, new MurmurHash3.Hash128()));
}
private static void assertHash(MurmurHash3.Hash128 expected, MurmurHash3.Hash128 actual) {
assertEquals(expected.h1, actual.h1);
assertEquals(expected.h2, actual.h2);
}
}

View File

@@ -18,9 +18,6 @@
*/
package org.elasticsearch.plugins;
import java.nio.charset.StandardCharsets;
import com.google.common.hash.Hashing;
import org.apache.http.impl.client.HttpClients;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.Version;
@@ -28,6 +25,7 @@ import org.elasticsearch.common.Base64;
import org.elasticsearch.common.cli.CliTool;
import org.elasticsearch.common.cli.CliTool.ExitStatus;
import org.elasticsearch.common.cli.CliToolTestCase.CaptureOutputTerminal;
import org.elasticsearch.common.hash.MessageDigests;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.node.internal.InternalSettingsPreparer;
@@ -46,16 +44,15 @@ import org.jboss.netty.handler.ssl.util.InsecureTrustManagerFactory;
import org.jboss.netty.handler.ssl.util.SelfSignedCertificate;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSocketFactory;
import java.io.BufferedWriter;
import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.nio.charset.StandardCharsets;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
@@ -109,7 +106,7 @@ public class PluginManagerIT extends ESIntegTestCase {
}
private void writeSha1(Path file, boolean corrupt) throws IOException {
String sha1Hex = Hashing.sha1().hashBytes(Files.readAllBytes(file)).toString();
String sha1Hex = MessageDigests.toHexString(MessageDigests.sha1().digest(Files.readAllBytes(file)));
try (BufferedWriter out = Files.newBufferedWriter(file.resolveSibling(file.getFileName() + ".sha1"), StandardCharsets.UTF_8)) {
out.write(sha1Hex);
if (corrupt) {
@@ -119,7 +116,7 @@ public class PluginManagerIT extends ESIntegTestCase {
}
private void writeMd5(Path file, boolean corrupt) throws IOException {
String md5Hex = Hashing.md5().hashBytes(Files.readAllBytes(file)).toString();
String md5Hex = MessageDigests.toHexString(MessageDigests.md5().digest(Files.readAllBytes(file)));
try (BufferedWriter out = Files.newBufferedWriter(file.resolveSibling(file.getFileName() + ".md5"), StandardCharsets.UTF_8)) {
out.write(md5Hex);
if (corrupt) {

View File

@@ -130,6 +130,9 @@ com.google.common.primitives.Ints
com.google.common.collect.ImmutableSet
com.google.common.collect.ImmutableSet$Builder
com.google.common.io.Resources
com.google.common.hash.HashCode
com.google.common.hash.HashFunction
com.google.common.hash.Hashing
@defaultMessage Do not violate java's access system
java.lang.reflect.AccessibleObject#setAccessible(boolean)
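These entries extend the forbidden-apis signatures, so any source file that still references the removed Guava hashers should now fail that check instead of compiling quietly. A hypothetical offender, with an illustrative class name, that the added signatures would flag:

import com.google.common.hash.Hashing; // now a forbidden API

public class StillUsesGuavaHashing {
    String md5Hex(byte[] bytes) {
        // flagged against the com.google.common.hash.Hashing signature added above
        return Hashing.md5().hashBytes(bytes).toString();
    }
}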

View File

@@ -19,14 +19,9 @@
package org.elasticsearch.script.groovy;
import java.nio.charset.StandardCharsets;
import com.google.common.hash.Hashing;
import groovy.lang.Binding;
import groovy.lang.GroovyClassLoader;
import groovy.lang.Script;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Scorer;
import org.codehaus.groovy.ast.ClassCodeExpressionTransformer;
@@ -41,9 +36,9 @@ import org.codehaus.groovy.control.SourceUnit;
import org.codehaus.groovy.control.customizers.CompilationCustomizer;
import org.codehaus.groovy.control.customizers.ImportCustomizer;
import org.elasticsearch.SpecialPermission;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.hash.MessageDigests;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
@@ -53,6 +48,7 @@ import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
import java.math.BigDecimal;
import java.nio.charset.StandardCharsets;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.HashMap;
@@ -172,7 +168,7 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri
if (sm != null) {
sm.checkPermission(new SpecialPermission());
}
return loader.parseClass(script, Hashing.sha1().hashString(script, StandardCharsets.UTF_8).toString());
return loader.parseClass(script, MessageDigests.toHexString(MessageDigests.sha1().digest(script.getBytes(StandardCharsets.UTF_8))));
} catch (Throwable e) {
if (logger.isTraceEnabled()) {
logger.trace("exception compiling Groovy script:", e);