- remove Digest/Hex; they were only used in a test, so use MD5 directly there
- replace Guava's Closeables with Lucene's IOUtils
- remove the duplicate Classes helper and the unused LongsLAB
Shay Banon 2013-07-29 01:29:58 +02:00
parent a1197de6af
commit 4e66658aac
12 changed files with 21 additions and 993 deletions
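The recurring change in the file diffs below is a one-for-one swap of Guava's Closeables.closeQuietly for Lucene's IOUtils.closeWhileHandlingException inside finally blocks. A minimal sketch of the before/after shape, with hypothetical resource and method names (the real call sites follow in the diffs):

import org.apache.lucene.util.IOUtils;

import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;

public class CloseQuietlyMigration {
    // Hypothetical caller illustrating the pattern applied throughout this commit.
    public static void readFully(File file) throws Exception {
        InputStream in = null;
        try {
            in = new FileInputStream(file);
            // ... consume the stream ...
        } finally {
            // Before: com.google.common.io.Closeables.closeQuietly(in);
            // After: Lucene's helper, which likewise ignores nulls and suppresses close() failures.
            IOUtils.closeWhileHandlingException(in);
        }
    }
}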

View File

@@ -23,8 +23,8 @@ import com.google.common.base.Charsets;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.common.io.Closeables;
import org.apache.lucene.util.CollectionUtil;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.Version;
import org.elasticsearch.action.support.master.MasterNodeOperationRequest;
@@ -462,7 +462,7 @@ public class MetaDataCreateIndexService extends AbstractComponent {
} catch (Exception e) {
logger.warn("[{}] failed to read template [{}] from config", e, request.index, templatesFile.getAbsolutePath());
} finally {
Closeables.closeQuietly(parser);
IOUtils.closeWhileHandlingException(parser);
}
}
}

View File

@@ -1,560 +0,0 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
/**
*
*/
public class Digest {
private static final int STREAM_BUFFER_LENGTH = 1024 * 16;
/**
* Read through an InputStream and returns the digest for the data
*
* @param digest The MessageDigest to use (e.g. MD5)
* @param data Data to digest
* @return MD5 digest
* @throws IOException On error reading from the stream
*/
private static byte[] digest(MessageDigest digest, InputStream data) throws IOException {
byte[] buffer = new byte[STREAM_BUFFER_LENGTH];
int read = data.read(buffer, 0, STREAM_BUFFER_LENGTH);
while (read > -1) {
digest.update(buffer, 0, read);
read = data.read(buffer, 0, STREAM_BUFFER_LENGTH);
}
return digest.digest();
}
private static final Charset Charset_UTF8 = Charset.forName("UTF8");
private static byte[] getBytesUtf8(String data) {
return data.getBytes(Charset_UTF8);
}
/**
* Returns a <code>MessageDigest</code> for the given <code>algorithm</code>.
*
* @param algorithm the name of the algorithm requested. See <a
* href="http://java.sun.com/j2se/1.3/docs/guide/security/CryptoSpec.html#AppA">Appendix A in the Java
* Cryptography Architecture API Specification & Reference</a> for information about standard algorithm
* names.
* @return An MD5 digest instance.
* @throws RuntimeException when a {@link java.security.NoSuchAlgorithmException} is caught.
* @see MessageDigest#getInstance(String)
*/
static MessageDigest getDigest(String algorithm) {
try {
return MessageDigest.getInstance(algorithm);
} catch (NoSuchAlgorithmException e) {
throw new RuntimeException(e.getMessage());
}
}
/**
* Returns an MD5 MessageDigest.
*
* @return An MD5 digest instance.
* @throws RuntimeException when a {@link java.security.NoSuchAlgorithmException} is caught.
*/
public static MessageDigest getMd5Digest() {
return getDigest("MD5");
}
/**
* Returns an SHA-256 digest.
* <p>
* Throws a <code>RuntimeException</code> on JRE versions prior to 1.4.0.
* </p>
*
* @return An SHA-256 digest instance.
* @throws RuntimeException when a {@link java.security.NoSuchAlgorithmException} is caught.
*/
private static MessageDigest getSha256Digest() {
return getDigest("SHA-256");
}
/**
* Returns an SHA-384 digest.
* <p>
* Throws a <code>RuntimeException</code> on JRE versions prior to 1.4.0.
* </p>
*
* @return An SHA-384 digest instance.
* @throws RuntimeException when a {@link java.security.NoSuchAlgorithmException} is caught.
*/
private static MessageDigest getSha384Digest() {
return getDigest("SHA-384");
}
/**
* Returns an SHA-512 digest.
* <p>
* Throws a <code>RuntimeException</code> on JRE versions prior to 1.4.0.
* </p>
*
* @return An SHA-512 digest instance.
* @throws RuntimeException when a {@link java.security.NoSuchAlgorithmException} is caught.
*/
private static MessageDigest getSha512Digest() {
return getDigest("SHA-512");
}
/**
* Returns an SHA-1 digest.
*
* @return An SHA-1 digest instance.
* @throws RuntimeException when a {@link java.security.NoSuchAlgorithmException} is caught.
*/
private static MessageDigest getShaDigest() {
return getDigest("SHA");
}
/**
* Calculates the MD5 digest and returns the value as a 16 element <code>byte[]</code>.
*
* @param data Data to digest
* @return MD5 digest
*/
public static byte[] md5(byte[] data) {
return getMd5Digest().digest(data);
}
/**
* Calculates the MD5 digest and returns the value as a 16 element <code>byte[]</code>.
*
* @param data Data to digest
* @return MD5 digest
* @throws IOException On error reading from the stream
* @since 1.4
*/
public static byte[] md5(InputStream data) throws IOException {
return digest(getMd5Digest(), data);
}
/**
* Calculates the MD5 digest and returns the value as a 16 element <code>byte[]</code>.
*
* @param data Data to digest
* @return MD5 digest
*/
public static byte[] md5(String data) {
return md5(getBytesUtf8(data));
}
/**
* Calculates the MD5 digest and returns the value as a 32 character hex string.
*
* @param data Data to digest
* @return MD5 digest as a hex string
*/
public static String md5Hex(byte[] data) {
return Hex.encodeHexString(md5(data));
}
/**
* Calculates the MD5 digest and returns the value as a 32 character hex string.
*
* @param data Data to digest
* @return MD5 digest as a hex string
* @throws IOException On error reading from the stream
* @since 1.4
*/
public static String md5Hex(InputStream data) throws IOException {
return Hex.encodeHexString(md5(data));
}
/**
* Calculates the MD5 digest and returns the value as a 32 character hex string.
*
* @param data Data to digest
* @return MD5 digest as a hex string
*/
public static String md5Hex(String data) {
return Hex.encodeHexString(md5(data));
}
final static private Charset US_ASCII = Charset.forName("US-ASCII");
public static byte[] md5HexToByteArray(String md5Hex) {
return md5Hex.getBytes(US_ASCII);
}
public static String md5HexFromByteArray(byte[] data) {
return new String(data, 0, 32, US_ASCII);
}
/**
* Calculates the SHA-1 digest and returns the value as a <code>byte[]</code>.
*
* @param data Data to digest
* @return SHA-1 digest
*/
public static byte[] sha(byte[] data) {
return getShaDigest().digest(data);
}
/**
* Calculates the SHA-1 digest and returns the value as a <code>byte[]</code>.
*
* @param data Data to digest
* @return SHA-1 digest
* @throws IOException On error reading from the stream
* @since 1.4
*/
public static byte[] sha(InputStream data) throws IOException {
return digest(getShaDigest(), data);
}
/**
* Calculates the SHA-1 digest and returns the value as a <code>byte[]</code>.
*
* @param data Data to digest
* @return SHA-1 digest
*/
public static byte[] sha(String data) {
return sha(getBytesUtf8(data));
}
/**
* Calculates the SHA-256 digest and returns the value as a <code>byte[]</code>.
* <p>
* Throws a <code>RuntimeException</code> on JRE versions prior to 1.4.0.
* </p>
*
* @param data Data to digest
* @return SHA-256 digest
* @since 1.4
*/
public static byte[] sha256(byte[] data) {
return getSha256Digest().digest(data);
}
/**
* Calculates the SHA-256 digest and returns the value as a <code>byte[]</code>.
* <p>
* Throws a <code>RuntimeException</code> on JRE versions prior to 1.4.0.
* </p>
*
* @param data Data to digest
* @return SHA-256 digest
* @throws IOException On error reading from the stream
* @since 1.4
*/
public static byte[] sha256(InputStream data) throws IOException {
return digest(getSha256Digest(), data);
}
/**
* Calculates the SHA-256 digest and returns the value as a <code>byte[]</code>.
* <p>
* Throws a <code>RuntimeException</code> on JRE versions prior to 1.4.0.
* </p>
*
* @param data Data to digest
* @return SHA-256 digest
* @since 1.4
*/
public static byte[] sha256(String data) {
return sha256(getBytesUtf8(data));
}
/**
* Calculates the SHA-256 digest and returns the value as a hex string.
* <p>
* Throws a <code>RuntimeException</code> on JRE versions prior to 1.4.0.
* </p>
*
* @param data Data to digest
* @return SHA-256 digest as a hex string
* @since 1.4
*/
public static String sha256Hex(byte[] data) {
return Hex.encodeHexString(sha256(data));
}
/**
* Calculates the SHA-256 digest and returns the value as a hex string.
* <p>
* Throws a <code>RuntimeException</code> on JRE versions prior to 1.4.0.
* </p>
*
* @param data Data to digest
* @return SHA-256 digest as a hex string
* @throws IOException On error reading from the stream
* @since 1.4
*/
public static String sha256Hex(InputStream data) throws IOException {
return Hex.encodeHexString(sha256(data));
}
/**
* Calculates the SHA-256 digest and returns the value as a hex string.
* <p>
* Throws a <code>RuntimeException</code> on JRE versions prior to 1.4.0.
* </p>
*
* @param data Data to digest
* @return SHA-256 digest as a hex string
* @since 1.4
*/
public static String sha256Hex(String data) {
return Hex.encodeHexString(sha256(data));
}
/**
* Calculates the SHA-384 digest and returns the value as a <code>byte[]</code>.
* <p>
* Throws a <code>RuntimeException</code> on JRE versions prior to 1.4.0.
* </p>
*
* @param data Data to digest
* @return SHA-384 digest
* @since 1.4
*/
public static byte[] sha384(byte[] data) {
return getSha384Digest().digest(data);
}
/**
* Calculates the SHA-384 digest and returns the value as a <code>byte[]</code>.
* <p>
* Throws a <code>RuntimeException</code> on JRE versions prior to 1.4.0.
* </p>
*
* @param data Data to digest
* @return SHA-384 digest
* @throws IOException On error reading from the stream
* @since 1.4
*/
public static byte[] sha384(InputStream data) throws IOException {
return digest(getSha384Digest(), data);
}
/**
* Calculates the SHA-384 digest and returns the value as a <code>byte[]</code>.
* <p>
* Throws a <code>RuntimeException</code> on JRE versions prior to 1.4.0.
* </p>
*
* @param data Data to digest
* @return SHA-384 digest
* @since 1.4
*/
public static byte[] sha384(String data) {
return sha384(getBytesUtf8(data));
}
/**
* Calculates the SHA-384 digest and returns the value as a hex string.
* <p>
* Throws a <code>RuntimeException</code> on JRE versions prior to 1.4.0.
* </p>
*
* @param data Data to digest
* @return SHA-384 digest as a hex string
* @since 1.4
*/
public static String sha384Hex(byte[] data) {
return Hex.encodeHexString(sha384(data));
}
/**
* Calculates the SHA-384 digest and returns the value as a hex string.
* <p>
* Throws a <code>RuntimeException</code> on JRE versions prior to 1.4.0.
* </p>
*
* @param data Data to digest
* @return SHA-384 digest as a hex string
* @throws IOException On error reading from the stream
* @since 1.4
*/
public static String sha384Hex(InputStream data) throws IOException {
return Hex.encodeHexString(sha384(data));
}
/**
* Calculates the SHA-384 digest and returns the value as a hex string.
* <p>
* Throws a <code>RuntimeException</code> on JRE versions prior to 1.4.0.
* </p>
*
* @param data Data to digest
* @return SHA-384 digest as a hex string
* @since 1.4
*/
public static String sha384Hex(String data) {
return Hex.encodeHexString(sha384(data));
}
/**
* Calculates the SHA-512 digest and returns the value as a <code>byte[]</code>.
* <p>
* Throws a <code>RuntimeException</code> on JRE versions prior to 1.4.0.
* </p>
*
* @param data Data to digest
* @return SHA-512 digest
* @since 1.4
*/
public static byte[] sha512(byte[] data) {
return getSha512Digest().digest(data);
}
/**
* Calculates the SHA-512 digest and returns the value as a <code>byte[]</code>.
* <p>
* Throws a <code>RuntimeException</code> on JRE versions prior to 1.4.0.
* </p>
*
* @param data Data to digest
* @return SHA-512 digest
* @throws IOException On error reading from the stream
* @since 1.4
*/
public static byte[] sha512(InputStream data) throws IOException {
return digest(getSha512Digest(), data);
}
/**
* Calculates the SHA-512 digest and returns the value as a <code>byte[]</code>.
* <p>
* Throws a <code>RuntimeException</code> on JRE versions prior to 1.4.0.
* </p>
*
* @param data Data to digest
* @return SHA-512 digest
* @since 1.4
*/
public static byte[] sha512(String data) {
return sha512(getBytesUtf8(data));
}
/**
* Calculates the SHA-512 digest and returns the value as a hex string.
* <p>
* Throws a <code>RuntimeException</code> on JRE versions prior to 1.4.0.
* </p>
*
* @param data Data to digest
* @return SHA-512 digest as a hex string
* @since 1.4
*/
public static String sha512Hex(byte[] data) {
return Hex.encodeHexString(sha512(data));
}
/**
* Calculates the SHA-512 digest and returns the value as a hex string.
* <p>
* Throws a <code>RuntimeException</code> on JRE versions prior to 1.4.0.
* </p>
*
* @param data Data to digest
* @return SHA-512 digest as a hex string
* @throws IOException On error reading from the stream
* @since 1.4
*/
public static String sha512Hex(InputStream data) throws IOException {
return Hex.encodeHexString(sha512(data));
}
/**
* Calculates the SHA-512 digest and returns the value as a hex string.
* <p>
* Throws a <code>RuntimeException</code> on JRE versions prior to 1.4.0.
* </p>
*
* @param data Data to digest
* @return SHA-512 digest as a hex string
* @since 1.4
*/
public static String sha512Hex(String data) {
return Hex.encodeHexString(sha512(data));
}
/**
* Calculates the SHA-1 digest and returns the value as a hex string.
*
* @param data Data to digest
* @return SHA-1 digest as a hex string
*/
public static String shaHex(byte[] data) {
return Hex.encodeHexString(sha(data));
}
/**
* Calculates the SHA-1 digest and returns the value as a hex string.
*
* @param data Data to digest
* @return SHA-1 digest as a hex string
* @throws IOException On error reading from the stream
* @since 1.4
*/
public static String shaHex(InputStream data) throws IOException {
return Hex.encodeHexString(sha(data));
}
/**
* Calculates the SHA-1 digest and returns the value as a hex string.
*
* @param data Data to digest
* @return SHA-1 digest as a hex string
*/
public static String shaHex(String data) {
return Hex.encodeHexString(sha(data));
}
public static final NullDigest NULL_DIGEST = new NullDigest("null");
private static final class NullDigest extends MessageDigest {
private NullDigest(String algorithm) {
super(algorithm);
}
@Override
protected void engineUpdate(byte input) {
}
@Override
protected void engineUpdate(byte[] input, int offset, int len) {
}
@Override
protected byte[] engineDigest() {
return null;
}
@Override
protected void engineReset() {
}
}
}
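Digest was a local copy of commons-codec's DigestUtils; after this commit its only caller, the checksum benchmark in the last diff below, asks the JDK for a MessageDigest directly. A sketch of the equivalent of the removed md5Hex, assuming UTF-8 input and hand-rolling the hex conversion since the Hex helper below is removed as well:

import java.nio.charset.Charset;
import java.security.MessageDigest;

public class Md5Example {
    public static String md5Hex(String data) throws Exception {
        MessageDigest md5 = MessageDigest.getInstance("MD5"); // replaces Digest.getMd5Digest()
        byte[] digest = md5.digest(data.getBytes(Charset.forName("UTF-8")));
        StringBuilder sb = new StringBuilder(digest.length * 2);
        for (byte b : digest) {
            sb.append(Character.forDigit((b >> 4) & 0xF, 16)); // high nibble
            sb.append(Character.forDigit(b & 0xF, 16));        // low nibble
        }
        return sb.toString(); // 32 lowercase hex characters
    }
}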

View File

@@ -1,129 +0,0 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common;
import org.elasticsearch.ElasticSearchIllegalStateException;
/**
*
*/
public class Hex {
/**
* Used to build output as Hex
*/
private static final char[] DIGITS_LOWER = {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'};
/**
* Used to build output as Hex
*/
private static final char[] DIGITS_UPPER = {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F'};
/**
* Converts an array of bytes into an array of characters representing the hexadecimal values of each byte in order.
* The returned array will be double the length of the passed array, as it takes two characters to represent any
* given byte.
*
* @param data a byte[] to convert to Hex characters
* @return A char[] containing hexadecimal characters
*/
public static char[] encodeHex(byte[] data) {
return encodeHex(data, true);
}
/**
* Converts an array of bytes into an array of characters representing the hexadecimal values of each byte in order.
* The returned array will be double the length of the passed array, as it takes two characters to represent any
* given byte.
*
* @param data a byte[] to convert to Hex characters
* @param toLowerCase <code>true</code> converts to lowercase, <code>false</code> to uppercase
* @return A char[] containing hexadecimal characters
*/
public static char[] encodeHex(byte[] data, boolean toLowerCase) {
return encodeHex(data, toLowerCase ? DIGITS_LOWER : DIGITS_UPPER);
}
/**
* Converts an array of bytes into an array of characters representing the hexadecimal values of each byte in order.
* The returned array will be double the length of the passed array, as it takes two characters to represent any
* given byte.
*
* @param data a byte[] to convert to Hex characters
* @param toDigits the output alphabet
* @return A char[] containing hexadecimal characters
* @since 1.4
*/
protected static char[] encodeHex(byte[] data, char[] toDigits) {
int l = data.length;
char[] out = new char[l << 1];
// two characters form the hex value.
for (int i = 0, j = 0; i < l; i++) {
out[j++] = toDigits[(0xF0 & data[i]) >>> 4];
out[j++] = toDigits[0x0F & data[i]];
}
return out;
}
/**
* Converts an array of bytes into a String representing the hexadecimal values of each byte in order. The returned
* String will be double the length of the passed array, as it takes two characters to represent any given byte.
*
* @param data a byte[] to convert to Hex characters
* @return A String containing hexadecimal characters
*/
public static String encodeHexString(byte[] data) {
return new String(encodeHex(data));
}
public static byte[] decodeHex(String data) throws ElasticSearchIllegalStateException {
return decodeHex(data.toCharArray());
}
public static byte[] decodeHex(char[] data) throws ElasticSearchIllegalStateException {
int len = data.length;
if ((len & 0x01) != 0) {
throw new ElasticSearchIllegalStateException("Odd number of characters.");
}
byte[] out = new byte[len >> 1];
// two characters form the hex value.
for (int i = 0, j = 0; j < len; i++) {
int f = toDigit(data[j], j) << 4;
j++;
f = f | toDigit(data[j], j);
j++;
out[i] = (byte) (f & 0xFF);
}
return out;
}
protected static int toDigit(char ch, int index) throws ElasticSearchIllegalStateException {
int digit = Character.digit(ch, 16);
if (digit == -1) {
throw new ElasticSearchIllegalStateException("Illegal hexadecimal character " + ch + " at index " + index);
}
return digit;
}
}
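Hex likewise mirrored commons-codec. Encoding is covered by the sketch above; should decoding ever be needed again, the core of the removed class is a short loop over Character.digit. A standalone sketch with a hypothetical class name:

public class HexDecode {
    // Decode an even-length, case-insensitive hex string into bytes.
    public static byte[] decodeHex(String hex) {
        char[] chars = hex.toCharArray();
        if ((chars.length & 1) != 0) {
            throw new IllegalArgumentException("Odd number of characters");
        }
        byte[] out = new byte[chars.length >> 1];
        for (int i = 0; i < out.length; i++) {
            int hi = Character.digit(chars[2 * i], 16);     // high nibble
            int lo = Character.digit(chars[2 * i + 1], 16); // low nibble
            if (hi < 0 || lo < 0) {
                throw new IllegalArgumentException("Illegal hexadecimal character near index " + (2 * i));
            }
            out[i] = (byte) ((hi << 4) | lo);
        }
        return out;
    }
}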

View File

@@ -20,7 +20,7 @@
package org.elasticsearch.common.blobstore.fs;
import com.google.common.collect.ImmutableMap;
import com.google.common.io.Closeables;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.common.blobstore.BlobMetaData;
import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.blobstore.support.AbstractBlobContainer;
@@ -85,7 +85,7 @@ public abstract class AbstractFsBlobContainer extends AbstractBlobContainer {
try {
is = new FileInputStream(new File(path, blobName));
} catch (FileNotFoundException e) {
Closeables.closeQuietly(is);
IOUtils.closeWhileHandlingException(is);
listener.onFailure(e);
return;
}
@@ -96,7 +96,7 @@ public abstract class AbstractFsBlobContainer extends AbstractBlobContainer {
}
listener.onCompleted();
} catch (Exception e) {
Closeables.closeQuietly(is);
IOUtils.closeWhileHandlingException(is);
listener.onFailure(e);
}
}

View File

@@ -20,6 +20,7 @@ import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.elasticsearch.common.Classes;
import org.elasticsearch.common.inject.internal.*;
import org.elasticsearch.common.inject.spi.*;
import org.elasticsearch.common.inject.util.Providers;

View File

@@ -16,6 +16,7 @@
package org.elasticsearch.common.inject.internal;
import org.elasticsearch.common.Classes;
import org.elasticsearch.common.inject.BindingAnnotation;
import org.elasticsearch.common.inject.Key;
import org.elasticsearch.common.inject.ScopeAnnotation;

View File

@@ -1,35 +0,0 @@
/**
* Copyright (C) 2007 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.elasticsearch.common.inject.internal;
import java.lang.reflect.Modifier;
/**
* Class utilities.
*/
public class Classes {
public static boolean isInnerClass(Class<?> clazz) {
return !Modifier.isStatic(clazz.getModifiers())
&& clazz.getEnclosingClass() != null;
}
public static boolean isConcrete(Class<?> clazz) {
int modifiers = clazz.getModifiers();
return !clazz.isInterface() && !Modifier.isAbstract(modifiers);
}
}
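This duplicate lived alongside org.elasticsearch.common.Classes, which the two hunks above now import instead (assuming the retained class exposes equivalent helpers); the checks themselves need nothing beyond java.lang.reflect.Modifier. A minimal self-contained sketch of what the callers rely on:

import java.lang.reflect.Modifier;

public class ClassChecks {
    // An inner class is a non-static nested class, i.e. it has an enclosing class.
    static boolean isInnerClass(Class<?> clazz) {
        return !Modifier.isStatic(clazz.getModifiers()) && clazz.getEnclosingClass() != null;
    }

    // Concrete means instantiable: neither an interface nor abstract.
    static boolean isConcrete(Class<?> clazz) {
        int modifiers = clazz.getModifiers();
        return !clazz.isInterface() && !Modifier.isAbstract(modifiers);
    }
}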

View File

@@ -1,249 +0,0 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.lab;
import org.elasticsearch.common.Preconditions;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
public class LongsLAB {
private AtomicReference<Chunk> curChunk = new AtomicReference<Chunk>();
final int chunkSize;
final int maxAlloc;
public LongsLAB(int chunkSize, int maxAlloc) {
this.chunkSize = chunkSize;
this.maxAlloc = maxAlloc;
// if we don't exclude allocations >CHUNK_SIZE, we'd infiniteloop on one!
Preconditions.checkArgument(maxAlloc <= chunkSize);
}
/**
* Allocate a slice of the given length.
* <p/>
* If the size is larger than the maximum size specified for this
* allocator, returns null.
*/
public Allocation allocateLongs(int size) {
Preconditions.checkArgument(size >= 0, "negative size");
// Callers should satisfy large allocations directly from JVM since they
// don't cause fragmentation as badly.
if (size > maxAlloc) {
return null;
}
while (true) {
Chunk c = getOrMakeChunk();
// Try to allocate from this chunk
int allocOffset = c.alloc(size);
if (allocOffset != -1) {
// We succeeded - this is the common case - small alloc
// from a big buffer
return new Allocation(c.data, allocOffset);
}
// not enough space!
// try to retire this chunk
tryRetireChunk(c);
}
}
/**
* Try to retire the current chunk if it is still
* <code>c</code>. Postcondition is that curChunk.get()
* != c
*/
private void tryRetireChunk(Chunk c) {
@SuppressWarnings("unused")
boolean weRetiredIt = curChunk.compareAndSet(c, null);
// If the CAS succeeds, that means that we won the race
// to retire the chunk. We could use this opportunity to
// update metrics on external fragmentation.
//
// If the CAS fails, that means that someone else already
// retired the chunk for us.
}
/**
* Get the current chunk, or, if there is no current chunk,
* allocate a new one from the JVM.
*/
private Chunk getOrMakeChunk() {
while (true) {
// Try to get the chunk
Chunk c = curChunk.get();
if (c != null) {
return c;
}
// No current chunk, so we want to allocate one. We race
// against other allocators to CAS in an uninitialized chunk
// (which is cheap to allocate)
c = new Chunk(chunkSize);
if (curChunk.compareAndSet(null, c)) {
// we won race - now we need to actually do the expensive
// allocation step
c.init();
return c;
}
// someone else won race - that's fine, we'll try to grab theirs
// in the next iteration of the loop.
}
}
/**
* A chunk of memory out of which allocations are sliced.
*/
private static class Chunk {
/**
* Actual underlying data
*/
private long[] data;
private static final int UNINITIALIZED = -1;
private static final int OOM = -2;
/**
* Offset for the next allocation, or the sentinel value -1
* which implies that the chunk is still uninitialized.
*/
private AtomicInteger nextFreeOffset = new AtomicInteger(UNINITIALIZED);
/**
* Total number of allocations satisfied from this buffer
*/
private AtomicInteger allocCount = new AtomicInteger();
/**
* Size of chunk in longs
*/
private final int size;
/**
* Create an uninitialized chunk. Note that memory is not allocated yet, so
* this is cheap.
*
* @param size in longs
*/
private Chunk(int size) {
this.size = size;
}
/**
* Actually claim the memory for this chunk. This should only be called from
* the thread that constructed the chunk. It is thread-safe against other
* threads calling alloc(), who will block until the allocation is complete.
*/
public void init() {
assert nextFreeOffset.get() == UNINITIALIZED;
try {
data = new long[size];
} catch (OutOfMemoryError e) {
boolean failInit = nextFreeOffset.compareAndSet(UNINITIALIZED, OOM);
assert failInit; // should be true.
throw e;
}
// Mark that it's ready for use
boolean initted = nextFreeOffset.compareAndSet(
UNINITIALIZED, 0);
// We should always succeed the above CAS since only one thread
// calls init()!
Preconditions.checkState(initted,
"Multiple threads tried to init same chunk");
}
/**
* Try to allocate <code>size</code> longs from the chunk.
*
* @return the offset of the successful allocation, or -1 to indicate not-enough-space
*/
public int alloc(int size) {
while (true) {
int oldOffset = nextFreeOffset.get();
if (oldOffset == UNINITIALIZED) {
// The chunk doesn't have its data allocated yet.
// Since we found this in curChunk, we know that whoever
// CAS-ed it there is allocating it right now. So spin-loop
// shouldn't spin long!
Thread.yield();
continue;
}
if (oldOffset == OOM) {
// doh we ran out of ram. return -1 to chuck this away.
return -1;
}
if (oldOffset + size > data.length) {
return -1; // alloc doesn't fit
}
// Try to atomically claim this chunk
if (nextFreeOffset.compareAndSet(oldOffset, oldOffset + size)) {
// we got the alloc
allocCount.incrementAndGet();
return oldOffset;
}
// we raced and lost alloc, try again
}
}
@Override
public String toString() {
return "Chunk@" + System.identityHashCode(this) +
" allocs=" + allocCount.get() + "waste=" +
(data.length - nextFreeOffset.get());
}
}
/**
* The result of a single allocation. Contains the chunk that the
* allocation points into, and the offset in this array where the
* slice begins.
*/
public static class Allocation {
private final long[] data;
private final int offset;
private Allocation(long[] data, int off) {
this.data = data;
this.offset = off;
}
@Override
public String toString() {
return "Allocation(data=" + data +
" with capacity=" + data.length +
", off=" + offset + ")";
}
public long[] getData() {
return data;
}
public int getOffset() {
return offset;
}
}
}
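LongsLAB has no callers in the tree, which is the reason for its removal. For reference, driving the allocator only involved allocateLongs and the returned Allocation; a hypothetical caller (sizes and usage invented purely for illustration) would have looked like:

public class LongsLABUsage {
    public static void main(String[] args) {
        // 64k-long chunks, individual allocations capped at 1k longs (hypothetical sizes).
        LongsLAB lab = new LongsLAB(64 * 1024, 1024);
        LongsLAB.Allocation allocation = lab.allocateLongs(16);
        if (allocation != null) {                // null means the request exceeded maxAlloc
            long[] data = allocation.getData();  // backing array shared with other allocations
            int offset = allocation.getOffset(); // this slice owns [offset, offset + 16)
            for (int i = 0; i < 16; i++) {
                data[offset + i] = i;
            }
        }
    }
}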

View File

@@ -19,7 +19,7 @@
package org.elasticsearch.common.settings.loader;
import com.google.common.io.Closeables;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.common.io.FastStringReader;
import org.elasticsearch.common.io.stream.BytesStreamInput;
@@ -46,7 +46,7 @@ public class PropertiesSettingsLoader implements SettingsLoader {
}
return result;
} finally {
Closeables.closeQuietly(reader);
IOUtils.closeWhileHandlingException(reader);
}
}
@@ -62,7 +62,7 @@ public class PropertiesSettingsLoader implements SettingsLoader {
}
return result;
} finally {
Closeables.closeQuietly(stream);
IOUtils.closeWhileHandlingException(stream);
}
}
}

View File

@@ -21,7 +21,7 @@ package org.elasticsearch.gateway.local.state.meta;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.io.Closeables;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticSearchIllegalArgumentException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterChangedEvent;
@@ -341,12 +341,12 @@ public class LocalGatewayMetaState extends AbstractComponent implements ClusterS
BytesReference bytes = builder.bytes();
fos.write(bytes.array(), bytes.arrayOffset(), bytes.length());
fos.getChannel().force(true);
Closeables.closeQuietly(fos);
fos.close();
wroteAtLeastOnce = true;
} catch (Exception e) {
lastFailure = e;
} finally {
Closeables.closeQuietly(fos);
IOUtils.closeWhileHandlingException(fos);
}
}
@@ -400,12 +400,12 @@ public class LocalGatewayMetaState extends AbstractComponent implements ClusterS
BytesReference bytes = builder.bytes();
fos.write(bytes.array(), bytes.arrayOffset(), bytes.length());
fos.getChannel().force(true);
Closeables.closeQuietly(fos);
fos.close();
wroteAtLeastOnce = true;
} catch (Exception e) {
lastFailure = e;
} finally {
Closeables.closeQuietly(fos);
IOUtils.closeWhileHandlingException(fos);
}
}
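The two state-writing hunks above (and the shard-state one below) also stop relying on a quiet close on the success path: the stream is now closed explicitly so that a failed close() still surfaces as a write failure, while the suppressing close in finally only cleans up after exceptions. A condensed sketch of that shape, with the file handling and serialization details reduced to hypothetical placeholders:

import org.apache.lucene.util.IOUtils;

import java.io.File;
import java.io.FileOutputStream;

public class SafeStateWrite {
    static boolean writeState(File target, byte[] bytes) {
        FileOutputStream fos = null;
        try {
            fos = new FileOutputStream(target);
            fos.write(bytes);
            fos.getChannel().force(true); // fsync before trusting the write
            fos.close();                  // explicit close: a failure here is still reported
            return true;                  // only now is the write counted as successful
        } catch (Exception e) {
            return false;                 // the real code records this as lastFailure
        } finally {
            IOUtils.closeWhileHandlingException(fos); // no-op if already closed or null
        }
    }
}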

View File

@@ -20,7 +20,7 @@
package org.elasticsearch.gateway.local.state.shards;
import com.google.common.collect.Maps;
import com.google.common.io.Closeables;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterStateListener;
import org.elasticsearch.cluster.node.DiscoveryNode;
@@ -291,12 +291,12 @@ public class LocalGatewayShardsState extends AbstractComponent implements Cluste
BytesReference bytes = builder.bytes();
fos.write(bytes.array(), bytes.arrayOffset(), bytes.length());
fos.getChannel().force(true);
Closeables.closeQuietly(fos);
fos.close();
wroteAtLeastOnce = true;
} catch (Exception e) {
lastFailure = e;
} finally {
Closeables.closeQuietly(fos);
IOUtils.closeWhileHandlingException(fos);
}
}

View File

@@ -19,7 +19,6 @@
package org.elasticsearch.benchmark.checksum;
import org.elasticsearch.common.Digest;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
@@ -34,7 +33,7 @@ public class ChecksumBenchmarkTest {
public static final int BATCH_SIZE = 16 * 1024;
public static void main(String[] args) {
public static void main(String[] args) throws Exception {
System.out.println("Warning up");
long warmSize = ByteSizeValue.parseBytesSizeValue("1g", null).bytes();
crc(warmSize);
@@ -72,11 +71,11 @@ public class ChecksumBenchmarkTest {
System.out.println("Adler took " + new TimeValue(System.currentTimeMillis() - start));
}
private static void md5(long dataSize) {
private static void md5(long dataSize) throws Exception {
long start = System.currentTimeMillis();
byte[] data = new byte[BATCH_SIZE];
long iter = dataSize / BATCH_SIZE;
MessageDigest digest = Digest.getMd5Digest();
MessageDigest digest = MessageDigest.getInstance("MD5");
for (long i = 0; i < iter; i++) {
digest.update(data);
}