[STORE] Fold two hashFile implementations into one

This commit is contained in:
Simon Willnauer 2014-09-15 11:03:49 +02:00
parent 723a40ef34
commit ec28d7c465
2 changed files with 19 additions and 25 deletions

View File

@ -26,6 +26,7 @@ import com.google.common.io.ByteStreams;
import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.store.*; import org.apache.lucene.store.*;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.cluster.metadata.SnapshotId; import org.elasticsearch.cluster.metadata.SnapshotId;
import org.elasticsearch.common.blobstore.*; import org.elasticsearch.common.blobstore.*;
@ -621,11 +622,12 @@ public class BlobStoreIndexShardRepository extends AbstractComponent implements
// to calculate it. // to calculate it.
// we might have multiple parts even though the file is small... make sure we read all of it. // we might have multiple parts even though the file is small... make sure we read all of it.
try (final InputStream stream = new PartSliceStream(blobContainer, fileInfo)) { try (final InputStream stream = new PartSliceStream(blobContainer, fileInfo)) {
final byte[] bytes = ByteStreams.toByteArray(stream); BytesRefBuilder builder = new BytesRefBuilder();
assert bytes != null; Store.MetadataSnapshot.hashFile(builder, stream, fileInfo.length());
assert bytes.length == fileInfo.length() : bytes.length + " != " + fileInfo.length(); BytesRef hash = metadata.hash();
final BytesRef spare = new BytesRef(bytes); hash.bytes = builder.bytes();
Store.MetadataSnapshot.hashFile(fileInfo.metadata().hash(), spare); hash.offset = 0;
hash.length = builder.length();
} }
} }
} }

View File

@ -27,6 +27,7 @@ import org.apache.lucene.codecs.lucene46.Lucene46SegmentInfoFormat;
import org.apache.lucene.index.*; import org.apache.lucene.index.*;
import org.apache.lucene.store.*; import org.apache.lucene.store.*;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.Version; import org.apache.lucene.util.Version;
import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ExceptionsHelper;
@ -35,9 +36,11 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.compress.Compressor; import org.elasticsearch.common.compress.Compressor;
import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.compress.CompressorFactory;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.lucene.Directories; import org.elasticsearch.common.lucene.Directories;
import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.store.InputStreamIndexInput;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.CloseableIndexComponent; import org.elasticsearch.index.CloseableIndexComponent;
import org.elasticsearch.index.codec.CodecService; import org.elasticsearch.index.codec.CodecService;
@ -561,15 +564,15 @@ public class Store extends AbstractIndexShardComponent implements CloseableIndex
private static void checksumFromLuceneFile(Directory directory, String file, ImmutableMap.Builder<String, StoreFileMetaData> builder, ESLogger logger, Version version, boolean readFileAsHash) throws IOException { private static void checksumFromLuceneFile(Directory directory, String file, ImmutableMap.Builder<String, StoreFileMetaData> builder, ESLogger logger, Version version, boolean readFileAsHash) throws IOException {
final String checksum; final String checksum;
final BytesRef fileHash = new BytesRef(); final BytesRefBuilder fileHash = new BytesRefBuilder();
try (IndexInput in = directory.openInput(file, IOContext.READONCE)) { try (final IndexInput in = directory.openInput(file, IOContext.READONCE)) {
try { try {
if (in.length() < CodecUtil.footerLength()) { if (in.length() < CodecUtil.footerLength()) {
// truncated files trigger IAE if we seek negative... these files are really corrupted though // truncated files trigger IAE if we seek negative... these files are really corrupted though
throw new CorruptIndexException("Can't retrieve checksum from file: " + file + " file length must be >= " + CodecUtil.footerLength() + " but was: " + in.length()); throw new CorruptIndexException("Can't retrieve checksum from file: " + file + " file length must be >= " + CodecUtil.footerLength() + " but was: " + in.length());
} }
if (readFileAsHash) { if (readFileAsHash) {
hashFile(fileHash, in); hashFile(fileHash, new InputStreamIndexInput(in, in.length()), in.length());
} }
checksum = digestToString(CodecUtil.retrieveChecksum(in)); checksum = digestToString(CodecUtil.retrieveChecksum(in));
@ -577,30 +580,19 @@ public class Store extends AbstractIndexShardComponent implements CloseableIndex
logger.debug("Can retrieve checksum from file [{}]", ex, file); logger.debug("Can retrieve checksum from file [{}]", ex, file);
throw ex; throw ex;
} }
builder.put(file, new StoreFileMetaData(file, directory.fileLength(file), checksum, version, fileHash)); builder.put(file, new StoreFileMetaData(file, directory.fileLength(file), checksum, version, fileHash.get()));
} }
} }
/** /**
* Computes a strong hash value for small files. Note that this method should only be used for files < 1MB * Computes a strong hash value for small files. Note that this method should only be used for files < 1MB
*/ */
public static void hashFile(BytesRef fileHash, IndexInput in) throws IOException { public static void hashFile(BytesRefBuilder fileHash, InputStream in, long size) throws IOException {
final int len = (int)Math.min(1024 * 1024, in.length()); // for safety we limit this to 1MB final int len = (int)Math.min(1024 * 1024, size); // for safety we limit this to 1MB
fileHash.offset = 0;
fileHash.grow(len); fileHash.grow(len);
fileHash.length = len; fileHash.setLength(len);
in.readBytes(fileHash.bytes, 0, len); Streams.readFully(in, fileHash.bytes(), 0, len);
} assert fileHash.length() == len;
/**
* Computes a strong hash value for small files. Note that this method should only be used for files < 1MB
*/
public static void hashFile(BytesRef fileHash, BytesRef source) throws IOException {
final int len = Math.min(1024 * 1024, source.length); // for safety we limit this to 1MB
fileHash.offset = 0;
fileHash.grow(len);
fileHash.length = len;
System.arraycopy(source.bytes, source.offset, fileHash.bytes, 0, len);
} }
@Override @Override