diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/common/compress/lzf/LZFInputStream.java b/modules/elasticsearch/src/main/java/org/elasticsearch/common/compress/lzf/LZFInputStream.java
index e767b436d13..1852fd05636 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/common/compress/lzf/LZFInputStream.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/common/compress/lzf/LZFInputStream.java
@@ -1,22 +1,3 @@
-/*
- * Licensed to Elastic Search and Shay Banon under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Elastic Search licenses this
- * file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
 package org.elasticsearch.common.compress.lzf;
 
 import java.io.IOException;
@@ -26,7 +7,7 @@ public class LZFInputStream extends InputStream {
     public static int EOF_FLAG = -1;
 
     /* stream to be decompressed */
-    private InputStream inputStream;
+    private final InputStream inputStream;
 
     /* the current buffer of compressed bytes */
     private final byte[] compressedBytes = new byte[LZFChunk.MAX_CHUNK_LEN];
@@ -79,7 +60,6 @@ public class LZFInputStream extends InputStream {
             bufferPosition += chunkLength;
             readyBuffer();
         }
-        // FIXED HERE: fixed to return actual length read
         return outputPos - offset;
     }
 
@@ -87,16 +67,10 @@ public class LZFInputStream extends InputStream {
         inputStream.close();
     }
 
-    public void reset(InputStream is) {
-        this.inputStream = is;
-        bufferLength = 0;
-        bufferPosition = 0;
-    }
-
     /**
      * Fill the uncompressed bytes buffer by reading the underlying inputStream.
      *
-     * @throws java.io.IOException
+     * @throws IOException
      */
     private void readyBuffer() throws IOException {
         if (bufferPosition >= bufferLength) {
@@ -104,4 +78,4 @@
             bufferPosition = 0;
         }
     }
-}
+}
\ No newline at end of file
diff --git a/modules/elasticsearch/src/test/java/org/elasticsearch/common/compress/lzf/LZFInputStreamTests.java b/modules/elasticsearch/src/test/java/org/elasticsearch/common/compress/lzf/LZFInputStreamTests.java
index 145ad86f054..8f5a3697680 100644
--- a/modules/elasticsearch/src/test/java/org/elasticsearch/common/compress/lzf/LZFInputStreamTests.java
+++ b/modules/elasticsearch/src/test/java/org/elasticsearch/common/compress/lzf/LZFInputStreamTests.java
@@ -25,6 +25,7 @@ import org.testng.annotations.Test;
 
 import java.io.*;
 import java.security.SecureRandom;
+import java.util.Random;
 
 /**
  * @author kimchy (shay.banon)
@@ -91,6 +92,52 @@ public class LZFInputStreamTests {
         Assert.assertTrue(outputBytes == reference.length);
     }
 
+    @Test public void testIncremental() throws IOException {
+        // first need to compress something...
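+        // (contract under test: LZFInputStream.read(byte[], int, int) returns
+        // the number of bytes actually read, never more than was requested,
+        // and -1 once the end of the compressed stream is reached)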
+        String[] words = new String[]{"what", "ever", "some", "other", "words", "too"};
+        StringBuilder sb = new StringBuilder(258000);
+        Random rnd = new Random(123);
+        while (sb.length() < 256000) {
+            int i = (rnd.nextInt() & 31);
+            if (i < words.length) {
+                sb.append(words[i]);
+            } else {
+                sb.append(i);
+            }
+        }
+        byte[] uncomp = sb.toString().getBytes("UTF-8");
+        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
+        LZFOutputStream lzOut = new LZFOutputStream(bytes);
+        lzOut.write(uncomp);
+        lzOut.close();
+        byte[] comp = bytes.toByteArray();
+
+        // read back, in chunks
+        bytes = new ByteArrayOutputStream(uncomp.length);
+        byte[] buffer = new byte[64];
+        LZFInputStream lzIn = new LZFInputStream(new ByteArrayInputStream(comp));
+
+        while (true) {
+            int len = 1 + ((rnd.nextInt() & 0x7FFFFFFF) % buffer.length);
+            int offset = buffer.length - len;
+
+            int count = lzIn.read(buffer, offset, len);
+            if (count < 0) {
+                break;
+            }
+            if (count > len) {
+                Assert.fail("Requested " + len + " bytes (offset " + offset + ", array length " + buffer.length + "), got " + count);
+            }
+            bytes.write(buffer, offset, count);
+        }
+        byte[] result = bytes.toByteArray();
+        Assert.assertEquals(result.length, uncomp.length);
+        Assert.assertEquals(result, uncomp);
+    }
+
     private void doDecompressReadBlock(byte[] bytes, byte[] reference) throws IOException {
         ByteArrayInputStream bis = new ByteArrayInputStream(bytes);