Fix S3 Repo Tests Incomplete Reads (#50268) (#50275)

We need to read in a loop here. A single read to a huge byte array will
only read 16k max with the S3 SDK so if the blob we're trying to fully
read is larger we close early and fail the size comparison.
Also, drain streams fully when checking existence to avoid S3 SDK warnings.
This commit is contained in:
Armin Braun 2019-12-17 15:33:09 +01:00 committed by GitHub
parent 99fdea50dd
commit 55cc5432d6
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 7 additions and 2 deletions

View File

@@ -96,8 +96,12 @@ public final class BlobStoreTestUtil {
BlobStoreTestUtil.assertConsistency(repo, repo.threadPool().executor(ThreadPool.Names.GENERIC)); BlobStoreTestUtil.assertConsistency(repo, repo.threadPool().executor(ThreadPool.Names.GENERIC));
} }
private static final byte[] SINK = new byte[1024];
public static boolean blobExists(BlobContainer container, String blobName) throws IOException { public static boolean blobExists(BlobContainer container, String blobName) throws IOException {
try (InputStream ignored = container.readBlob(blobName)) { try (InputStream input = container.readBlob(blobName)) {
// Drain input stream fully to avoid warnings from SDKs like S3 that don't like closing streams mid-way
while (input.read(SINK) >= 0);
return true; return true;
} catch (NoSuchFileException e) { } catch (NoSuchFileException e) {
return false; return false;

View File

@@ -34,6 +34,7 @@ import org.elasticsearch.common.blobstore.BlobMetaData;
import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.blobstore.BlobStore;
import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.repositories.IndexId; import org.elasticsearch.repositories.IndexId;
import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.repositories.RepositoriesService;
@@ -236,7 +237,7 @@ public abstract class ESBlobStoreRepositoryIntegTestCase extends ESIntegTestCase
public static byte[] readBlobFully(BlobContainer container, String name, int length) throws IOException { public static byte[] readBlobFully(BlobContainer container, String name, int length) throws IOException {
byte[] data = new byte[length]; byte[] data = new byte[length];
try (InputStream inputStream = container.readBlob(name)) { try (InputStream inputStream = container.readBlob(name)) {
assertThat(inputStream.read(data), CoreMatchers.equalTo(length)); assertThat(Streams.readFully(inputStream, data), CoreMatchers.equalTo(length));
assertThat(inputStream.read(), CoreMatchers.equalTo(-1)); assertThat(inputStream.read(), CoreMatchers.equalTo(-1));
} }
return data; return data;