Address several Guava InputSupplier deprecations

Many remain due to Payload implementing InputSupplier<InputStream>. Guava 18
will remove InputSupplier.
This commit is contained in:
Andrew Gaul 2014-04-11 22:25:16 -07:00
parent 7af18d4843
commit d0bd30cc15
10 changed files with 17 additions and 20 deletions

View File

@@ -48,7 +48,6 @@ import org.testng.annotations.Test;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.io.ByteSource;
import com.google.common.io.ByteStreams;
import com.google.common.io.Files;
/**
@@ -356,7 +355,7 @@ public class FilesystemStorageStrategyImplTest {
File blobFullPath = new File(TARGET_CONTAINER_NAME, blobKey);
ByteSource expectedInput = Files.asByteSource(sourceFile);
ByteSource actualInput = Files.asByteSource(blobFullPath);
assertTrue(ByteStreams.equal(expectedInput, actualInput),
assertTrue(expectedInput.contentEquals(actualInput),
"Files are not equal");
}
@@ -375,7 +374,7 @@ public class FilesystemStorageStrategyImplTest {
File blobFullPath = new File(TARGET_CONTAINER_NAME, blobKey);
ByteSource expectedInput = Files.asByteSource(sourceFile);
ByteSource actualInput = Files.asByteSource(blobFullPath);
assertTrue(ByteStreams.equal(expectedInput, actualInput),
assertTrue(expectedInput.contentEquals(actualInput),
"Files are not equal");
}

View File

@@ -31,7 +31,7 @@ import org.jclouds.filesystem.util.Utils;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterators;
import com.google.common.collect.Sets;
import com.google.common.io.ByteStreams;
import com.google.common.io.ByteSource;
import com.google.common.io.Files;
/**
@@ -218,7 +218,7 @@ public class TestUtils {
for (File file : IMAGE_RESOURCES) {
byte[] buffer = new byte[random.nextInt(2 * 1024 * 1024)];
random.nextBytes(buffer);
Files.copy(ByteStreams.newInputStreamSupplier(buffer), file);
ByteSource.wrap(buffer).copyTo(Files.asByteSink(file));
}
}

View File

@@ -183,7 +183,7 @@ public class SwiftBlobIntegrationLiveTest extends BaseBlobIntegrationTest {
}
File fileToUpload = new File("target/lots-of-const.txt");
Files.copy(temp, fileToUpload);
temp.copyTo(Files.asByteSink(fileToUpload));
assertTrue(fileToUpload.length() > partSize);
return fileToUpload;

View File

@@ -135,7 +135,7 @@ public class BaseBlobIntegrationTest extends BaseBlobStoreIntegrationTest {
public void testPutFileParallel() throws InterruptedException, IOException, TimeoutException {
File payloadFile = File.createTempFile("testPutFileParallel", "png");
Files.copy(createTestInput(32 * 1024), payloadFile);
createTestInput(32 * 1024).copyTo(Files.asByteSink(payloadFile));
final Payload testPayload = Payloads.newFilePayload(payloadFile);
final byte[] md5 = md5Supplier(testPayload);
@@ -225,7 +225,7 @@ public class BaseBlobIntegrationTest extends BaseBlobStoreIntegrationTest {
.payload(new InputStreamSupplierPayload(supplier))
.contentType("text/plain")
.contentMD5(supplier.hash(md5()).asBytes())
.contentLength(ByteStreams.length(supplier))
.contentLength(supplier.size())
.contentDisposition(contentDisposition)
.build());
}
@@ -297,14 +297,14 @@ public class BaseBlobIntegrationTest extends BaseBlobStoreIntegrationTest {
@Test(groups = { "integration", "live" })
public void testPutCorrectContentMD5() throws InterruptedException, IOException {
byte[] payload = ByteStreams.toByteArray(createTestInput(1024));
byte[] payload = createTestInput(1024).read();
HashCode contentMD5 = md5().hashBytes(payload);
putBlobWithMd5(payload, contentMD5);
}
@Test(groups = { "integration", "live" })
public void testPutIncorrectContentMD5() throws InterruptedException, IOException {
byte[] payload = ByteStreams.toByteArray(createTestInput(1024));
byte[] payload = createTestInput(1024).read();
HashCode contentMD5 = md5().hashBytes(new byte[0]);
try {
putBlobWithMd5(payload, contentMD5);

View File

@@ -100,7 +100,7 @@ public abstract class Wire {
is.close();
}
// we must call FileBackedOutputStream.reset to remove temporary file
return new FilterInputStream(out.asByteSource().getInput()) {
return new FilterInputStream(out.asByteSource().openStream()) {
@Override
public void close() throws IOException {
super.close();

View File

@@ -227,7 +227,7 @@ public abstract class BaseHttpCommandExecutorServiceIntegrationTest extends Base
public void testGetBigFile() throws Exception {
MockResponse response = new MockResponse().addHeader("Content-MD5", constitutionsMd5)
.addHeader("Content-type", "text/plain")
.setBody(oneHundredOneConstitutions.getInput(), constitutionsLength);
.setBody(oneHundredOneConstitutions.openStream(), constitutionsLength);
MockWebServer server = mockWebServer(response, response);
InputStream input = server.getUrl("/101constitutions").openStream();

View File

@@ -19,7 +19,6 @@ package org.jclouds.http;
import static com.google.common.base.Throwables.getStackTraceAsString;
import static com.google.common.hash.Hashing.md5;
import static com.google.common.io.BaseEncoding.base64;
import static com.google.common.io.ByteStreams.copy;
import static com.google.common.io.ByteStreams.toByteArray;
import static com.google.common.net.HttpHeaders.CONTENT_DISPOSITION;
import static com.google.common.net.HttpHeaders.CONTENT_ENCODING;
@@ -114,7 +113,7 @@ public abstract class BaseJettyTest {
response.setContentType("text/plain");
response.setHeader("Content-MD5", md5);
response.setStatus(SC_OK);
copy(oneHundredOneConstitutions, response.getOutputStream());
oneHundredOneConstitutions.copyTo(response.getOutputStream());
} else if (request.getMethod().equals("PUT")) {
if (request.getContentLength() > 0) {
response.setStatus(SC_OK);

View File

@@ -149,7 +149,7 @@ public class AWSS3ClientLiveTest extends S3ClientLiveTest {
public void testMultipartChunkedFileStream() throws IOException, InterruptedException {
File file = new File("target/const.txt");
Files.copy(oneHundredOneConstitutions, file);
oneHundredOneConstitutions.copyTo(Files.asByteSink(file));
String containerName = getContainerName();
try {

View File

@@ -81,7 +81,7 @@ public class AzureBlobIntegrationLiveTest extends BaseBlobIntegrationTest {
oneHundredOneConstitutions = getTestDataSupplier();
oneHundredOneConstitutionsMD5 = oneHundredOneConstitutions.hash(md5()).asBytes();
File file = new File("target/const.txt");
Files.copy(oneHundredOneConstitutions, file);
oneHundredOneConstitutions.copyTo(Files.asByteSink(file));
String containerName = getContainerName();
try {
@@ -100,7 +100,7 @@ public class AzureBlobIntegrationLiveTest extends BaseBlobIntegrationTest {
final long limit = MultipartUploadStrategy.MAX_BLOCK_SIZE;
ByteSource input = repeatingArrayByteSource(new byte[1024]).slice(0, limit);
File file = new File("target/const.txt");
Files.copy(input, file);
input.copyTo(Files.asByteSink(file));
String containerName = getContainerName();
try {

View File

@@ -16,8 +16,6 @@
*/
package org.jclouds.scriptbuilder.functionloader.filters;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.io.CharStreams.newReaderSupplier;
import static com.google.common.io.CharStreams.readLines;
import java.io.IOException;
@@ -26,6 +24,7 @@ import org.jclouds.scriptbuilder.domain.ShellToken;
import org.jclouds.scriptbuilder.functionloader.FunctionLoader;
import org.jclouds.scriptbuilder.functionloader.FunctionNotFoundException;
import com.google.common.io.CharSource;
import com.google.common.io.LineProcessor;
/**
@@ -55,7 +54,7 @@ public class LicenseHeaderFilter implements FunctionLoader {
*/
public String filter(String lines, OsFamily family) {
try {
return readLines(newReaderSupplier(checkNotNull(lines, "lines")),
return CharSource.wrap(checkNotNull(lines, "lines")).readLines(
new LicenseHeaderProcessor(checkNotNull(family, "family")));
} catch (IOException e) {
// Don't fail; just return the original text with the comments