Remove erroneous static final from BIG_FILE

Also use preferred bigFile capitalization for member. Fixes regression from e058973abc.
This commit is contained in:
parent e058973abc
commit 01780db4db
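Background on the fix (a minimal sketch, not part of the commit; the class and field names below are illustrative, not the jclouds test): Java only allows a static final field to be assigned at its declaration or in a static initializer, so the constructor's assignment of File.createTempFile to BIG_FILE could not compile. Switching to a plain instance field, as this commit does, makes the constructor assignment legal:

import java.io.File;
import java.io.IOException;

// Hypothetical stand-in for the live test, for illustration only.
class TempFileHolder {
   // Were this declared "static final", it could only be assigned here or in a
   // static initializer; reassigning it from the constructor would be a
   // compile-time error ("cannot assign a value to final variable").
   // As a plain instance field it can be replaced per instance.
   private File bigFile = new File("random.dat");

   TempFileHolder() {
      try {
         // Swap the placeholder for a fresh temp file, as the test constructor does.
         bigFile = File.createTempFile("random", "dat");
      } catch (IOException ioe) {
         throw new RuntimeException(ioe);
      }
   }

   public static void main(String[] args) {
      System.out.println(new TempFileHolder().bigFile.getAbsolutePath());
   }
}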
@@ -56,7 +56,7 @@ import com.google.common.util.concurrent.MoreExecutors;
 @Test(groups = "live", singleThreaded = true)
 public class RegionScopedSwiftBlobStoreParallelLiveTest extends BaseBlobStoreIntegrationTest {
 
-   private static final File BIG_FILE = new File("random.dat");
+   private File bigFile = new File("random.dat");
    private static final long SIZE = 10 * 1000 * 1000;
    private BlobStore blobStore;
    private String etag;
@@ -72,7 +72,7 @@ public class RegionScopedSwiftBlobStoreParallelLiveTest extends BaseBlobStoreInt
    public RegionScopedSwiftBlobStoreParallelLiveTest() {
       provider = "openstack-swift";
       try {
-         BIG_FILE = File.createTempFile("random", "dat");
+         bigFile = File.createTempFile("random", "dat");
       } catch (IOException ioe) {
          throw new RuntimeException(ioe);
       }
@@ -94,8 +94,8 @@ public class RegionScopedSwiftBlobStoreParallelLiveTest extends BaseBlobStoreInt
    @BeforeClass
    public void setup() throws IOException, InterruptedException {
       blobStore = getBlobStore();
-      createRandomFile(SIZE, BIG_FILE);
-      HashCode hashCode = Files.hash(BIG_FILE, Hashing.md5());
+      createRandomFile(SIZE, bigFile);
+      HashCode hashCode = Files.hash(bigFile, Hashing.md5());
       etag = hashCode.toString();
       blobStore.createContainerInLocation(null, CONTAINER);
       System.out.println("generated file md5: " + etag);
@@ -104,8 +104,8 @@ public class RegionScopedSwiftBlobStoreParallelLiveTest extends BaseBlobStoreInt
    @AfterClass
    public void cleanupFiles() {
       // Delete local file
-      delete(BIG_FILE);
-      delete(new File(BIG_FILE + ".downloaded"));
+      delete(bigFile);
+      delete(new File(bigFile + ".downloaded"));
 
       // Delete uploaded file
       blobStore.clearContainer(CONTAINER);
@@ -114,8 +114,8 @@ public class RegionScopedSwiftBlobStoreParallelLiveTest extends BaseBlobStoreInt
 
    @Test
    public void uploadMultipartBlob() {
-      Blob blob = blobStore.blobBuilder(BIG_FILE.getName())
-            .payload(new FilePayload(BIG_FILE))
+      Blob blob = blobStore.blobBuilder(bigFile.getName())
+            .payload(new FilePayload(bigFile))
             .build();
       // configure the blobstore to use multipart uploading of the file
       String eTag = blobStore.putBlob(CONTAINER, blob, multipart(executor));
@@ -126,15 +126,15 @@ public class RegionScopedSwiftBlobStoreParallelLiveTest extends BaseBlobStoreInt
 
    @Test(dependsOnMethods = "uploadMultipartBlob", singleThreaded = true)
    public void downloadParallelBlob() throws IOException {
-      final File downloadedFile = new File(BIG_FILE + ".downloaded");
-      blobStore.downloadBlob(CONTAINER, BIG_FILE.getName(), downloadedFile, executor);
+      final File downloadedFile = new File(bigFile + ".downloaded");
+      blobStore.downloadBlob(CONTAINER, bigFile.getName(), downloadedFile, executor);
       String eTag = Files.hash(downloadedFile, Hashing.md5()).toString();
       assertEquals(eTag, etag);
    }
 
    @Test(dependsOnMethods = "uploadMultipartBlob", singleThreaded = true)
    public void streamParallelBlob() throws IOException {
-      InputStream is = blobStore.streamBlob(CONTAINER, BIG_FILE.getName(), executor);
+      InputStream is = blobStore.streamBlob(CONTAINER, bigFile.getName(), executor);
       byte[] segment = new byte[1000000];
 
       Hasher hasher = Hashing.md5().newHasher();