HADOOP-15274. Move hadoop-openstack to slf4j

Signed-off-by: Akira Ajisaka <aajisaka@apache.org>
This commit is contained in:
fang zhenyi 2018-03-01 15:08:21 -08:00 committed by Akira Ajisaka
parent 96e8f260ab
commit 29233c3c4a
No known key found for this signature in database
GPG Key ID: C1EDBB9CA400FD50
19 changed files with 79 additions and 73 deletions

View File

@@ -18,8 +18,8 @@
package org.apache.hadoop.fs.swift.http; package org.apache.hadoop.fs.swift.http;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import java.io.IOException; import java.io.IOException;
import java.lang.reflect.Constructor; import java.lang.reflect.Constructor;
@@ -33,7 +33,8 @@ import java.net.UnknownHostException;
* available in branch-1 too. * available in branch-1 too.
*/ */
public class ExceptionDiags { public class ExceptionDiags {
private static final Log LOG = LogFactory.getLog(ExceptionDiags.class); private static final Logger LOG =
LoggerFactory.getLogger(ExceptionDiags.class);
/** text to point users elsewhere: {@value} */ /** text to point users elsewhere: {@value} */
private static final String FOR_MORE_DETAILS_SEE private static final String FOR_MORE_DETAILS_SEE

View File

@@ -18,8 +18,8 @@
package org.apache.hadoop.fs.swift.http; package org.apache.hadoop.fs.swift.http;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.swift.exceptions.SwiftConfigurationException; import org.apache.hadoop.fs.swift.exceptions.SwiftConfigurationException;
@@ -39,7 +39,8 @@ import static org.apache.hadoop.fs.swift.http.SwiftProtocolConstants.*;
* and then builds an appropriate Properties file. * and then builds an appropriate Properties file.
*/ */
public final class RestClientBindings { public final class RestClientBindings {
private static final Log LOG = LogFactory.getLog(RestClientBindings.class); private static final Logger LOG =
LoggerFactory.getLogger(RestClientBindings.class);
public static final String E_INVALID_NAME = "Invalid swift hostname '%s':" + public static final String E_INVALID_NAME = "Invalid swift hostname '%s':" +
" hostname must in form container.service"; " hostname must in form container.service";

View File

@@ -40,8 +40,8 @@ import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.DefaultHttpRequestRetryHandler; import org.apache.http.impl.client.DefaultHttpRequestRetryHandler;
import org.apache.http.impl.client.HttpClientBuilder; import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.message.BasicHeader; import org.apache.http.message.BasicHeader;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.swift.auth.ApiKeyAuthenticationRequest; import org.apache.hadoop.fs.swift.auth.ApiKeyAuthenticationRequest;
import org.apache.hadoop.fs.swift.auth.ApiKeyCredentials; import org.apache.hadoop.fs.swift.auth.ApiKeyCredentials;
@@ -97,7 +97,8 @@ import static org.apache.hadoop.fs.swift.http.SwiftProtocolConstants.*;
* details. * details.
*/ */
public final class SwiftRestClient { public final class SwiftRestClient {
private static final Log LOG = LogFactory.getLog(SwiftRestClient.class); private static final Logger LOG =
LoggerFactory.getLogger(SwiftRestClient.class);
/** /**
* Header that says "use newest version" -ensures that * Header that says "use newest version" -ensures that
@@ -1725,7 +1726,7 @@ public final class SwiftRestClient {
for (Header header : req.getAllHeaders()) { for (Header header : req.getAllHeaders()) {
builder.append(header.toString()); builder.append(header.toString());
} }
LOG.debug(builder); LOG.debug(builder.toString());
} }
HttpResponse resp = client.execute(req); HttpResponse resp = client.execute(req);
if (LOG.isDebugEnabled()) { if (LOG.isDebugEnabled()) {

View File

@@ -18,8 +18,8 @@
package org.apache.hadoop.fs.swift.snative; package org.apache.hadoop.fs.swift.snative;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation; import org.apache.hadoop.fs.BlockLocation;
@@ -54,8 +54,8 @@ public class SwiftNativeFileSystem extends FileSystem {
/** filesystem prefix: {@value} */ /** filesystem prefix: {@value} */
public static final String SWIFT = "swift"; public static final String SWIFT = "swift";
private static final Log LOG = private static final Logger LOG =
LogFactory.getLog(SwiftNativeFileSystem.class); LoggerFactory.getLogger(SwiftNativeFileSystem.class);
/** /**
* path to user work directory for storing temporary files * path to user work directory for storing temporary files

View File

@@ -22,8 +22,8 @@ import com.fasterxml.jackson.databind.type.CollectionType;
import org.apache.http.Header; import org.apache.http.Header;
import org.apache.http.HttpStatus; import org.apache.http.HttpStatus;
import org.apache.http.message.BasicHeader; import org.apache.http.message.BasicHeader;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileAlreadyExistsException; import org.apache.hadoop.fs.FileAlreadyExistsException;
import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileStatus;
@@ -65,8 +65,8 @@ import java.util.regex.Pattern;
public class SwiftNativeFileSystemStore { public class SwiftNativeFileSystemStore {
private static final Pattern URI_PATTERN = Pattern.compile("\"\\S+?\""); private static final Pattern URI_PATTERN = Pattern.compile("\"\\S+?\"");
private static final String PATTERN = "EEE, d MMM yyyy hh:mm:ss zzz"; private static final String PATTERN = "EEE, d MMM yyyy hh:mm:ss zzz";
private static final Log LOG = private static final Logger LOG =
LogFactory.getLog(SwiftNativeFileSystemStore.class); LoggerFactory.getLogger(SwiftNativeFileSystemStore.class);
private URI uri; private URI uri;
private SwiftRestClient swiftRestClient; private SwiftRestClient swiftRestClient;
@@ -720,7 +720,7 @@ public class SwiftNativeFileSystemStore {
if (LOG.isDebugEnabled()) { if (LOG.isDebugEnabled()) {
LOG.debug(message + ": listing of " + objectPath); LOG.debug(message + ": listing of " + objectPath);
for (FileStatus fileStatus : statuses) { for (FileStatus fileStatus : statuses) {
LOG.debug(fileStatus.getPath()); LOG.debug(fileStatus.getPath().toString());
} }
} }
} }

View File

@@ -18,8 +18,8 @@
package org.apache.hadoop.fs.swift.snative; package org.apache.hadoop.fs.swift.snative;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.fs.FSExceptionMessages; import org.apache.hadoop.fs.FSExceptionMessages;
import org.apache.hadoop.fs.FSInputStream; import org.apache.hadoop.fs.FSInputStream;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
@@ -43,7 +43,8 @@ import java.io.IOException;
*/ */
class SwiftNativeInputStream extends FSInputStream { class SwiftNativeInputStream extends FSInputStream {
private static final Log LOG = LogFactory.getLog(SwiftNativeInputStream.class); private static final Logger LOG =
LoggerFactory.getLogger(SwiftNativeInputStream.class);
/** /**
* range requested off the server: {@value} * range requested off the server: {@value}

View File

@@ -18,8 +18,8 @@
package org.apache.hadoop.fs.swift.snative; package org.apache.hadoop.fs.swift.snative;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.swift.exceptions.SwiftConnectionClosedException; import org.apache.hadoop.fs.swift.exceptions.SwiftConnectionClosedException;
@@ -45,8 +45,8 @@ import java.io.OutputStream;
class SwiftNativeOutputStream extends OutputStream { class SwiftNativeOutputStream extends OutputStream {
public static final int ATTEMPT_LIMIT = 3; public static final int ATTEMPT_LIMIT = 3;
private long filePartSize; private long filePartSize;
private static final Log LOG = private static final Logger LOG =
LogFactory.getLog(SwiftNativeOutputStream.class); LoggerFactory.getLogger(SwiftNativeOutputStream.class);
private Configuration conf; private Configuration conf;
private String key; private String key;
private File backupFile; private File backupFile;

View File

@@ -18,8 +18,8 @@
package org.apache.hadoop.fs.swift.util; package org.apache.hadoop.fs.swift.util;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FSDataOutputStream;
@@ -40,8 +40,8 @@ import java.util.Properties;
*/ */
public class SwiftTestUtils extends org.junit.Assert { public class SwiftTestUtils extends org.junit.Assert {
private static final Log LOG = private static final Logger LOG =
LogFactory.getLog(SwiftTestUtils.class); LoggerFactory.getLogger(SwiftTestUtils.class);
public static final String TEST_FS_SWIFT = "test.fs.swift.name"; public static final String TEST_FS_SWIFT = "test.fs.swift.name";
public static final String IO_FILE_BUFFER_SIZE = "io.file.buffer.size"; public static final String IO_FILE_BUFFER_SIZE = "io.file.buffer.size";

View File

@@ -18,7 +18,7 @@
package org.apache.hadoop.fs.swift.util; package org.apache.hadoop.fs.swift.util;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
@@ -98,7 +98,7 @@ public final class SwiftUtils {
* @param text text message * @param text text message
* @param args args arguments to the print statement * @param args args arguments to the print statement
*/ */
public static void debug(Log log, String text, Object... args) { public static void debug(Logger log, String text, Object... args) {
if (log.isDebugEnabled()) { if (log.isDebugEnabled()) {
log.debug(String.format(text, args)); log.debug(String.format(text, args));
} }
@@ -110,7 +110,7 @@ public final class SwiftUtils {
* @param text text message * @param text text message
* @param ex exception * @param ex exception
*/ */
public static void debugEx(Log log, String text, Exception ex) { public static void debugEx(Logger log, String text, Exception ex) {
if (log.isDebugEnabled()) { if (log.isDebugEnabled()) {
log.debug(text + ex, ex); log.debug(text + ex, ex);
} }
@@ -124,7 +124,7 @@ public final class SwiftUtils {
* @param text text message * @param text text message
* @param args args arguments to the print statement * @param args args arguments to the print statement
*/ */
public static void trace(Log log, String text, Object... args) { public static void trace(Logger log, String text, Object... args) {
if (log.isTraceEnabled()) { if (log.isTraceEnabled()) {
log.trace(String.format(text, args)); log.trace(String.format(text, args));
} }

View File

@@ -18,8 +18,8 @@
package org.apache.hadoop.fs.swift; package org.apache.hadoop.fs.swift;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileStatus;
@@ -53,8 +53,8 @@ import static org.apache.hadoop.fs.swift.util.SwiftTestUtils.noteAction;
public class SwiftFileSystemBaseTest extends Assert implements public class SwiftFileSystemBaseTest extends Assert implements
SwiftTestConstants { SwiftTestConstants {
protected static final Log LOG = protected static final Logger LOG =
LogFactory.getLog(SwiftFileSystemBaseTest.class); LoggerFactory.getLogger(SwiftFileSystemBaseTest.class);
protected SwiftNativeFileSystem fs; protected SwiftNativeFileSystem fs;
protected static SwiftNativeFileSystem lastFs; protected static SwiftNativeFileSystem lastFs;
protected byte[] data = SwiftTestUtils.dataset(getBlockSize() * 2, 0, 255); protected byte[] data = SwiftTestUtils.dataset(getBlockSize() * 2, 0, 255);

View File

@@ -18,8 +18,8 @@
package org.apache.hadoop.fs.swift; package org.apache.hadoop.fs.swift;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.junit.Test; import org.junit.Test;
import java.net.URL; import java.net.URL;
@@ -28,8 +28,8 @@ import java.net.URL;
* This test just debugs which log resources are being picked up * This test just debugs which log resources are being picked up
*/ */
public class TestLogResources implements SwiftTestConstants { public class TestLogResources implements SwiftTestConstants {
protected static final Log LOG = protected static final Logger LOG =
LogFactory.getLog(TestLogResources.class); LoggerFactory.getLogger(TestLogResources.class);
private void printf(String format, Object... args) { private void printf(String format, Object... args) {
String msg = String.format(format, args); String msg = String.format(format, args);

View File

@@ -18,8 +18,8 @@
package org.apache.hadoop.fs.swift; package org.apache.hadoop.fs.swift;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
@@ -44,8 +44,8 @@ import org.junit.Test;
* to get confused. * to get confused.
*/ */
public class TestReadPastBuffer extends SwiftFileSystemBaseTest { public class TestReadPastBuffer extends SwiftFileSystemBaseTest {
protected static final Log LOG = protected static final Logger LOG =
LogFactory.getLog(TestReadPastBuffer.class); LoggerFactory.getLogger(TestReadPastBuffer.class);
public static final int SWIFT_READ_BLOCKSIZE = 4096; public static final int SWIFT_READ_BLOCKSIZE = 4096;
public static final int SEEK_FILE_LEN = SWIFT_READ_BLOCKSIZE * 2; public static final int SEEK_FILE_LEN = SWIFT_READ_BLOCKSIZE * 2;

View File

@@ -18,8 +18,8 @@
package org.apache.hadoop.fs.swift; package org.apache.hadoop.fs.swift;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
@@ -48,8 +48,8 @@ import java.io.IOException;
* to get confused. * to get confused.
*/ */
public class TestSeek extends SwiftFileSystemBaseTest { public class TestSeek extends SwiftFileSystemBaseTest {
protected static final Log LOG = protected static final Logger LOG =
LogFactory.getLog(TestSeek.class); LoggerFactory.getLogger(TestSeek.class);
public static final int SMALL_SEEK_FILE_LEN = 256; public static final int SMALL_SEEK_FILE_LEN = 256;
private Path testPath; private Path testPath;

View File

@@ -19,8 +19,8 @@
package org.apache.hadoop.fs.swift; package org.apache.hadoop.fs.swift;
import org.junit.Assert; import org.junit.Assert;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.ParentNotDirectoryException; import org.apache.hadoop.fs.ParentNotDirectoryException;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
@@ -46,8 +46,8 @@ import static org.apache.hadoop.fs.swift.util.SwiftTestUtils.writeTextFile;
*/ */
public class TestSwiftFileSystemBasicOps extends SwiftFileSystemBaseTest { public class TestSwiftFileSystemBasicOps extends SwiftFileSystemBaseTest {
private static final Log LOG = private static final Logger LOG =
LogFactory.getLog(TestSwiftFileSystemBasicOps.class); LoggerFactory.getLogger(TestSwiftFileSystemBasicOps.class);
@Test(timeout = SWIFT_TEST_TIMEOUT) @Test(timeout = SWIFT_TEST_TIMEOUT)
public void testLsRoot() throws Throwable { public void testLsRoot() throws Throwable {

View File

@@ -18,8 +18,8 @@
package org.apache.hadoop.fs.swift; package org.apache.hadoop.fs.swift;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.swift.util.SwiftTestUtils; import org.apache.hadoop.fs.swift.util.SwiftTestUtils;
@@ -37,8 +37,8 @@ import java.util.concurrent.TimeUnit;
* Consider it "best effort" * Consider it "best effort"
*/ */
public class TestSwiftFileSystemConcurrency extends SwiftFileSystemBaseTest { public class TestSwiftFileSystemConcurrency extends SwiftFileSystemBaseTest {
protected static final Log LOG = protected static final Logger LOG =
LogFactory.getLog(TestSwiftFileSystemConcurrency.class); LoggerFactory.getLogger(TestSwiftFileSystemConcurrency.class);
private Exception thread1Ex, thread2Ex; private Exception thread1Ex, thread2Ex;
public static final String TEST_RACE_CONDITION_ON_DELETE_DIR = public static final String TEST_RACE_CONDITION_ON_DELETE_DIR =
"/test/testraceconditionondirdeletetest"; "/test/testraceconditionondirdeletetest";

View File

@@ -18,8 +18,8 @@
package org.apache.hadoop.fs.swift; package org.apache.hadoop.fs.swift;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystemContractBaseTest; import org.apache.hadoop.fs.FileSystemContractBaseTest;
import org.apache.hadoop.fs.ParentNotDirectoryException; import org.apache.hadoop.fs.ParentNotDirectoryException;
@@ -47,8 +47,8 @@ import java.net.URISyntaxException;
*/ */
public class TestSwiftFileSystemContract public class TestSwiftFileSystemContract
extends FileSystemContractBaseTest { extends FileSystemContractBaseTest {
private static final Log LOG = private static final Logger LOG =
LogFactory.getLog(TestSwiftFileSystemContract.class); LoggerFactory.getLogger(TestSwiftFileSystemContract.class);
/** /**
* Override this if the filesystem is not case sensitive * Override this if the filesystem is not case sensitive

View File

@@ -17,8 +17,8 @@
*/ */
package org.apache.hadoop.fs.swift; package org.apache.hadoop.fs.swift;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.swift.http.RestClientBindings; import org.apache.hadoop.fs.swift.http.RestClientBindings;
import org.apache.hadoop.fs.swift.http.SwiftRestClient; import org.apache.hadoop.fs.swift.http.SwiftRestClient;
@@ -36,7 +36,8 @@ import static org.junit.Assert.assertTrue;
* Unit tests for SwiftObjectPath class. * Unit tests for SwiftObjectPath class.
*/ */
public class TestSwiftObjectPath implements SwiftTestConstants { public class TestSwiftObjectPath implements SwiftTestConstants {
private static final Log LOG = LogFactory.getLog(TestSwiftObjectPath.class); private static final Logger LOG =
LoggerFactory.getLogger(TestSwiftObjectPath.class);
/** /**
* What an endpoint looks like. This is derived from a (valid) * What an endpoint looks like. This is derived from a (valid)

View File

@@ -18,8 +18,8 @@
package org.apache.hadoop.fs.swift.http; package org.apache.hadoop.fs.swift.http;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.swift.SwiftTestConstants; import org.apache.hadoop.fs.swift.SwiftTestConstants;
@@ -39,8 +39,8 @@ import java.io.IOException;
import java.net.URI; import java.net.URI;
public class TestSwiftRestClient implements SwiftTestConstants { public class TestSwiftRestClient implements SwiftTestConstants {
private static final Log LOG = private static final Logger LOG =
LogFactory.getLog(TestSwiftRestClient.class); LoggerFactory.getLogger(TestSwiftRestClient.class);
private Configuration conf; private Configuration conf;
private boolean runTests; private boolean runTests;
@@ -110,7 +110,7 @@ public class TestSwiftRestClient implements SwiftTestConstants {
//expected //expected
} }
for (DurationStats stats: client.getOperationStatistics()) { for (DurationStats stats: client.getOperationStatistics()) {
LOG.info(stats); LOG.info(stats.toString());
} }
} }

View File

@@ -18,8 +18,8 @@
package org.apache.hadoop.fs.swift.scale; package org.apache.hadoop.fs.swift.scale;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.swift.util.Duration; import org.apache.hadoop.fs.swift.util.Duration;
@@ -29,7 +29,8 @@ import org.junit.Test;
public class TestWriteManySmallFiles extends SwiftScaleTestBase { public class TestWriteManySmallFiles extends SwiftScaleTestBase {
public static final Log LOG = LogFactory.getLog(TestWriteManySmallFiles.class); public static final Logger LOG =
LoggerFactory.getLogger(TestWriteManySmallFiles.class);
@Test(timeout = SWIFT_BULK_IO_TEST_TIMEOUT) @Test(timeout = SWIFT_BULK_IO_TEST_TIMEOUT)
public void testScaledWriteThenRead() throws Throwable { public void testScaledWriteThenRead() throws Throwable {