HADOOP-6884. Add LOG.isDebugEnabled() guard for each LOG.debug(..). Contributed by Erik Steffl
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@990460 13f79535-47bb-0310-9956-ffa450edef68
commit 7efb9640be
parent 97ce72a97e
@@ -127,6 +127,9 @@ Trunk (unreleased changes)
 
   OPTIMIZATIONS
 
+    HADOOP-6884. Add LOG.isDebugEnabled() guard for each LOG.debug(..).
+    (Erik Steffl via szetszwo)
+
   BUG FIXES
 
     HADOOP-6638. try to relogin in a case of failed RPC connection (expired tgt)
@@ -331,7 +331,9 @@ public class NativeS3FileSystem extends FileSystem {
       throw new IOException("File already exists:"+f);
     }
 
+    if(LOG.isDebugEnabled()) {
     LOG.debug("Creating new file '" + f + "' in S3");
+    }
     Path absolutePath = makeAbsolute(f);
     String key = pathToKey(absolutePath);
     return new FSDataOutputStream(new NativeS3FsOutputStream(getConf(), store,
@@ -344,7 +346,10 @@ public class NativeS3FileSystem extends FileSystem {
     try {
       status = getFileStatus(f);
     } catch (FileNotFoundException e) {
-      LOG.debug("Delete called for '" + f + "' but file does not exist, so returning false");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("Delete called for '" + f +
+            "' but file does not exist, so returning false");
+      }
       return false;
     }
     Path absolutePath = makeAbsolute(f);
@@ -356,7 +361,9 @@ public class NativeS3FileSystem extends FileSystem {
 
       createParent(f);
 
+      if(LOG.isDebugEnabled()) {
       LOG.debug("Deleting directory '" + f + "'");
+      }
       String priorLastKey = null;
       do {
         PartialListing listing = store.list(key, S3_MAX_LISTING_LENGTH, priorLastKey, true);
@@ -372,7 +379,9 @@ public class NativeS3FileSystem extends FileSystem {
         //this is fine, we don't require a marker
       }
     } else {
+      if(LOG.isDebugEnabled()) {
       LOG.debug("Deleting file '" + f + "'");
+      }
       createParent(f);
       store.delete(key);
     }
@@ -388,27 +397,40 @@ public class NativeS3FileSystem extends FileSystem {
       return newDirectory(absolutePath);
     }
 
+    if(LOG.isDebugEnabled()) {
     LOG.debug("getFileStatus retrieving metadata for key '" + key + "'");
+    }
     FileMetadata meta = store.retrieveMetadata(key);
     if (meta != null) {
+      if(LOG.isDebugEnabled()) {
       LOG.debug("getFileStatus returning 'file' for key '" + key + "'");
+      }
       return newFile(meta, absolutePath);
     }
     if (store.retrieveMetadata(key + FOLDER_SUFFIX) != null) {
-      LOG.debug("getFileStatus returning 'directory' for key '" + key + "' as '"
-          + key + FOLDER_SUFFIX + "' exists");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("getFileStatus returning 'directory' for key '" + key +
+            "' as '" + key + FOLDER_SUFFIX + "' exists");
+      }
       return newDirectory(absolutePath);
     }
 
+    if(LOG.isDebugEnabled()) {
     LOG.debug("getFileStatus listing key '" + key + "'");
+    }
     PartialListing listing = store.list(key, 1);
     if (listing.getFiles().length > 0 ||
         listing.getCommonPrefixes().length > 0) {
-      LOG.debug("getFileStatus returning 'directory' for key '" + key + "' as it has contents");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("getFileStatus returning 'directory' for key '" + key +
+            "' as it has contents");
+      }
       return newDirectory(absolutePath);
     }
 
+    if(LOG.isDebugEnabled()) {
     LOG.debug("getFileStatus could not find key '" + key + "'");
+    }
     throw new FileNotFoundException("No such file or directory '" + absolutePath + "'");
   }
 
@@ -510,7 +532,9 @@ public class NativeS3FileSystem extends FileSystem {
 
       }
     } catch (FileNotFoundException e) {
+      if(LOG.isDebugEnabled()) {
       LOG.debug("Making dir '" + f + "' in S3");
+      }
       String key = pathToKey(f) + FOLDER_SUFFIX;
       store.storeEmptyFile(key);
     }
@@ -560,22 +584,35 @@ public class NativeS3FileSystem extends FileSystem {
     try {
       boolean dstIsFile = getFileStatus(dst).isFile();
       if (dstIsFile) {
-        LOG.debug(debugPreamble + "returning false as dst is an already existing file");
+        if(LOG.isDebugEnabled()) {
+          LOG.debug(debugPreamble +
+              "returning false as dst is an already existing file");
+        }
         return false;
       } else {
+        if(LOG.isDebugEnabled()) {
         LOG.debug(debugPreamble + "using dst as output directory");
+        }
         dstKey = pathToKey(makeAbsolute(new Path(dst, src.getName())));
       }
     } catch (FileNotFoundException e) {
+      if(LOG.isDebugEnabled()) {
       LOG.debug(debugPreamble + "using dst as output destination");
+      }
       dstKey = pathToKey(makeAbsolute(dst));
       try {
         if (getFileStatus(dst.getParent()).isFile()) {
-          LOG.debug(debugPreamble + "returning false as dst parent exists and is a file");
+          if(LOG.isDebugEnabled()) {
+            LOG.debug(debugPreamble +
+                "returning false as dst parent exists and is a file");
+          }
           return false;
         }
       } catch (FileNotFoundException ex) {
-        LOG.debug(debugPreamble + "returning false as dst parent does not exist");
+        if(LOG.isDebugEnabled()) {
+          LOG.debug(debugPreamble +
+              "returning false as dst parent does not exist");
+        }
         return false;
       }
     }
@@ -584,15 +621,22 @@ public class NativeS3FileSystem extends FileSystem {
     try {
       srcIsFile = getFileStatus(src).isFile();
     } catch (FileNotFoundException e) {
+      if(LOG.isDebugEnabled()) {
       LOG.debug(debugPreamble + "returning false as src does not exist");
+      }
       return false;
     }
     if (srcIsFile) {
-      LOG.debug(debugPreamble + "src is file, so doing copy then delete in S3");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug(debugPreamble +
+            "src is file, so doing copy then delete in S3");
+      }
       store.copy(srcKey, dstKey);
       store.delete(srcKey);
     } else {
+      if(LOG.isDebugEnabled()) {
       LOG.debug(debugPreamble + "src is directory, so copying contents");
+      }
       store.storeEmptyFile(dstKey + FOLDER_SUFFIX);
 
       List<String> keysToDelete = new ArrayList<String>();
@@ -606,7 +650,10 @@ public class NativeS3FileSystem extends FileSystem {
         priorLastKey = listing.getPriorLastKey();
       } while (priorLastKey != null);
 
-      LOG.debug(debugPreamble + "all files in src copied, now removing src files");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug(debugPreamble +
+            "all files in src copied, now removing src files");
+      }
       for (String key: keysToDelete) {
         store.delete(key);
       }
@@ -616,8 +663,10 @@ public class NativeS3FileSystem extends FileSystem {
       } catch (FileNotFoundException e) {
         //this is fine, we don't require a marker
       }
+      if(LOG.isDebugEnabled()) {
       LOG.debug(debugPreamble + "done");
       }
+    }
 
     return true;
   }
@@ -1834,7 +1834,7 @@ public class SequenceFile {
         --noBufferedValues;
 
         // Sanity check
-        if (valLength < 0) {
+        if ((valLength < 0) && LOG.isDebugEnabled()) {
           LOG.debug(val + " is a zero-length value");
         }
       }
@@ -1873,7 +1873,7 @@ public class SequenceFile {
         --noBufferedValues;
 
         // Sanity check
-        if (valLength < 0) {
+        if ((valLength < 0) && LOG.isDebugEnabled()) {
           LOG.debug(val + " is a zero-length value");
         }
       }
@@ -2415,7 +2415,9 @@ public class SequenceFile {
     }
 
     private int sortPass(boolean deleteInput) throws IOException {
+      if(LOG.isDebugEnabled()) {
      LOG.debug("running sort pass");
+      }
      SortPass sortPass = new SortPass();         // make the SortPass
      sortPass.setProgressable(progressable);
      mergeSort = new MergeSort(sortPass.new SeqFileComparator());
|
@ -2515,7 +2517,9 @@ public class SequenceFile {
|
||||||
}
|
}
|
||||||
|
|
||||||
// buffer is full -- sort & flush it
|
// buffer is full -- sort & flush it
|
||||||
|
if(LOG.isDebugEnabled()) {
|
||||||
LOG.debug("flushing segment " + segments);
|
LOG.debug("flushing segment " + segments);
|
||||||
|
}
|
||||||
rawBuffer = rawKeys.getData();
|
rawBuffer = rawKeys.getData();
|
||||||
sort(count);
|
sort(count);
|
||||||
// indicate we're making progress
|
// indicate we're making progress
|
||||||
|
@@ -2798,7 +2802,9 @@ public class SequenceFile {
 
     /** sort calls this to generate the final merged output */
     private int mergePass(Path tmpDir) throws IOException {
+      if(LOG.isDebugEnabled()) {
      LOG.debug("running merge pass");
+      }
      Writer writer = cloneFileAttributes(
        outFile.suffix(".0"), outFile, null);
      RawKeyValueIterator r = merge(outFile.suffix(".0"),
@@ -3028,7 +3034,9 @@ public class SequenceFile {
       Path outputFile = lDirAlloc.getLocalPathForWrite(
                                           tmpFilename.toString(),
                                           approxOutputSize, conf);
+      if(LOG.isDebugEnabled()) {
      LOG.debug("writing intermediate results to " + outputFile);
+      }
      Writer writer = cloneFileAttributes(
        fs.makeQualified(segmentsToMerge.get(0).segmentPathName),
        fs.makeQualified(outputFile), null);
@@ -107,8 +107,10 @@ public class CodecPool {
       LOG.info("Got brand-new compressor");
     } else {
       compressor.reinit(conf);
+      if(LOG.isDebugEnabled()) {
       LOG.debug("Got recycled compressor");
       }
+    }
     return compressor;
   }
 
@@ -131,8 +133,10 @@ public class CodecPool {
       decompressor = codec.createDecompressor();
       LOG.info("Got brand-new decompressor");
     } else {
+      if(LOG.isDebugEnabled()) {
       LOG.debug("Got recycled decompressor");
       }
+    }
     return decompressor;
   }
 
@@ -76,6 +76,8 @@ public class BuiltInZlibDeflater extends Deflater implements Compressor {
       LOG.warn(strategy + " not supported by BuiltInZlibDeflater.");
       setStrategy(DEFAULT_STRATEGY);
     }
+    if(LOG.isDebugEnabled()) {
     LOG.debug("Reinit compressor with new compression configuration");
     }
+    }
   }
@@ -253,8 +253,10 @@ public class ZlibCompressor implements Compressor {
       stream = init(level.compressionLevel(),
           strategy.compressionStrategy(),
           windowBits.windowBits());
+      if(LOG.isDebugEnabled()) {
       LOG.debug("Reinit compressor with new compression configuration");
       }
+    }
 
   public synchronized void setInput(byte[] b, int off, int len) {
     if (b== null) {
@@ -278,8 +278,10 @@ final class Compression {
           // it.
           LOG.warn("Compressor obtained from CodecPool already finished()");
         } else {
+          if(LOG.isDebugEnabled()) {
           LOG.debug("Got a compressor: " + compressor.hashCode());
           }
+        }
         /**
          * Following statement is necessary to get around bugs in 0.18 where a
          * compressor is referenced after returned back to the codec pool.
@@ -293,7 +295,9 @@ final class Compression {
 
     public void returnCompressor(Compressor compressor) {
       if (compressor != null) {
+        if(LOG.isDebugEnabled()) {
         LOG.debug("Return a compressor: " + compressor.hashCode());
+        }
         CodecPool.returnCompressor(compressor);
       }
     }
@@ -308,8 +312,10 @@ final class Compression {
           // it.
           LOG.warn("Deompressor obtained from CodecPool already finished()");
         } else {
+          if(LOG.isDebugEnabled()) {
           LOG.debug("Got a decompressor: " + decompressor.hashCode());
           }
+        }
         /**
          * Following statement is necessary to get around bugs in 0.18 where a
          * decompressor is referenced after returned back to the codec pool.
@@ -324,7 +330,9 @@ final class Compression {
 
     public void returnDecompressor(Decompressor decompressor) {
       if (decompressor != null) {
+        if(LOG.isDebugEnabled()) {
         LOG.debug("Returned a decompressor: " + decompressor.hashCode());
+        }
         CodecPool.returnDecompressor(decompressor);
       }
     }
@@ -67,12 +67,14 @@ class RetryInvocationHandler implements InvocationHandler {
           }
           return null;
         }
+        if(LOG.isDebugEnabled()) {
         LOG.debug("Exception while invoking " + method.getName()
             + " of " + implementation.getClass() + ". Retrying."
             + StringUtils.stringifyException(e));
         }
       }
     }
+    }
 
   private Object invokeMethod(Method method, Object[] args) throws Throwable {
     try {
@@ -444,8 +444,10 @@ public class Client {
       disposeSasl();
       if (shouldAuthenticateOverKrb()) {
         if (currRetries < maxRetries) {
+          if(LOG.isDebugEnabled()) {
           LOG.debug("Exception encountered while connecting to "
               + "the server : " + ex);
+          }
           // try re-login
           if (UserGroupInformation.isLoginKeytabBased()) {
             UserGroupInformation.getLoginUser().reloginFromKeytab();
@@ -615,7 +615,9 @@ public abstract class Server {
       // If there were some calls that have not been sent out for a
       // long time, discard them.
       //
+      if(LOG.isDebugEnabled()) {
       LOG.debug("Checking for old call responses.");
+      }
       ArrayList<Call> calls;
 
       // get the list of channels from list of keys.
@@ -192,16 +192,15 @@ class WritableRpcEngine implements RpcEngine {
 
     public Object invoke(Object proxy, Method method, Object[] args)
       throws Throwable {
-      final boolean logDebug = LOG.isDebugEnabled();
       long startTime = 0;
-      if (logDebug) {
+      if (LOG.isDebugEnabled()) {
         startTime = System.currentTimeMillis();
       }
 
       ObjectWritable value = (ObjectWritable)
         client.call(new Invocation(method, args), address,
                     protocol, ticket, rpcTimeout);
-      if (logDebug) {
+      if (LOG.isDebugEnabled()) {
         long callTime = System.currentTimeMillis() - startTime;
         LOG.debug("Call: " + method.getName() + " " + callTime);
       }
@@ -333,7 +333,9 @@ public class NetworkTopology {
           numOfRacks++;
         }
       }
+      if(LOG.isDebugEnabled()) {
       LOG.debug("NetworkTopology became:\n" + this.toString());
+      }
     } finally {
       netlock.writeLock().unlock();
     }
@@ -359,7 +361,9 @@ public class NetworkTopology {
           numOfRacks--;
         }
       }
+      if(LOG.isDebugEnabled()) {
       LOG.debug("NetworkTopology became:\n" + this.toString());
+      }
     } finally {
       netlock.writeLock().unlock();
     }
@@ -78,14 +78,18 @@ public class Groups {
     long now = System.currentTimeMillis();
     // if cache has a value and it hasn't expired
     if (groups != null && (groups.getTimestamp() + cacheTimeout > now)) {
+      if(LOG.isDebugEnabled()) {
       LOG.debug("Returning cached groups for '" + user + "'");
+      }
       return groups.getGroups();
     }
 
     // Create and cache user's groups
     groups = new CachedGroups(impl.getGroups(user));
     userToGroupsMap.put(user, groups);
+    if(LOG.isDebugEnabled()) {
     LOG.debug("Returning fetched groups for '" + user + "'");
+    }
     return groups.getGroups();
   }
 
@@ -132,7 +136,9 @@ public class Groups {
    */
   public static Groups getUserToGroupsMappingService(Configuration conf) {
     if(GROUPS == null) {
+      if(LOG.isDebugEnabled()) {
       LOG.debug(" Creating new Groups object");
+      }
       GROUPS = new Groups(conf);
     }
     return GROUPS;
@@ -80,8 +80,7 @@ public class SaslRpcClient {
       break;
     case KERBEROS:
       if (LOG.isDebugEnabled()) {
-        LOG
-          .debug("Creating SASL " + AuthMethod.KERBEROS.getMechanismName()
+        LOG.debug("Creating SASL " + AuthMethod.KERBEROS.getMechanismName()
             + " client. Server's Kerberos principal name is "
             + serverPrincipal);
       }
@@ -534,13 +534,17 @@ public class UserGroupInformation {
         while (true) {
           try {
             long now = System.currentTimeMillis();
+            if(LOG.isDebugEnabled()) {
             LOG.debug("Current time is " + now);
             LOG.debug("Next refresh is " + nextRefresh);
+            }
             if (now < nextRefresh) {
               Thread.sleep(nextRefresh - now);
             }
             Shell.execCommand(cmd, "-R");
+            if(LOG.isDebugEnabled()) {
             LOG.debug("renewed ticket");
+            }
             reloginFromTicketCache();
             tgt = getTGT();
             if (tgt == null) {
@@ -115,8 +115,10 @@ public class ServiceAuthorizationManager {
         // just keep going
       }
     }
-    LOG.debug("for protocol authorization compare (" + clientPrincipal + "): "
-        + shortName + " with " + user.getShortUserName());
+    if(LOG.isDebugEnabled()) {
+      LOG.debug("for protocol authorization compare (" + clientPrincipal +
+          "): " + shortName + " with " + user.getShortUserName());
+    }
     if((shortName != null && !shortName.equals(user.getShortUserName())) ||
        !acl.isUserAllowed(user)) {
       AUDITLOG.warn(AUTHZ_FAILED_FOR + user + " for protocol="+protocol);
@@ -316,7 +316,9 @@ public class GenericOptionsParser {
     if (!localFs.exists(p)) {
       throw new FileNotFoundException("File "+fileName+" does not exist.");
     }
+    if(LOG.isDebugEnabled()) {
     LOG.debug("setting conf tokensFile: " + fileName);
+    }
     conf.set("mapreduce.job.credentials.json", localFs.makeQualified(p)
         .toString());
 
@@ -42,15 +42,20 @@ public class NativeCodeLoader {
 
   static {
     // Try to load native hadoop library and set fallback flag appropriately
+    if(LOG.isDebugEnabled()) {
     LOG.debug("Trying to load the custom-built native-hadoop library...");
+    }
     try {
       System.loadLibrary("hadoop");
       LOG.info("Loaded the native-hadoop library");
       nativeCodeLoaded = true;
     } catch (Throwable t) {
       // Ignore failure to load
+      if(LOG.isDebugEnabled()) {
       LOG.debug("Failed to load native-hadoop with error: " + t);
-      LOG.debug("java.library.path=" + System.getProperty("java.library.path"));
+      LOG.debug("java.library.path=" +
+          System.getProperty("java.library.path"));
+      }
     }
 
     if (!nativeCodeLoaded) {
@@ -94,7 +94,9 @@ public class ProbabilityModel {
 
     float ret = conf.getFloat(newProbName,
         conf.getFloat(ALL_PROBABILITIES, DEFAULT_PROB));
+    if(LOG.isDebugEnabled()) {
     LOG.debug("Request for " + newProbName + " returns=" + ret);
+    }
     // Make sure that probability level is valid.
     if (ret < DEFAULT_PROB || ret > MAX_PROB) {
       LOG.info("Probability level is incorrect. Default value is set");
@@ -310,7 +310,9 @@ public class LoadGenerator extends Configured implements Tool {
       }
     }
 
+    if(LOG.isDebugEnabled()) {
     LOG.debug("Done with testing. Waiting for threads to finish.");
+    }
     for (DFSClientThread thread : threads) {
       thread.join();
       for (int i=0; i<TOTAL_OP_TYPES; i++) {
@@ -54,7 +54,9 @@ public class TestArrayFile extends TestCase {
   }
 
   private static RandomDatum[] generate(int count) {
+    if(LOG.isDebugEnabled()) {
    LOG.debug("generating " + count + " records in debug");
+    }
    RandomDatum[] data = new RandomDatum[count];
    RandomDatum.Generator generator = new RandomDatum.Generator();
    for (int i = 0; i < count; i++) {
@@ -68,7 +70,9 @@ public class TestArrayFile extends TestCase {
       throws IOException {
     Configuration conf = new Configuration();
     MapFile.delete(fs, file);
+    if(LOG.isDebugEnabled()) {
     LOG.debug("creating with " + data.length + " debug");
+    }
     ArrayFile.Writer writer = new ArrayFile.Writer(conf, fs, file, RandomDatum.class);
     writer.setIndexInterval(100);
     for (int i = 0; i < data.length; i++)
@@ -79,7 +83,9 @@ public class TestArrayFile extends TestCase {
   private static void readTest(FileSystem fs, RandomDatum[] data, String file, Configuration conf)
     throws IOException {
     RandomDatum v = new RandomDatum();
+    if(LOG.isDebugEnabled()) {
     LOG.debug("reading " + data.length + " debug");
+    }
     ArrayFile.Reader reader = new ArrayFile.Reader(fs, file, conf);
     for (int i = 0; i < data.length; i++) { // try forwards
       reader.get(i, v);
@@ -94,8 +100,10 @@ public class TestArrayFile extends TestCase {
       }
     }
     reader.close();
+    if(LOG.isDebugEnabled()) {
     LOG.debug("done reading " + data.length + " debug");
     }
+    }
 
 
   /** For debugging and testing. */
@@ -83,7 +83,9 @@ public class TestAvroRpc extends TestCase {
     try {
       proxy.error();
     } catch (AvroRemoteException e) {
+      if(LOG.isDebugEnabled()) {
       LOG.debug("Caught " + e);
+      }
       caught = true;
     }
     assertTrue(caught);
@@ -296,7 +296,9 @@ public class TestRPC extends TestCase {
     try {
       proxy.error();
     } catch (IOException e) {
+      if(LOG.isDebugEnabled()) {
       LOG.debug("Caught " + e);
+      }
       caught = true;
     }
     assertTrue(caught);
@@ -482,7 +482,9 @@ public abstract class AbstractDaemonCluster {
         LOG.info("Daemon is : " + daemon.getHostName() + " pinging...");
         break;
       } catch (Exception exp) {
+        if(LOG.isDebugEnabled()) {
         LOG.debug(daemon.getHostName() + " is waiting to come up.");
+        }
         waitFor(60000);
       }
     }
@@ -502,7 +504,9 @@ public abstract class AbstractDaemonCluster {
     while (true) {
       try {
         daemon.ping();
+        if(LOG.isDebugEnabled()) {
         LOG.debug(daemon.getHostName() +" is waiting state to stop.");
+        }
         waitFor(60000);
       } catch (Exception exp) {
         LOG.info("Daemon is : " + daemon.getHostName() + " stopped...");