From 7efb9640be26aabe3878310e82248a1b6b767a9a Mon Sep 17 00:00:00 2001
From: Tsz-wo Sze
Date: Sat, 28 Aug 2010 22:44:10 +0000
Subject: [PATCH] HADOOP-6884. Add LOG.isDebugEnabled() guard for each
 LOG.debug(..).  Contributed by Erik Steffl

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@990460 13f79535-47bb-0310-9956-ffa450edef68
---
 CHANGES.txt                                        |  3 +
 .../fs/s3native/NativeS3FileSystem.java            | 93 ++++++++++++++-----
 .../org/apache/hadoop/io/SequenceFile.java         | 20 ++--
 .../apache/hadoop/io/compress/CodecPool.java       |  8 +-
 .../io/compress/zlib/BuiltInZlibDeflater.java      |  4 +-
 .../io/compress/zlib/ZlibCompressor.java           |  4 +-
 .../hadoop/io/file/tfile/Compression.java          | 16 +++-
 .../io/retry/RetryInvocationHandler.java           |  8 +-
 src/java/org/apache/hadoop/ipc/Client.java         |  6 +-
 src/java/org/apache/hadoop/ipc/Server.java         |  4 +-
 .../apache/hadoop/ipc/WritableRpcEngine.java       |  5 +-
 .../apache/hadoop/net/NetworkTopology.java         |  8 +-
 .../org/apache/hadoop/security/Groups.java         | 12 ++-
 .../apache/hadoop/security/SaslRpcClient.java      |  7 +-
 .../hadoop/security/UserGroupInformation.java      | 10 +-
 .../ServiceAuthorizationManager.java               |  6 +-
 .../hadoop/util/GenericOptionsParser.java          |  4 +-
 .../apache/hadoop/util/NativeCodeLoader.java       | 11 ++-
 .../apache/hadoop/fi/ProbabilityModel.java         |  4 +-
 .../fs/loadGenerator/LoadGenerator.java            |  4 +-
 .../org/apache/hadoop/io/TestArrayFile.java        | 16 +++-
 .../org/apache/hadoop/ipc/TestAvroRpc.java         |  4 +-
 .../core/org/apache/hadoop/ipc/TestRPC.java        |  4 +-
 .../test/system/AbstractDaemonCluster.java         |  8 +-
 24 files changed, 196 insertions(+), 73 deletions(-)

diff --git a/CHANGES.txt b/CHANGES.txt
index a17d4ed0439..ef7ff9b2594 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -127,6 +127,9 @@ Trunk (unreleased changes)
 
   OPTIMIZATIONS
 
+    HADOOP-6884. Add LOG.isDebugEnabled() guard for each LOG.debug(..).
+    (Erik Steffl via szetszwo)
+
   BUG FIXES
 
     HADOOP-6638. try to relogin in a case of failed RPC connection (expired tgt)
diff --git a/src/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java b/src/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java
index 1f79592fb3d..ea672c44e6d 100644
--- a/src/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java
+++ b/src/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java
@@ -331,7 +331,9 @@ public FSDataOutputStream create(Path f, FsPermission permission,
       throw new IOException("File already exists:"+f);
     }
 
-    LOG.debug("Creating new file '" + f + "' in S3");
+    if(LOG.isDebugEnabled()) {
+      LOG.debug("Creating new file '" + f + "' in S3");
+    }
     Path absolutePath = makeAbsolute(f);
     String key = pathToKey(absolutePath);
     return new FSDataOutputStream(new NativeS3FsOutputStream(getConf(), store,
@@ -344,7 +346,10 @@ public boolean delete(Path f, boolean recurse) throws IOException {
     try {
       status = getFileStatus(f);
     } catch (FileNotFoundException e) {
-      LOG.debug("Delete called for '" + f + "' but file does not exist, so returning false");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("Delete called for '" + f +
+            "' but file does not exist, so returning false");
+      }
       return false;
     }
     Path absolutePath = makeAbsolute(f);
@@ -356,7 +361,9 @@ public boolean delete(Path f, boolean recurse) throws IOException {
       createParent(f);
-      LOG.debug("Deleting directory '" + f + "'");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("Deleting directory '" + f + "'");
+      }
       String priorLastKey = null;
       do {
         PartialListing listing = store.list(key, S3_MAX_LISTING_LENGTH, priorLastKey, true);
@@ -372,7 +379,9 @@ public boolean delete(Path f, boolean recurse) throws IOException {
         //this is fine, we don't require a marker
       }
     } else {
-      LOG.debug("Deleting file '" + f + "'");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("Deleting file '" + f + "'");
+      }
       createParent(f);
       store.delete(key);
     }
@@ -388,27 +397,40 @@ public FileStatus getFileStatus(Path f) throws IOException {
       return newDirectory(absolutePath);
     }
 
-    LOG.debug("getFileStatus retrieving metadata for key '" + key + "'");
+    if(LOG.isDebugEnabled()) {
+      LOG.debug("getFileStatus retrieving metadata for key '" + key + "'");
+    }
     FileMetadata meta = store.retrieveMetadata(key);
     if (meta != null) {
-      LOG.debug("getFileStatus returning 'file' for key '" + key + "'");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("getFileStatus returning 'file' for key '" + key + "'");
+      }
       return newFile(meta, absolutePath);
     }
     if (store.retrieveMetadata(key + FOLDER_SUFFIX) != null) {
-      LOG.debug("getFileStatus returning 'directory' for key '" + key + "' as '"
-          + key + FOLDER_SUFFIX + "' exists");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("getFileStatus returning 'directory' for key '" + key +
+            "' as '" + key + FOLDER_SUFFIX + "' exists");
+      }
       return newDirectory(absolutePath);
     }
 
-    LOG.debug("getFileStatus listing key '" + key + "'");
+    if(LOG.isDebugEnabled()) {
+      LOG.debug("getFileStatus listing key '" + key + "'");
+    }
     PartialListing listing = store.list(key, 1);
     if (listing.getFiles().length > 0 || listing.getCommonPrefixes().length > 0) {
-      LOG.debug("getFileStatus returning 'directory' for key '" + key + "' as it has contents");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("getFileStatus returning 'directory' for key '" + key +
+            "' as it has contents");
+      }
       return newDirectory(absolutePath);
     }
 
-    LOG.debug("getFileStatus could not find key '" + key + "'");
+    if(LOG.isDebugEnabled()) {
+      LOG.debug("getFileStatus could not find key '" + key + "'");
+    }
     throw new FileNotFoundException("No such file or directory '" + absolutePath + "'");
   }
@@ -510,7 +532,9 @@ private boolean mkdir(Path f) throws IOException {
       }
     } catch (FileNotFoundException e) {
-      LOG.debug("Making dir '" + f + "' in S3");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("Making dir '" + f + "' in S3");
+      }
       String key = pathToKey(f) + FOLDER_SUFFIX;
       store.storeEmptyFile(key);
     }
@@ -560,22 +584,35 @@ public boolean rename(Path src, Path dst) throws IOException {
     try {
       boolean dstIsFile = getFileStatus(dst).isFile();
       if (dstIsFile) {
-        LOG.debug(debugPreamble + "returning false as dst is an already existing file");
+        if(LOG.isDebugEnabled()) {
+          LOG.debug(debugPreamble +
+              "returning false as dst is an already existing file");
+        }
         return false;
       } else {
-        LOG.debug(debugPreamble + "using dst as output directory");
+        if(LOG.isDebugEnabled()) {
+          LOG.debug(debugPreamble + "using dst as output directory");
+        }
        dstKey = pathToKey(makeAbsolute(new Path(dst, src.getName())));
       }
     } catch (FileNotFoundException e) {
-      LOG.debug(debugPreamble + "using dst as output destination");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug(debugPreamble + "using dst as output destination");
+      }
       dstKey = pathToKey(makeAbsolute(dst));
       try {
         if (getFileStatus(dst.getParent()).isFile()) {
-          LOG.debug(debugPreamble + "returning false as dst parent exists and is a file");
+          if(LOG.isDebugEnabled()) {
+            LOG.debug(debugPreamble +
+                "returning false as dst parent exists and is a file");
+          }
           return false;
         }
       } catch (FileNotFoundException ex) {
-        LOG.debug(debugPreamble + "returning false as dst parent does not exist");
+        if(LOG.isDebugEnabled()) {
+          LOG.debug(debugPreamble +
+              "returning false as dst parent does not exist");
+        }
         return false;
       }
     }
@@ -584,15 +621,22 @@ public boolean rename(Path src, Path dst) throws IOException {
     try {
       srcIsFile = getFileStatus(src).isFile();
     } catch (FileNotFoundException e) {
-      LOG.debug(debugPreamble + "returning false as src does not exist");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug(debugPreamble + "returning false as src does not exist");
+      }
       return false;
     }
     if (srcIsFile) {
-      LOG.debug(debugPreamble + "src is file, so doing copy then delete in S3");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug(debugPreamble +
+            "src is file, so doing copy then delete in S3");
+      }
      store.copy(srcKey, dstKey);
      store.delete(srcKey);
     } else {
-      LOG.debug(debugPreamble + "src is directory, so copying contents");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug(debugPreamble + "src is directory, so copying contents");
+      }
       store.storeEmptyFile(dstKey + FOLDER_SUFFIX);
       List keysToDelete = new ArrayList();
@@ -606,7 +650,10 @@ public boolean rename(Path src, Path dst) throws IOException {
         priorLastKey = listing.getPriorLastKey();
       } while (priorLastKey != null);
-      LOG.debug(debugPreamble + "all files in src copied, now removing src files");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug(debugPreamble +
+            "all files in src copied, now removing src files");
+      }
       for (String key: keysToDelete) {
         store.delete(key);
       }
@@ -616,7 +663,9 @@ public boolean rename(Path src, Path dst) throws IOException {
       } catch (FileNotFoundException e) {
         //this is fine, we don't require a marker
       }
-      LOG.debug(debugPreamble + "done");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug(debugPreamble + "done");
+      }
     }
 
     return true;
diff --git a/src/java/org/apache/hadoop/io/SequenceFile.java b/src/java/org/apache/hadoop/io/SequenceFile.java
index 2cfe3f242f6..a2eab7d5c67 100644
--- a/src/java/org/apache/hadoop/io/SequenceFile.java
+++ b/src/java/org/apache/hadoop/io/SequenceFile.java
@@ -1834,7 +1834,7 @@ public synchronized void getCurrentValue(Writable val)
         --noBufferedValues;
 
         // Sanity check
-        if (valLength < 0) {
+        if ((valLength < 0) && LOG.isDebugEnabled()) {
           LOG.debug(val + " is a zero-length value");
         }
       }
@@ -1873,7 +1873,7 @@ public synchronized Object getCurrentValue(Object val)
         --noBufferedValues;
 
         // Sanity check
-        if (valLength < 0) {
+        if ((valLength < 0) && LOG.isDebugEnabled()) {
           LOG.debug(val + " is a zero-length value");
         }
       }
@@ -2415,7 +2415,9 @@ public void sort(Path inFile, Path outFile) throws IOException {
     }
 
     private int sortPass(boolean deleteInput) throws IOException {
-      LOG.debug("running sort pass");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("running sort pass");
+      }
       SortPass sortPass = new SortPass();         // make the SortPass
       sortPass.setProgressable(progressable);
       mergeSort = new MergeSort(sortPass.new SeqFileComparator());
@@ -2515,7 +2517,9 @@ public int run(boolean deleteInput) throws IOException {
         }
 
         // buffer is full -- sort & flush it
-        LOG.debug("flushing segment " + segments);
+        if(LOG.isDebugEnabled()) {
+          LOG.debug("flushing segment " + segments);
+        }
         rawBuffer = rawKeys.getData();
         sort(count);
         // indicate we're making progress
@@ -2798,7 +2802,9 @@ public void merge(Path[] inFiles, Path outFile) throws IOException {
 
     /** sort calls this to generate the final merged output */
     private int mergePass(Path tmpDir) throws IOException {
-      LOG.debug("running merge pass");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("running merge pass");
+      }
       Writer writer = cloneFileAttributes(
           outFile.suffix(".0"), outFile, null);
       RawKeyValueIterator r = merge(outFile.suffix(".0"),
@@ -3028,7 +3034,9 @@ public RawKeyValueIterator merge() throws IOException {
         Path outputFile = lDirAlloc.getLocalPathForWrite(
             tmpFilename.toString(), approxOutputSize, conf);
-        LOG.debug("writing intermediate results to " + outputFile);
+        if(LOG.isDebugEnabled()) {
+          LOG.debug("writing intermediate results to " + outputFile);
+        }
         Writer writer = cloneFileAttributes(
             fs.makeQualified(segmentsToMerge.get(0).segmentPathName),
             fs.makeQualified(outputFile), null);
diff --git a/src/java/org/apache/hadoop/io/compress/CodecPool.java b/src/java/org/apache/hadoop/io/compress/CodecPool.java
index 811671a5205..bf1eec54bac 100644
--- a/src/java/org/apache/hadoop/io/compress/CodecPool.java
+++ b/src/java/org/apache/hadoop/io/compress/CodecPool.java
@@ -107,7 +107,9 @@ public static Compressor getCompressor(CompressionCodec codec, Configuration con
       LOG.info("Got brand-new compressor");
     } else {
       compressor.reinit(conf);
-      LOG.debug("Got recycled compressor");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("Got recycled compressor");
+      }
     }
     return compressor;
   }
@@ -131,7 +133,9 @@ public static Decompressor getDecompressor(CompressionCodec codec) {
       decompressor = codec.createDecompressor();
       LOG.info("Got brand-new decompressor");
     } else {
-      LOG.debug("Got recycled decompressor");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("Got recycled decompressor");
+      }
     }
     return decompressor;
   }
diff --git a/src/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java b/src/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java
index 9fe8ca6623f..b269d557b76 100644
--- a/src/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java
+++ b/src/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java
@@ -76,6 +76,8 @@ public void reinit(Configuration conf) {
       LOG.warn(strategy + " not supported by BuiltInZlibDeflater.");
       setStrategy(DEFAULT_STRATEGY);
     }
-    LOG.debug("Reinit compressor with new compression configuration");
+    if(LOG.isDebugEnabled()) {
+      LOG.debug("Reinit compressor with new compression configuration");
+    }
   }
 }
diff --git a/src/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java b/src/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java
index 739de5f3da9..4cb10e6da62 100644
--- a/src/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java
+++ b/src/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java
@@ -253,7 +253,9 @@ public synchronized void reinit(Configuration conf) {
     stream = init(level.compressionLevel(), strategy.compressionStrategy(),
         windowBits.windowBits());
-    LOG.debug("Reinit compressor with new compression configuration");
+    if(LOG.isDebugEnabled()) {
+      LOG.debug("Reinit compressor with new compression configuration");
+    }
   }
 
   public synchronized void setInput(byte[] b, int off, int len) {
diff --git a/src/java/org/apache/hadoop/io/file/tfile/Compression.java b/src/java/org/apache/hadoop/io/file/tfile/Compression.java
index a8371ca7984..ec947299d2d 100644
--- a/src/java/org/apache/hadoop/io/file/tfile/Compression.java
+++ b/src/java/org/apache/hadoop/io/file/tfile/Compression.java
@@ -278,7 +278,9 @@ public Compressor getCompressor() throws IOException {
           // it.
           LOG.warn("Compressor obtained from CodecPool already finished()");
         } else {
-          LOG.debug("Got a compressor: " + compressor.hashCode());
+          if(LOG.isDebugEnabled()) {
+            LOG.debug("Got a compressor: " + compressor.hashCode());
+          }
         }
         /**
          * Following statement is necessary to get around bugs in 0.18 where a
@@ -293,7 +295,9 @@ public Compressor getCompressor() throws IOException {
 
     public void returnCompressor(Compressor compressor) {
       if (compressor != null) {
-        LOG.debug("Return a compressor: " + compressor.hashCode());
+        if(LOG.isDebugEnabled()) {
+          LOG.debug("Return a compressor: " + compressor.hashCode());
+        }
         CodecPool.returnCompressor(compressor);
       }
     }
@@ -308,7 +312,9 @@ public Decompressor getDecompressor() throws IOException {
           // it.
           LOG.warn("Deompressor obtained from CodecPool already finished()");
         } else {
-          LOG.debug("Got a decompressor: " + decompressor.hashCode());
+          if(LOG.isDebugEnabled()) {
+            LOG.debug("Got a decompressor: " + decompressor.hashCode());
+          }
         }
         /**
          * Following statement is necessary to get around bugs in 0.18 where a
@@ -324,7 +330,9 @@ public Decompressor getDecompressor() throws IOException {
 
     public void returnDecompressor(Decompressor decompressor) {
       if (decompressor != null) {
-        LOG.debug("Returned a decompressor: " + decompressor.hashCode());
+        if(LOG.isDebugEnabled()) {
+          LOG.debug("Returned a decompressor: " + decompressor.hashCode());
+        }
         CodecPool.returnDecompressor(decompressor);
       }
     }
diff --git a/src/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java b/src/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java
index 19b68538539..073e86e1563 100644
--- a/src/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java
+++ b/src/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java
@@ -67,9 +67,11 @@ public Object invoke(Object proxy, Method method, Object[] args)
           }
           return null;
         }
-        LOG.debug("Exception while invoking " + method.getName()
-            + " of " + implementation.getClass() + ". Retrying."
-            + StringUtils.stringifyException(e));
+        if(LOG.isDebugEnabled()) {
+          LOG.debug("Exception while invoking " + method.getName()
+              + " of " + implementation.getClass() + ". Retrying."
+              + StringUtils.stringifyException(e));
+        }
       }
     }
   }
diff --git a/src/java/org/apache/hadoop/ipc/Client.java b/src/java/org/apache/hadoop/ipc/Client.java
index 974df85bf71..e65b2e812e7 100644
--- a/src/java/org/apache/hadoop/ipc/Client.java
+++ b/src/java/org/apache/hadoop/ipc/Client.java
@@ -444,8 +444,10 @@ public Object run() throws IOException, InterruptedException {
           disposeSasl();
           if (shouldAuthenticateOverKrb()) {
             if (currRetries < maxRetries) {
-              LOG.debug("Exception encountered while connecting to "
-                  + "the server : " + ex);
+              if(LOG.isDebugEnabled()) {
+                LOG.debug("Exception encountered while connecting to "
+                    + "the server : " + ex);
+              }
               // try re-login
               if (UserGroupInformation.isLoginKeytabBased()) {
                 UserGroupInformation.getLoginUser().reloginFromKeytab();
diff --git a/src/java/org/apache/hadoop/ipc/Server.java b/src/java/org/apache/hadoop/ipc/Server.java
index 5a3a449fc92..e8ee049cb60 100644
--- a/src/java/org/apache/hadoop/ipc/Server.java
+++ b/src/java/org/apache/hadoop/ipc/Server.java
@@ -615,7 +615,9 @@ public void run() {
           // If there were some calls that have not been sent out for a
           // long time, discard them.
           //
-          LOG.debug("Checking for old call responses.");
+          if(LOG.isDebugEnabled()) {
+            LOG.debug("Checking for old call responses.");
+          }
           ArrayList calls;
 
           // get the list of channels from list of keys.
diff --git a/src/java/org/apache/hadoop/ipc/WritableRpcEngine.java b/src/java/org/apache/hadoop/ipc/WritableRpcEngine.java
index 030dc8ee08f..48b23acbb62 100644
--- a/src/java/org/apache/hadoop/ipc/WritableRpcEngine.java
+++ b/src/java/org/apache/hadoop/ipc/WritableRpcEngine.java
@@ -192,16 +192,15 @@ public Invoker(Class protocol,
 
     public Object invoke(Object proxy, Method method, Object[] args)
       throws Throwable {
-      final boolean logDebug = LOG.isDebugEnabled();
       long startTime = 0;
-      if (logDebug) {
+      if (LOG.isDebugEnabled()) {
        startTime = System.currentTimeMillis();
       }
 
       ObjectWritable value = (ObjectWritable)
        client.call(new Invocation(method, args), address, protocol, ticket, rpcTimeout);
-      if (logDebug) {
+      if (LOG.isDebugEnabled()) {
        long callTime = System.currentTimeMillis() - startTime;
        LOG.debug("Call: " + method.getName() + " " + callTime);
       }
diff --git a/src/java/org/apache/hadoop/net/NetworkTopology.java b/src/java/org/apache/hadoop/net/NetworkTopology.java
index 73d9e1c63de..a91f694ae39 100644
--- a/src/java/org/apache/hadoop/net/NetworkTopology.java
+++ b/src/java/org/apache/hadoop/net/NetworkTopology.java
@@ -333,7 +333,9 @@ public void add(Node node) {
           numOfRacks++;
         }
       }
-      LOG.debug("NetworkTopology became:\n" + this.toString());
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("NetworkTopology became:\n" + this.toString());
+      }
     } finally {
       netlock.writeLock().unlock();
     }
@@ -359,7 +361,9 @@ public void remove(Node node) {
           numOfRacks--;
         }
       }
-      LOG.debug("NetworkTopology became:\n" + this.toString());
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("NetworkTopology became:\n" + this.toString());
+      }
     } finally {
       netlock.writeLock().unlock();
     }
diff --git a/src/java/org/apache/hadoop/security/Groups.java b/src/java/org/apache/hadoop/security/Groups.java
index 75a5903db23..9a204c78e88 100644
--- a/src/java/org/apache/hadoop/security/Groups.java
+++ b/src/java/org/apache/hadoop/security/Groups.java
@@ -78,14 +78,18 @@ public List getGroups(String user) throws IOException {
     long now = System.currentTimeMillis();
     // if cache has a value and it hasn't expired
     if (groups != null && (groups.getTimestamp() + cacheTimeout > now)) {
-      LOG.debug("Returning cached groups for '" + user + "'");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("Returning cached groups for '" + user + "'");
+      }
       return groups.getGroups();
     }
 
     // Create and cache user's groups
     groups = new CachedGroups(impl.getGroups(user));
     userToGroupsMap.put(user, groups);
-    LOG.debug("Returning fetched groups for '" + user + "'");
+    if(LOG.isDebugEnabled()) {
+      LOG.debug("Returning fetched groups for '" + user + "'");
+    }
     return groups.getGroups();
   }
@@ -132,7 +136,9 @@ public static Groups getUserToGroupsMappingService() {
    */
   public static Groups getUserToGroupsMappingService(Configuration conf) {
     if(GROUPS == null) {
-      LOG.debug(" Creating new Groups object");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug(" Creating new Groups object");
+      }
       GROUPS = new Groups(conf);
     }
     return GROUPS;
diff --git a/src/java/org/apache/hadoop/security/SaslRpcClient.java b/src/java/org/apache/hadoop/security/SaslRpcClient.java
index 3defd42da90..54b1502acc2 100644
--- a/src/java/org/apache/hadoop/security/SaslRpcClient.java
+++ b/src/java/org/apache/hadoop/security/SaslRpcClient.java
@@ -80,10 +80,9 @@ public SaslRpcClient(AuthMethod method,
       break;
     case KERBEROS:
       if (LOG.isDebugEnabled()) {
-        LOG
-            .debug("Creating SASL " + AuthMethod.KERBEROS.getMechanismName()
-                + " client. Server's Kerberos principal name is "
-                + serverPrincipal);
+        LOG.debug("Creating SASL " + AuthMethod.KERBEROS.getMechanismName()
+            + " client. Server's Kerberos principal name is "
+            + serverPrincipal);
       }
       if (serverPrincipal == null || serverPrincipal.length() == 0) {
         throw new IOException(
diff --git a/src/java/org/apache/hadoop/security/UserGroupInformation.java b/src/java/org/apache/hadoop/security/UserGroupInformation.java
index bc8235a7dc9..8d1513e8e14 100644
--- a/src/java/org/apache/hadoop/security/UserGroupInformation.java
+++ b/src/java/org/apache/hadoop/security/UserGroupInformation.java
@@ -534,13 +534,17 @@ public void run() {
         while (true) {
           try {
             long now = System.currentTimeMillis();
-            LOG.debug("Current time is " + now);
-            LOG.debug("Next refresh is " + nextRefresh);
+            if(LOG.isDebugEnabled()) {
+              LOG.debug("Current time is " + now);
+              LOG.debug("Next refresh is " + nextRefresh);
+            }
             if (now < nextRefresh) {
               Thread.sleep(nextRefresh - now);
             }
             Shell.execCommand(cmd, "-R");
-            LOG.debug("renewed ticket");
+            if(LOG.isDebugEnabled()) {
+              LOG.debug("renewed ticket");
+            }
             reloginFromTicketCache();
             tgt = getTGT();
             if (tgt == null) {
diff --git a/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java b/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
index 71ec4af99bd..3f78cf9ef2e 100644
--- a/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
+++ b/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
@@ -115,8 +115,10 @@ public static void authorize(UserGroupInformation user,
         // just keep going
       }
     }
-    LOG.debug("for protocol authorization compare (" + clientPrincipal + "): "
-        + shortName + " with " + user.getShortUserName());
+    if(LOG.isDebugEnabled()) {
+      LOG.debug("for protocol authorization compare (" + clientPrincipal +
+          "): " + shortName + " with " + user.getShortUserName());
+    }
     if((shortName != null && !shortName.equals(user.getShortUserName())) ||
         !acl.isUserAllowed(user)) {
       AUDITLOG.warn(AUTHZ_FAILED_FOR + user + " for protocol="+protocol);
diff --git a/src/java/org/apache/hadoop/util/GenericOptionsParser.java b/src/java/org/apache/hadoop/util/GenericOptionsParser.java
index 1efc767600a..4eb280d9d4d 100644
--- a/src/java/org/apache/hadoop/util/GenericOptionsParser.java
+++ b/src/java/org/apache/hadoop/util/GenericOptionsParser.java
@@ -316,7 +316,9 @@ private void processGeneralOptions(Configuration conf,
       if (!localFs.exists(p)) {
           throw new FileNotFoundException("File "+fileName+" does not exist.");
       }
-      LOG.debug("setting conf tokensFile: " + fileName);
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("setting conf tokensFile: " + fileName);
+      }
       conf.set("mapreduce.job.credentials.json", localFs.makeQualified(p)
           .toString());
diff --git a/src/java/org/apache/hadoop/util/NativeCodeLoader.java b/src/java/org/apache/hadoop/util/NativeCodeLoader.java
index 588ffb6eac7..3e4f99bbe68 100644
--- a/src/java/org/apache/hadoop/util/NativeCodeLoader.java
+++ b/src/java/org/apache/hadoop/util/NativeCodeLoader.java
@@ -42,15 +42,20 @@ public class NativeCodeLoader {
   static {
     // Try to load native hadoop library and set fallback flag appropriately
-    LOG.debug("Trying to load the custom-built native-hadoop library...");
+    if(LOG.isDebugEnabled()) {
+      LOG.debug("Trying to load the custom-built native-hadoop library...");
+    }
     try {
       System.loadLibrary("hadoop");
       LOG.info("Loaded the native-hadoop library");
       nativeCodeLoaded = true;
     } catch (Throwable t) {
       // Ignore failure to load
-      LOG.debug("Failed to load native-hadoop with error: " + t);
-      LOG.debug("java.library.path=" + System.getProperty("java.library.path"));
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("Failed to load native-hadoop with error: " + t);
+        LOG.debug("java.library.path=" +
+            System.getProperty("java.library.path"));
+      }
     }
 
     if (!nativeCodeLoaded) {
diff --git a/src/test/aop/org/apache/hadoop/fi/ProbabilityModel.java b/src/test/aop/org/apache/hadoop/fi/ProbabilityModel.java
index 891e14716c8..2a3db5f7f52 100644
--- a/src/test/aop/org/apache/hadoop/fi/ProbabilityModel.java
+++ b/src/test/aop/org/apache/hadoop/fi/ProbabilityModel.java
@@ -94,7 +94,9 @@ protected static float getProbability(final String klass) {
     float ret = conf.getFloat(newProbName,
         conf.getFloat(ALL_PROBABILITIES, DEFAULT_PROB));
-    LOG.debug("Request for " + newProbName + " returns=" + ret);
+    if(LOG.isDebugEnabled()) {
+      LOG.debug("Request for " + newProbName + " returns=" + ret);
+    }
     // Make sure that probability level is valid.
     if (ret < DEFAULT_PROB || ret > MAX_PROB) {
       LOG.info("Probability level is incorrect. Default value is set");
diff --git a/src/test/core/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java b/src/test/core/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java
index 4049b1b18d8..76e95c2a376 100644
--- a/src/test/core/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java
+++ b/src/test/core/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java
@@ -310,7 +310,9 @@ public int run(String[] args) throws Exception {
       }
     }
 
-    LOG.debug("Done with testing. Waiting for threads to finish.");
+    if(LOG.isDebugEnabled()) {
+      LOG.debug("Done with testing. Waiting for threads to finish.");
+    }
     for (DFSClientThread thread : threads) {
       thread.join();
       for (int i=0; i