HBASE-8621 More log edits; we log too much

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1486291 13f79535-47bb-0310-9956-ffa450edef68
Author: Michael Stack
Date: 2013-05-25 06:05:07 +00:00
Commit: fe6bbb0d29
Parent: e849b9bfb9
8 changed files with 22 additions and 21 deletions
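
All of the edits below apply the same pattern: messages about routine, no-op outcomes are demoted from INFO/DEBUG to TRACE (or shortened), and message construction is wrapped in a level check so the string concatenation is skipped when that level is disabled. Below is a minimal sketch of the pattern using commons-logging, which these classes use; the class and method names are illustrative only and are not part of the commit.

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

public class LogLevelGuardExample {
  private static final Log LOG = LogFactory.getLog(LogLevelGuardExample.class);

  // Hypothetical reporting method, not from the commit.
  void reportJanitorScan(int rowCount, int cleaned) {
    if (cleaned > 0) {
      // Something actually happened: worth an INFO line.
      LOG.info("Scanned " + rowCount + " row(s), gc'd " + cleaned + " region(s)");
    } else if (LOG.isTraceEnabled()) {
      // Routine "nothing to do" outcome: only build the message when TRACE is on.
      LOG.trace("Scanned " + rowCount + " row(s), nothing to gc");
    }
  }
}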


@@ -204,7 +204,8 @@ public class ClientScanner extends AbstractClientScanner {
         localStartKey = this.scan.getStartRow();
       }
-      if (LOG.isDebugEnabled()) {
+      if (LOG.isDebugEnabled() && this.currentRegion != null) {
+        // Only worth logging if NOT first region in scan.
         LOG.debug("Advancing internal scanner to startKey at '" +
           Bytes.toStringBinary(localStartKey) + "'");
       }


@@ -265,8 +265,8 @@ public class CatalogJanitor extends Chore {
       LOG.info("Scanned " + count + " catalog row(s), gc'd " + mergeCleaned
         + " unreferenced merged region(s) and " + splitCleaned
         + " unreferenced parent region(s)");
-    } else if (LOG.isDebugEnabled()) {
-      LOG.debug("Scanned " + count + " catalog row(s), gc'd " + mergeCleaned
+    } else if (LOG.isTraceEnabled()) {
+      LOG.trace("Scanned " + count + " catalog row(s), gc'd " + mergeCleaned
         + " unreferenced merged region(s) and " + splitCleaned
         + " unreferenced parent region(s)");
     }


@@ -378,11 +378,14 @@ public abstract class BaseLoadBalancer implements LoadBalancer {
     int ceiling = (int) Math.ceil(average * (1 + slop));
     if (!(cs.getMinLoad() > ceiling || cs.getMaxLoad() < floor)) {
       NavigableMap<ServerAndLoad, List<HRegionInfo>> serversByLoad = cs.getServersByLoad();
-      LOG.info("Skipping load balancing because balanced cluster; " +
-        "servers=" + cs.getNumServers() + " " +
-        "regions=" + cs.getNumRegions() + " average=" + average + " " +
-        "mostloaded=" + serversByLoad.lastKey().getLoad() +
-        " leastloaded=" + serversByLoad.firstKey().getLoad());
+      if (LOG.isTraceEnabled()) {
+        // If nothing to balance, then don't say anything unless trace-level logging.
+        LOG.trace("Skipping load balancing because balanced cluster; " +
+          "servers=" + cs.getNumServers() + " " +
+          "regions=" + cs.getNumRegions() + " average=" + average + " " +
+          "mostloaded=" + serversByLoad.lastKey().getLoad() +
+          " leastloaded=" + serversByLoad.firstKey().getLoad());
+      }
       return false;
     }
     return true;


@@ -143,7 +143,7 @@ public class CreateTableHandler extends EventHandler {
   @Override
   public void process() {
     String tableName = this.hTableDescriptor.getNameAsString();
-    LOG.info("Attempting to create the table " + tableName);
+    LOG.info("Create table " + tableName);
     try {
       MasterCoprocessorHost cpHost = ((HMaster) this.server).getCoprocessorHost();


@@ -27,16 +27,14 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HDFSBlocksDistribution;
-import org.apache.hadoop.hbase.fs.HFileSystem;
 import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
 import org.apache.hadoop.hbase.io.HFileLink;
-import org.apache.hadoop.hbase.io.Reference;
 import org.apache.hadoop.hbase.io.HalfStoreFileReader;
+import org.apache.hadoop.hbase.io.Reference;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.util.FSUtils;
@@ -101,13 +99,12 @@ public class StoreFileInfo {
   public StoreFileInfo(final Configuration conf, final FileSystem fs, final FileStatus fileStatus)
       throws IOException {
     this.fileStatus = fileStatus;
     Path p = fileStatus.getPath();
     if (HFileLink.isHFileLink(p)) {
       // HFileLink
       this.reference = null;
       this.link = new HFileLink(conf, p);
-      LOG.debug("Store file " + p + " is a link");
+      if (LOG.isTraceEnabled()) LOG.trace(p + " is a link");
     } else if (isReference(p)) {
       this.reference = Reference.read(fs, p);
       Path referencePath = getReferredToFile(p);
@@ -118,7 +115,7 @@ public class StoreFileInfo {
         // Reference
         this.link = null;
       }
-      LOG.debug("Store file " + p + " is a " + reference.getFileRegion() +
+      if (LOG.isTraceEnabled()) LOG.trace(p + " is a " + reference.getFileRegion() +
         " reference to " + referencePath);
     } else if (isHFile(p)) {
       // HFile


@@ -519,10 +519,10 @@ class FSHLog implements HLog, Syncable {
         this.hdfs_out = nextHdfsOut;
         this.numEntries.set(0);
       }
-      LOG.info("Rolled WAL " + (oldFile != null ?
-        FSUtils.getPath(oldFile) + ", entries=" + oldNumEntries + ", filesize=" +
-        StringUtils.humanReadableInt(this.fs.getFileStatus(oldFile).getLen()):
-        "" ) + "; new WAL=" + FSUtils.getPath(newPath));
+      if (oldFile == null) LOG.info("New WAL " + FSUtils.getPath(newPath));
+      else LOG.info("Rolled WAL " + FSUtils.getPath(oldFile) + " with entries=" + oldNumEntries +
+        ", filesize=" + StringUtils.humanReadableInt(this.fs.getFileStatus(oldFile).getLen()) +
+        "; new WAL " + FSUtils.getPath(newPath));
       // Tell our listeners that a new log was created
       if (!this.listeners.isEmpty()) {
@@ -765,8 +765,7 @@ class FSHLog implements HLog, Syncable {
         }
       }
     }
-    LOG.debug("Moved " + files.length + " log files to " +
-      FSUtils.getPath(this.oldLogDir));
+    LOG.debug("Moved " + files.length + " WAL file(s) to " + FSUtils.getPath(this.oldLogDir));
   }
   if (!fs.delete(dir, true)) {
     LOG.info("Unable to delete " + dir);
@@ -806,7 +805,7 @@ class FSHLog implements HLog, Syncable {
     synchronized (updateLock) {
       this.closed = true;
       if (LOG.isDebugEnabled()) {
-        LOG.debug("closing hlog writer in " + this.dir.toString());
+        LOG.debug("Closing WAL writer in " + this.dir.toString());
       }
       if (this.writer != null) {
         this.writer.close();


@@ -292,7 +292,8 @@ public abstract class FSUtils {
       // Function was properly called, but threw it's own exception.
       throw new IOException(ite.getCause());
     } catch (NoSuchMethodException e) {
-      LOG.debug("Ignoring (most likely Reflection related exception) " + e);
+      LOG.debug("DFS Client does not support most favored nodes create; using default create");
+      if (LOG.isTraceEnabled()) LOG.trace("Ignoring; use default create", e);
     } catch (IllegalArgumentException e) {
       LOG.debug("Ignoring (most likely Reflection related exception) " + e);
     } catch (SecurityException e) {


@@ -223,7 +223,7 @@ public abstract class ZKInterProcessLockBase implements InterProcessLock {
       }
     }
     updateAcquiredLock(createdZNode);
-    LOG.debug("Successfully acquired a lock for " + createdZNode);
+    LOG.debug("Acquired a lock for " + createdZNode);
     return true;
   }
@@ -325,7 +325,7 @@ public abstract class ZKInterProcessLockBase implements InterProcessLock {
       }
     }
     if (LOG.isDebugEnabled()) {
-      LOG.debug("Successfully released " + lock.getPath());
+      LOG.debug("Released " + lock.getPath());
     }
   } catch (BadVersionException e) {
     throw new IllegalStateException(e);