HBASE-8621 More log edits; we log too much
git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1486291 13f79535-47bb-0310-9956-ffa450edef68
commit fe6bbb0d29
parent e849b9bfb9
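The theme of the edits below: messages that fire on routine operations drop from INFO/DEBUG down to DEBUG/TRACE, and the log statements get wrapped in an explicit isDebugEnabled()/isTraceEnabled() check so the message string is never built when the level is off. A minimal sketch of that guard pattern with commons-logging follows; the class, method, and message are illustrative only and are not part of this patch.

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

// Illustrative example only; not taken from the patch.
public class GuardedLoggingExample {
  private static final Log LOG = LogFactory.getLog(GuardedLoggingExample.class);

  void reportScan(int rowCount, String tableName) {
    // Guarding the statement skips the string concatenation entirely
    // unless trace-level logging is actually enabled.
    if (LOG.isTraceEnabled()) {
      LOG.trace("Scanned " + rowCount + " row(s) of " + tableName);
    }
  }
}
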
@@ -204,7 +204,8 @@ public class ClientScanner extends AbstractClientScanner {
         localStartKey = this.scan.getStartRow();
       }
 
-      if (LOG.isDebugEnabled()) {
+      if (LOG.isDebugEnabled() && this.currentRegion != null) {
+        // Only worth logging if NOT first region in scan.
         LOG.debug("Advancing internal scanner to startKey at '" +
           Bytes.toStringBinary(localStartKey) + "'");
       }

@@ -265,8 +265,8 @@ public class CatalogJanitor extends Chore {
       LOG.info("Scanned " + count + " catalog row(s), gc'd " + mergeCleaned
         + " unreferenced merged region(s) and " + splitCleaned
         + " unreferenced parent region(s)");
-    } else if (LOG.isDebugEnabled()) {
-      LOG.debug("Scanned " + count + " catalog row(s), gc'd " + mergeCleaned
+    } else if (LOG.isTraceEnabled()) {
+      LOG.trace("Scanned " + count + " catalog row(s), gc'd " + mergeCleaned
         + " unreferenced merged region(s) and " + splitCleaned
         + " unreferenced parent region(s)");
     }

@@ -378,11 +378,14 @@ public abstract class BaseLoadBalancer implements LoadBalancer {
     int ceiling = (int) Math.ceil(average * (1 + slop));
     if (!(cs.getMinLoad() > ceiling || cs.getMaxLoad() < floor)) {
       NavigableMap<ServerAndLoad, List<HRegionInfo>> serversByLoad = cs.getServersByLoad();
-      LOG.info("Skipping load balancing because balanced cluster; " +
+      if (LOG.isTraceEnabled()) {
+        // If nothing to balance, then don't say anything unless trace-level logging.
+        LOG.trace("Skipping load balancing because balanced cluster; " +
         "servers=" + cs.getNumServers() + " " +
         "regions=" + cs.getNumRegions() + " average=" + average + " " +
         "mostloaded=" + serversByLoad.lastKey().getLoad() +
         " leastloaded=" + serversByLoad.firstKey().getLoad());
+      }
       return false;
     }
     return true;

@@ -143,7 +143,7 @@ public class CreateTableHandler extends EventHandler {
   @Override
   public void process() {
     String tableName = this.hTableDescriptor.getNameAsString();
-    LOG.info("Attempting to create the table " + tableName);
+    LOG.info("Create table " + tableName);
 
     try {
       MasterCoprocessorHost cpHost = ((HMaster) this.server).getCoprocessorHost();

@@ -27,16 +27,14 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HDFSBlocksDistribution;
 import org.apache.hadoop.hbase.fs.HFileSystem;
 import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
 import org.apache.hadoop.hbase.io.HFileLink;
+import org.apache.hadoop.hbase.io.Reference;
 import org.apache.hadoop.hbase.io.HalfStoreFileReader;
-import org.apache.hadoop.hbase.io.Reference;
-import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.util.FSUtils;

@@ -101,13 +99,12 @@ public class StoreFileInfo {
   public StoreFileInfo(final Configuration conf, final FileSystem fs, final FileStatus fileStatus)
       throws IOException {
     this.fileStatus = fileStatus;
-
     Path p = fileStatus.getPath();
     if (HFileLink.isHFileLink(p)) {
       // HFileLink
       this.reference = null;
       this.link = new HFileLink(conf, p);
-      LOG.debug("Store file " + p + " is a link");
+      if (LOG.isTraceEnabled()) LOG.trace(p + " is a link");
     } else if (isReference(p)) {
       this.reference = Reference.read(fs, p);
       Path referencePath = getReferredToFile(p);

@@ -118,7 +115,7 @@ public class StoreFileInfo {
         // Reference
         this.link = null;
       }
-      LOG.debug("Store file " + p + " is a " + reference.getFileRegion() +
+      if (LOG.isTraceEnabled()) LOG.trace(p + " is a " + reference.getFileRegion() +
         " reference to " + referencePath);
     } else if (isHFile(p)) {
       // HFile

@@ -519,10 +519,10 @@ class FSHLog implements HLog, Syncable {
         this.hdfs_out = nextHdfsOut;
         this.numEntries.set(0);
       }
-      LOG.info("Rolled WAL " + (oldFile != null ?
-        FSUtils.getPath(oldFile) + ", entries=" + oldNumEntries + ", filesize=" +
-        StringUtils.humanReadableInt(this.fs.getFileStatus(oldFile).getLen()):
-        "" ) + "; new WAL=" + FSUtils.getPath(newPath));
+      if (oldFile == null) LOG.info("New WAL " + FSUtils.getPath(newPath));
+      else LOG.info("Rolled WAL " + FSUtils.getPath(oldFile) + " with entries=" + oldNumEntries +
+        ", filesize=" + StringUtils.humanReadableInt(this.fs.getFileStatus(oldFile).getLen()) +
+        "; new WAL " + FSUtils.getPath(newPath));
 
       // Tell our listeners that a new log was created
       if (!this.listeners.isEmpty()) {

@@ -765,8 +765,7 @@ class FSHLog implements HLog, Syncable {
           }
         }
       }
-      LOG.debug("Moved " + files.length + " log files to " +
-        FSUtils.getPath(this.oldLogDir));
+      LOG.debug("Moved " + files.length + " WAL file(s) to " + FSUtils.getPath(this.oldLogDir));
     }
     if (!fs.delete(dir, true)) {
       LOG.info("Unable to delete " + dir);

@@ -806,7 +805,7 @@ class FSHLog implements HLog, Syncable {
     synchronized (updateLock) {
       this.closed = true;
       if (LOG.isDebugEnabled()) {
-        LOG.debug("closing hlog writer in " + this.dir.toString());
+        LOG.debug("Closing WAL writer in " + this.dir.toString());
       }
       if (this.writer != null) {
         this.writer.close();

@@ -292,7 +292,8 @@ public abstract class FSUtils {
       // Function was properly called, but threw it's own exception.
       throw new IOException(ite.getCause());
     } catch (NoSuchMethodException e) {
-      LOG.debug("Ignoring (most likely Reflection related exception) " + e);
+      LOG.debug("DFS Client does not support most favored nodes create; using default create");
+      if (LOG.isTraceEnabled()) LOG.trace("Ignoring; use default create", e);
     } catch (IllegalArgumentException e) {
       LOG.debug("Ignoring (most likely Reflection related exception) " + e);
     } catch (SecurityException e) {

@@ -223,7 +223,7 @@ public abstract class ZKInterProcessLockBase implements InterProcessLock {
         }
       }
       updateAcquiredLock(createdZNode);
-      LOG.debug("Successfully acquired a lock for " + createdZNode);
+      LOG.debug("Acquired a lock for " + createdZNode);
       return true;
     }
 
@@ -325,7 +325,7 @@ public abstract class ZKInterProcessLockBase implements InterProcessLock {
         }
       }
       if (LOG.isDebugEnabled()) {
-        LOG.debug("Successfully released " + lock.getPath());
+        LOG.debug("Released " + lock.getPath());
       }
     } catch (BadVersionException e) {
       throw new IllegalStateException(e);