HADOOP-10005. No need to check INFO severity level is enabled or not. Contributed by Jackie Chang.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1532907 13f79535-47bb-0310-9956-ffa450edef68
Commit: dea1b2e84a
Parent: f28f5ed628
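For context, the pattern this change removes is the level guard around plain string-concatenation logging: wrapping LOG.info(...) in LOG.isInfoEnabled() only saves work when INFO is disabled, which is rarely the case for Hadoop daemons, so the check mostly adds nesting. Below is a minimal, hypothetical before/after sketch using the Apache Commons Logging API; the class name and message are illustrative and are not taken from the patched files.

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

// Hypothetical class, for illustration only; not part of the patch.
public class LogGuardExample {
  private static final Log LOG = LogFactory.getLog(LogGuardExample.class);

  // Before: the guard only avoids building the message string when INFO
  // is disabled, which is uncommon for Hadoop daemons.
  void logWithGuard(int numTargets) {
    if (LOG.isInfoEnabled()) {
      LOG.info("numTargets = " + numTargets);
    }
  }

  // After: same output whenever INFO is enabled, one level less nesting.
  void logWithoutGuard(int numTargets) {
    LOG.info("numTargets = " + numTargets);
  }
}

A guard of this kind can still be worthwhile for DEBUG or TRACE messages whose arguments are expensive to compute; this change only drops it for INFO-level logging.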
@@ -585,6 +585,9 @@ Release 2.1.1-beta - 2013-09-23
     HADOOP-9977. Hadoop services won't start with different keypass and
     keystorepass when https is enabled. (cnauroth)
 
+    HADOOP-10005. No need to check INFO severity level is enabled or not.
+    (Jackie Chang via suresh)
+
 Release 2.1.0-beta - 2013-08-22
 
   INCOMPATIBLE CHANGES
@@ -1315,7 +1315,6 @@ public class DataNode extends Configured
     int numTargets = xferTargets.length;
     if (numTargets > 0) {
-      if (LOG.isInfoEnabled()) {
         StringBuilder xfersBuilder = new StringBuilder();
         for (int i = 0; i < numTargets; i++) {
           xfersBuilder.append(xferTargets[i]);
@@ -1323,7 +1322,6 @@ public class DataNode extends Configured
         }
         LOG.info(bpReg + " Starting thread to transfer " +
             block + " to " + xfersBuilder);
-      }
 
       new Daemon(new DataTransfer(xferTargets, block,
           BlockConstructionStage.PIPELINE_SETUP_CREATE, "")).start();
@@ -972,12 +972,10 @@ public class MapTask extends Task {
       maxRec = kvmeta.capacity() / NMETA;
       softLimit = (int)(kvbuffer.length * spillper);
       bufferRemaining = softLimit;
-      if (LOG.isInfoEnabled()) {
         LOG.info(JobContext.IO_SORT_MB + ": " + sortmb);
         LOG.info("soft limit at " + softLimit);
         LOG.info("bufstart = " + bufstart + "; bufvoid = " + bufvoid);
         LOG.info("kvstart = " + kvstart + "; length = " + maxRec);
-      }
 
       // k/v serialization
       comparator = job.getOutputKeyComparator();
@@ -1180,11 +1178,9 @@ public class MapTask extends Task {
       final int aligned = pos - (pos % METASIZE);
       kvindex =
         ((aligned - METASIZE + kvbuffer.length) % kvbuffer.length) / 4;
-      if (LOG.isInfoEnabled()) {
         LOG.info("(EQUATOR) " + pos + " kvi " + kvindex +
             "(" + (kvindex * 4) + ")");
-      }
     }
 
     /**
      * The spill is complete, so set the buffer and meta indices to be equal to
@@ -1198,11 +1194,9 @@ public class MapTask extends Task {
       // set start/end to point to first meta record
       kvstart = kvend =
         ((aligned - METASIZE + kvbuffer.length) % kvbuffer.length) / 4;
-      if (LOG.isInfoEnabled()) {
         LOG.info("(RESET) equator " + e + " kv " + kvstart + "(" +
           (kvstart * 4) + ")" + " kvi " + kvindex + "(" + (kvindex * 4) + ")");
-      }
     }
 
     /**
      * Compute the distance in bytes between two indices in the serialization
@@ -1456,7 +1450,6 @@ public class MapTask extends Task {
         if (kvindex != kvend) {
           kvend = (kvindex + NMETA) % kvmeta.capacity();
           bufend = bufmark;
-          if (LOG.isInfoEnabled()) {
             LOG.info("Spilling map output");
             LOG.info("bufstart = " + bufstart + "; bufend = " + bufmark +
                 "; bufvoid = " + bufvoid);
@@ -1464,7 +1457,6 @@ public class MapTask extends Task {
                 "); kvend = " + kvend + "(" + (kvend * 4) +
                 "); length = " + (distanceTo(kvend, kvstart,
                 kvmeta.capacity()) + 1) + "/" + maxRec);
-          }
           sortAndSpill();
         }
       } catch (InterruptedException e) {
@@ -1547,7 +1539,6 @@ public class MapTask extends Task {
       kvend = (kvindex + NMETA) % kvmeta.capacity();
       bufend = bufmark;
       spillInProgress = true;
-      if (LOG.isInfoEnabled()) {
         LOG.info("Spilling map output");
         LOG.info("bufstart = " + bufstart + "; bufend = " + bufmark +
             "; bufvoid = " + bufvoid);
@@ -1555,7 +1546,6 @@ public class MapTask extends Task {
             "); kvend = " + kvend + "(" + (kvend * 4) +
             "); length = " + (distanceTo(kvend, kvstart,
             kvmeta.capacity()) + 1) + "/" + maxRec);
-      }
       spillReady.signal();
     }
@@ -81,14 +81,12 @@ public class TestMultiFileInputFormat extends TestCase{
   }
 
   public void testFormat() throws IOException {
-    if(LOG.isInfoEnabled()) {
       LOG.info("Test started");
       LOG.info("Max split count = " + MAX_SPLIT_COUNT);
       LOG.info("Split count increment = " + SPLIT_COUNT_INCR);
       LOG.info("Max bytes per file = " + MAX_BYTES);
       LOG.info("Max number of files = " + MAX_NUM_FILES);
       LOG.info("Number of files increment = " + NUM_FILES_INCR);
-    }
 
     MultiFileInputFormat<Text,Text> format = new DummyMultiFileInputFormat();
     FileSystem fs = FileSystem.getLocal(job);
@@ -122,12 +122,10 @@ public class DebugJobProducer implements JobStoryProducer {
       // Add/remove excess
       recs[0] += totalrecs - tot_recs;
       bytes[0] += totalbytes - tot_bytes;
-      if (LOG.isInfoEnabled()) {
         LOG.info(
           "DIST: " + Arrays.toString(recs) + " " + tot_recs + "/" + totalrecs +
           " " + Arrays.toString(bytes) + " " + tot_bytes + "/" + totalbytes);
-      }
     }
 
     private static final AtomicInteger seq = new AtomicInteger(0);
     // set timestamp in the past