svn merge -c 1335505 FIXES: HADOOP-8341. Fix or filter findbugs issues in hadoop-tools (bobby)
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1335506 13f79535-47bb-0310-9956-ffa450edef68
parent 797da76660
commit 09c2172681
CHANGES.txt
@@ -416,6 +416,8 @@ Release 0.23.3 - UNRELEASED
     HADOOP-8327. distcpv2 and distcpv1 jars should not coexist (Dave Thompson
     via bobby)
 
+    HADOOP-8341. Fix or filter findbugs issues in hadoop-tools (bobby)
+
 Release 0.23.2 - UNRELEASED
 
   NEW FEATURES
HadoopArchives.java
@@ -117,7 +117,7 @@ public class HadoopArchives implements Tool {
     // will when running the mapreduce job.
     String testJar = System.getProperty(TEST_HADOOP_ARCHIVES_JAR_PATH, null);
     if (testJar != null) {
-      ((JobConf)conf).setJar(testJar);
+      this.conf.setJar(testJar);
     }
   }
 
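Note: the conf field on HadoopArchives is already a JobConf, so calling setJar on it directly removes the downcast that FindBugs was complaining about; behavior is unchanged.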
DistCp.java
@@ -136,10 +136,13 @@ public class DistCp extends Configured implements Tool {
 
     Job job = null;
     try {
-      metaFolder = createMetaFolderPath();
-      jobFS = metaFolder.getFileSystem(getConf());
+      synchronized(this) {
+        //Don't cleanup while we are setting up.
+        metaFolder = createMetaFolderPath();
+        jobFS = metaFolder.getFileSystem(getConf());
 
-      job = createJob();
+        job = createJob();
+      }
       createInputFileListing(job);
 
       job.submit();
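Note: the synchronized block closes a race between job setup and DistCp's cleanup path, which can run from a shutdown hook and delete the meta folder mid-setup. A minimal sketch of the pattern, with illustrative names rather than the real DistCp members:

    // Sketch: guarding setup against a concurrent cleanup (e.g. a shutdown hook).
    public class SetupVsCleanup {
      private String metaFolder; // shared state both threads touch

      public void execute() {
        synchronized (this) { // cleanup() blocks until setup is done
          metaFolder = "/tmp/meta-" + System.nanoTime();
          System.out.println("set up " + metaFolder);
        }
      }

      public synchronized void cleanup() { // e.g. invoked from a shutdown hook
        if (metaFolder != null) {
          System.out.println("deleting " + metaFolder);
          metaFolder = null;
        }
      }

      public static void main(String[] args) {
        SetupVsCleanup d = new SetupVsCleanup();
        Runtime.getRuntime().addShutdownHook(new Thread(d::cleanup));
        d.execute();
      }
    }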
Logalyzer.java
@@ -65,9 +65,9 @@ import org.apache.hadoop.mapreduce.lib.map.RegexMapper;
 public class Logalyzer {
   // Constants
   private static Configuration fsConfig = new Configuration();
-  public static String SORT_COLUMNS =
+  public static final String SORT_COLUMNS =
     "logalizer.logcomparator.sort.columns";
-  public static String COLUMN_SEPARATOR =
+  public static final String COLUMN_SEPARATOR =
     "logalizer.logcomparator.column.separator";
 
   static {
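Note: FindBugs flags mutable public static fields (MS_SHOULD_BE_FINAL); declaring the two configuration-key constants final resolves the warning without affecting callers.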
hadoop-rumen/dev-support/findbugs-exclude.xml (new file)
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<FindBugsFilter>
+  <And>
+    <Class name="org.apache.hadoop.tools.rumen.LoggedJob"/>
+    <Method name="getMapperTriesToSucceed"/>
+    <Bug pattern="EI_EXPOSE_REP"/>
+    <Bug code="EI"/>
+  </And>
+  <And>
+    <Class name="org.apache.hadoop.tools.rumen.ZombieJob"/>
+    <Method name="getInputSplits"/>
+    <Bug pattern="EI_EXPOSE_REP"/>
+    <Bug code="EI"/>
+  </And>
+</FindBugsFilter>
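Note: clauses inside an <And> element must all match for an exclusion to apply, so each entry above suppresses the EI_EXPOSE_REP warning only on the one named getter of the one named class; the same bug anywhere else in rumen still gets reported.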
hadoop-rumen/pom.xml
@@ -90,6 +90,16 @@
 
   <build>
     <plugins>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>findbugs-maven-plugin</artifactId>
+        <configuration>
+          <findbugsXmlOutput>true</findbugsXmlOutput>
+          <xmlOutput>true</xmlOutput>
+          <excludeFilterFile>${basedir}/dev-support/findbugs-exclude.xml</excludeFilterFile>
+          <effort>Max</effort>
+        </configuration>
+      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-antrun-plugin</artifactId>
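Note: this wires the exclude file into the findbugs-maven-plugin run. excludeFilterFile points the plugin at the per-module dev-support/findbugs-exclude.xml above, findbugsXmlOutput/xmlOutput make the results machine-readable for the build's report tooling, and effort Max selects the most thorough (and slowest) analysis.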
DeskewedJobTraceReader.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.tools.rumen;
 
 import java.io.Closeable;
 import java.io.IOException;
+import java.io.Serializable;
 import java.util.Comparator;
 import java.util.Iterator;
 import java.util.PriorityQueue;
@@ -59,7 +60,8 @@ public class DeskewedJobTraceReader implements Closeable {
   static final private Log LOG =
       LogFactory.getLog(DeskewedJobTraceReader.class);
 
-  static private class JobComparator implements Comparator<LoggedJob> {
+  static private class JobComparator implements Comparator<LoggedJob>,
+      Serializable {
     @Override
     public int compare(LoggedJob j1, LoggedJob j2) {
       return (j1.getSubmitTime() < j2.getSubmitTime()) ? -1 : (j1
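Note: this (and the identical changes to TopoSort and HistoryLogsComparator below) addresses the SE_COMPARATOR_SHOULD_BE_SERIALIZABLE pattern: sorted collections such as TreeSet and PriorityQueue hold a reference to their comparator, so serializing the collection also serializes the comparator. A small self-contained sketch of the idea:

    import java.io.Serializable;
    import java.util.Comparator;
    import java.util.TreeSet;

    public class ComparatorDemo {
      // Without Serializable, writing the TreeSet below to an
      // ObjectOutputStream would throw NotSerializableException.
      static class ByLength implements Comparator<String>, Serializable {
        private static final long serialVersionUID = 1L;

        @Override
        public int compare(String a, String b) {
          return Integer.compare(a.length(), b.length());
        }
      }

      public static void main(String[] args) {
        TreeSet<String> jobs = new TreeSet<>(new ByLength());
        jobs.add("sort-job");
        jobs.add("wc");
        System.out.println(jobs.first()); // wc
      }
    }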
JobConfPropertyNames.java
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.tools.rumen;
 
+import java.util.Arrays;
+
 import org.apache.hadoop.mapreduce.MRJobConfig;
 
 public enum JobConfPropertyNames {
@@ -33,6 +35,6 @@ public enum JobConfPropertyNames {
   }
 
   public String[] getCandidates() {
-    return candidates;
+    return Arrays.copyOf(candidates, candidates.length);
   }
 }
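Note: returning the internal array directly is what FindBugs calls EI_EXPOSE_REP, since callers can mutate the object's state through the returned reference. A sketch with illustrative names (not the actual enum):

    import java.util.Arrays;

    public class Candidates {
      private final String[] candidates = { "mapreduce.job.name", "mapred.job.name" };

      public String[] getCandidates() {
        // Defensive copy: every caller gets its own array.
        return Arrays.copyOf(candidates, candidates.length);
      }

      public static void main(String[] args) {
        Candidates c = new Candidates();
        c.getCandidates()[0] = "scribbled";       // mutates only the copy
        System.out.println(c.getCandidates()[0]); // still mapreduce.job.name
      }
    }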
LoggedNetworkTopology.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.tools.rumen;
 
+import java.io.Serializable;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -68,7 +69,8 @@ public class LoggedNetworkTopology implements DeepCompare {
    * order.
    *
    */
-  static class TopoSort implements Comparator<LoggedNetworkTopology> {
+  static class TopoSort implements Comparator<LoggedNetworkTopology>,
+      Serializable {
     public int compare(LoggedNetworkTopology t1, LoggedNetworkTopology t2) {
       return t1.name.getValue().compareTo(t2.name.getValue());
     }
TraceBuilder.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.tools.rumen;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStream;
+import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Comparator;
@@ -98,7 +99,7 @@ public class TraceBuilder extends Configured implements Tool {
    * history file names should result in the order of jobs' submission times.
    */
   private static class HistoryLogsComparator
-      implements Comparator<FileStatus> {
+      implements Comparator<FileStatus>, Serializable {
     @Override
     public int compare(FileStatus file1, FileStatus file2) {
       return file1.getPath().getName().compareTo(
WordListAnonymizerUtility.java
@@ -27,7 +27,7 @@ import org.apache.commons.lang.StringUtils;
  * //TODO There is no caching for saving memory.
  */
 public class WordListAnonymizerUtility {
-  public static final String[] KNOWN_WORDS =
+  static final String[] KNOWN_WORDS =
     new String[] {"job", "tmp", "temp", "home", "homes", "usr", "user", "test"};
 
   /**
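Note: even a final array is mutable, so a public static final array trips FindBugs' MS_MUTABLE_ARRAY pattern; narrowing KNOWN_WORDS to package-private takes it out of the public surface instead of paying for a defensive copy on every access.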
NodeName.java
@@ -93,16 +93,8 @@ public class NodeName implements AnonymizableDataType<String> {
   }
 
   public NodeName(String rName, String hName) {
-    rName = (rName == null)
-            ? rName
-            : rName.length() == 0
-              ? null
-              : rName;
-    hName = (hName == null)
-            ? hName
-            : hName.length() == 0
-              ? null
-              : hName;
+    rName = (rName == null || rName.length() == 0) ? null : rName;
+    hName = (hName == null || hName.length() == 0) ? null : hName;
     if (hName == null) {
       nodeName = rName;
       rackName = rName;
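Note: the rewrite is behavior-preserving: both versions normalize a null or empty rack name and host name to null and leave everything else untouched, while replacing the hard-to-read nested ternaries with a single expression apiece.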
hadoop-streaming/dev-support/findbugs-exclude.xml (new file)
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<FindBugsFilter>
+  <Match>
+    <Or>
+      <Class name="org.apache.hadoop.streaming.PipeMapper" />
+      <Class name="org.apache.hadoop.streaming.PipeReducer"/>
+    </Or>
+    <Or>
+      <Method name="getFieldSeparator"/>
+      <Method name="getInputSeparator"/>
+    </Or>
+    <Bug pattern="EI_EXPOSE_REP"/>
+  </Match>
+</FindBugsFilter>
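Note: clauses directly under <Match> are implicitly ANDed and each <Or> accepts either alternative, so this single entry suppresses EI_EXPOSE_REP for getFieldSeparator and getInputSeparator on both PipeMapper and PipeReducer.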
hadoop-streaming/pom.xml
@@ -96,6 +96,16 @@
 
   <build>
     <plugins>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>findbugs-maven-plugin</artifactId>
+        <configuration>
+          <findbugsXmlOutput>true</findbugsXmlOutput>
+          <xmlOutput>true</xmlOutput>
+          <excludeFilterFile>${basedir}/dev-support/findbugs-exclude.xml</excludeFilterFile>
+          <effort>Max</effort>
+        </configuration>
+      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-antrun-plugin</artifactId>
StreamJob.java
@@ -91,7 +91,7 @@ public class StreamJob implements Tool {
   @Deprecated
   public StreamJob(String[] argv, boolean mayExit) {
     this();
-    argv_ = argv;
+    argv_ = Arrays.copyOf(argv, argv.length);
     this.config_ = new Configuration();
   }
 
@@ -113,7 +113,7 @@ public class StreamJob implements Tool {
   @Override
   public int run(String[] args) throws Exception {
     try {
-      this.argv_ = args;
+      this.argv_ = Arrays.copyOf(args, args.length);
       init();
 
       preProcessArgs();
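Note: both Arrays.copyOf calls above address the companion pattern EI_EXPOSE_REP2, storing a reference to a caller-supplied array: StreamJob keeps a private copy of the argument vector, so later mutation of the caller's array cannot change the options being parsed.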
@@ -290,7 +290,7 @@ public class StreamJob implements Tool {
         LOG.warn("-file option is deprecated, please use generic option" +
             " -files instead.");
 
-        String fileList = null;
+        StringBuffer fileList = new StringBuffer();
         for (String file : values) {
           packageFiles_.add(file);
           try {
@@ -298,13 +298,15 @@ public class StreamJob implements Tool {
             Path path = new Path(pathURI);
             FileSystem localFs = FileSystem.getLocal(config_);
             String finalPath = path.makeQualified(localFs).toString();
-            fileList = fileList == null ? finalPath : fileList + "," + finalPath;
+            if(fileList.length() > 0) {
+              fileList.append(',');
+            }
+            fileList.append(finalPath);
           } catch (Exception e) {
             throw new IllegalArgumentException(e);
           }
         }
-        config_.set("tmpfiles", config_.get("tmpfiles", "") +
-            (fileList == null ? "" : fileList));
+        config_.set("tmpfiles", config_.get("tmpfiles", "") + fileList);
         validate(packageFiles_);
       }
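Note: building a comma-separated list by string concatenation inside a loop is the SBSC_USE_STRINGBUFFER_CONCATENATION pattern; each pass copies the whole accumulated string, so the loop is quadratic, while appending to one buffer is linear. A standalone sketch of the fixed shape:

    public class JoinDemo {
      static String join(String[] parts) {
        StringBuffer out = new StringBuffer(); // StringBuilder also works off a single thread
        for (String p : parts) {
          if (out.length() > 0) {
            out.append(',');
          }
          out.append(p);
        }
        return out.toString();
      }

      public static void main(String[] args) {
        System.out.println(join(new String[] { "a.jar", "b.py", "c.txt" }));
        // prints: a.jar,b.py,c.txt
      }
    }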