Merge -r 1213907:1213908 and 1213910:1213911 from trunk to branch. FIXES: HADOOP-7810

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23@1213915 13f79535-47bb-0310-9956-ffa450edef68
Alejandro Abdelnur 2011-12-13 20:29:06 +00:00
parent abd9145994
commit 771a5e7dce
7 changed files with 145 additions and 0 deletions
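
For reference, the merge recorded in the commit message corresponds to Subversion commands along these lines; this is a reconstruction from the commit message and the git-svn-id, not the literal commands that were run (the trunk URL is inferred from the repository layout):

    # Reconstructed sketch of the recorded merge; trunk URL inferred from the git-svn-id
    cd branch-0.23
    svn merge -r 1213907:1213908 https://svn.apache.org/repos/asf/hadoop/common/trunk .
    svn merge -r 1213910:1213911 https://svn.apache.org/repos/asf/hadoop/common/trunk .
    svn commit -m "Merge -r 1213907:1213908 and 1213910:1213911 from trunk to branch. FIXES: HADOOP-7810"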


@@ -89,6 +89,8 @@ Release 0.23.1 - Unreleased
    HADOOP-7914. Remove the duplicated declaration of hadoop-hdfs test-jar in
    hadoop-project/pom.xml. (szetszwo)

    HADOOP-7810. Move hadoop archive to core from tools. (tucu)

Release 0.23.0 - 2011-11-01

  INCOMPATIBLE CHANGES


@@ -0,0 +1,126 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
  Licensed under the Apache License, Version 2.0 (the "License");
  you may not use this file except in compliance with the License.
  You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

  Unless required by applicable law or agreed to in writing, software
  distributed under the License is distributed on an "AS IS" BASIS,
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License. See accompanying LICENSE file.
-->
<project>
  <modelVersion>4.0.0</modelVersion>
  <parent>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-project</artifactId>
    <version>0.23.1-SNAPSHOT</version>
    <relativePath>../../hadoop-project</relativePath>
  </parent>
  <groupId>org.apache.hadoop</groupId>
  <artifactId>hadoop-archives</artifactId>
  <version>0.23.1-SNAPSHOT</version>
  <description>Apache Hadoop Archives</description>
  <name>Apache Hadoop Archives</name>
  <packaging>jar</packaging>

  <properties>
    <hadoop.log.dir>${project.build.directory}/log</hadoop.log.dir>
  </properties>

  <dependencies>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-annotations</artifactId>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-mapreduce-client-hs</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-mapreduce-client-core</artifactId>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
      <scope>test</scope>
      <type>test-jar</type>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs</artifactId>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <scope>test</scope>
      <type>test-jar</type>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs</artifactId>
      <scope>test</scope>
      <type>test-jar</type>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-yarn-server-tests</artifactId>
      <type>test-jar</type>
      <scope>test</scope>
    </dependency>
  </dependencies>

  <build>
    <plugins>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-antrun-plugin</artifactId>
        <executions>
          <execution>
            <id>create-log-dir</id>
            <phase>process-test-resources</phase>
            <goals>
              <goal>run</goal>
            </goals>
            <configuration>
              <target>
                <delete dir="${test.build.data}"/>
                <mkdir dir="${test.build.data}"/>
                <mkdir dir="${hadoop.log.dir}"/>
              </target>
            </configuration>
          </execution>
        </executions>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-jar-plugin</artifactId>
        <configuration>
          <archive>
            <manifest>
              <mainClass>org.apache.hadoop.tools.HadoopArchives</mainClass>
            </manifest>
          </archive>
        </configuration>
      </plugin>
    </plugins>
  </build>
</project>
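
The maven-jar-plugin manifest above marks org.apache.hadoop.tools.HadoopArchives as the jar's entry point, so the built artifact can be launched directly. A hedged usage sketch (the jar name follows from the artifactId and version in this pom; the archive name and paths are made up for illustration):

    # Assumed invocation; jar name derived from the artifactId/version above,
    # archive name and paths are illustrative
    hadoop jar hadoop-archives-0.23.1-SNAPSHOT.jar -archiveName foo.har -p /user/alice input /user/alice/archives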


@@ -111,6 +111,14 @@ public class HadoopArchives implements Tool {
    } else {
      this.conf = new JobConf(conf, HadoopArchives.class);
    }

    // This is for test purposes only: since MR2, and unlike Streaming, it is
    // not possible to add a JAR to the classpath that the tool will use when
    // running the MapReduce job, so tests inject the job jar explicitly
    // through a system property.
    String testJar = System.getProperty(TEST_HADOOP_ARCHIVES_JAR_PATH, null);
    if (testJar != null) {
      this.conf.setJar(testJar);
    }
  }

  public Configuration getConf() {
@@ -868,9 +876,12 @@ public class HadoopArchives implements Tool {
    return 0;
  }

  static final String TEST_HADOOP_ARCHIVES_JAR_PATH = "test.hadoop.archives.jar";

  /** the main functions **/
  public static void main(String[] args) {
    JobConf job = new JobConf(HadoopArchives.class);
    HadoopArchives harchives = new HadoopArchives(job);
    int ret = 0;
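
Taken together, the two hunks above add a test-only escape hatch: JobConf normally locates the job jar by searching the classpath for the class passed to its constructor, which finds nothing when classes sit unpacked in target/classes during a Maven test run. A minimal sketch of how the hook is meant to be driven (the surrounding scaffolding such as args is assumed; the test change below does essentially this):

    // Sketch of driving the test hook added above. JarFinder (see the test's new
    // import below) locates or builds a jar containing HadoopArchives, and the
    // system property hands it to setConf(), which calls JobConf.setJar().
    String jar = JarFinder.getJar(HadoopArchives.class);
    System.setProperty(HadoopArchives.TEST_HADOOP_ARCHIVES_JAR_PATH, jar);

    JobConf job = new JobConf(HadoopArchives.class);
    HadoopArchives tool = new HadoopArchives(job);  // assumes the constructor delegates to setConf()
    int exitCode = ToolRunner.run(tool, args);      // args: an archive command line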


@@ -39,6 +39,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.mapred.MiniMRCluster;
import org.apache.hadoop.util.JarFinder;
import org.apache.hadoop.util.ToolRunner;
import org.apache.log4j.Level;
@@ -46,6 +47,9 @@ import org.apache.log4j.Level;
 * test {@link HadoopArchives}
 */
public class TestHadoopArchives extends TestCase {

  public static final String HADOOP_ARCHIVES_JAR = JarFinder.getJar(HadoopArchives.class);

  {
    ((Log4JLogger)LogFactory.getLog(org.apache.hadoop.security.Groups.class)
        ).getLogger().setLevel(Level.OFF);
@@ -136,6 +140,7 @@ public class TestHadoopArchives extends TestCase {
        "*",
        archivePath.toString()
    };
    System.setProperty(HadoopArchives.TEST_HADOOP_ARCHIVES_JAR_PATH, HADOOP_ARCHIVES_JAR);
    final HadoopArchives har = new HadoopArchives(mapred.createJobConf());
    assertEquals(0, ToolRunner.run(har, args));
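
The hunk above shows only the tail of the argument list; for orientation, a hypothetical complete argv for the tool (the archive name and inputPath are illustrative; only "*" and archivePath appear in the diff) would be:

    // Hypothetical complete argument list; HadoopArchives' usage is
    //   archive -archiveName <name>.har -p <parent path> [<glob> ...] <dest>
    // "foo.har" and inputPath are illustrative, not from the diff.
    String[] args = {
        "-archiveName", "foo.har",
        "-p", inputPath.toString(),
        "*",
        archivePath.toString()
    };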


@@ -29,6 +29,7 @@
  <modules>
    <module>hadoop-streaming</module>
    <module>hadoop-archives</module>
  </modules>

  <build>