HADOOP-11007. Reinstate building of ant tasks support. Contributed by Jason Lowe.
(cherry picked from commit 99d7a452be)
This commit is contained in:
Kihwal Lee 2014-10-07 16:23:52 -05:00
parent 9ff7c0c96c
commit 7910c6d035
10 changed files with 14 additions and 374 deletions

View File

@ -241,6 +241,8 @@ Release 2.6.0 - UNRELEASED
HADOOP-11153. Make number of KMS threads configurable. (wang)
HADOOP-11007. Reinstate building of ant tasks support. (jlowe via kihwal)
OPTIMIZATIONS
HADOOP-10838. Byte array native checksumming. (James Thomas via todd)

View File

@ -1,205 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.ant;
import java.io.ByteArrayOutputStream;
import java.io.OutputStream;
import java.io.PrintStream;
import java.util.LinkedList;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FsShell;
import org.apache.tools.ant.AntClassLoader;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Task;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hdfs.HdfsConfiguration;
/**
* {@link org.apache.hadoop.fs.FsShell FsShell} wrapper for ant Task.
*/
@InterfaceAudience.Private
public class DfsTask extends Task {
/**
* Default sink for {@link java.lang.System.out System.out}
* and {@link java.lang.System.err System.err}.
*/
private static final OutputStream nullOut = new OutputStream() {
public void write(int b) { /* ignore */ }
public String toString() { return ""; }
};
private static final FsShell shell = new FsShell();
protected AntClassLoader confloader;
protected OutputStream out = nullOut;
protected OutputStream err = nullOut;
// set by ant
protected String cmd;
protected final LinkedList<String> argv = new LinkedList<String>();
protected String outprop;
protected String errprop;
protected boolean failonerror = true;
// saved ant context
private PrintStream antOut;
private PrintStream antErr;
/**
* Sets the command to run in {@link org.apache.hadoop.fs.FsShell FsShell}.
* @param cmd A valid command to FsShell, sans &quot;-&quot;.
*/
public void setCmd(String cmd) {
this.cmd = "-" + cmd.trim();
}
/**
* Sets the argument list from a String of comma-separated values.
* @param args A String of comma-separated arguments to FsShell.
*/
public void setArgs(String args) {
for (String s : args.trim().split("\\s*,\\s*"))
argv.add(s);
}
/**
* Sets the property into which System.out will be written.
* @param outprop The name of the property into which System.out is written.
* If the property is defined before this task is executed, it will not be updated.
*/
public void setOut(String outprop) {
this.outprop = outprop;
out = new ByteArrayOutputStream();
if (outprop.equals(errprop))
err = out;
}
/**
* Sets the property into which System.err will be written. If this property
* has the same name as the property for System.out, the two will be interlaced.
* @param errprop The name of the property into which System.err is written.
* If the property is defined before this task is executed, it will not be updated.
*/
public void setErr(String errprop) {
this.errprop = errprop;
err = (errprop.equals(outprop)) ? err = out : new ByteArrayOutputStream();
}
/**
* Sets the path for the parent-last ClassLoader, intended to be used for
* {@link org.apache.hadoop.conf.Configuration Configuration}.
* @param confpath The path to search for resources, classes, etc. before
* parent ClassLoaders.
*/
public void setConf(String confpath) {
confloader = new AntClassLoader(getClass().getClassLoader(), false);
confloader.setProject(getProject());
if (null != confpath)
confloader.addPathElement(confpath);
}
/**
* Sets a property controlling whether or not a
* {@link org.apache.tools.ant.BuildException BuildException} will be thrown
* if the command returns a value less than zero or throws an exception.
* @param failonerror If true, throw a BuildException on error.
*/
public void setFailonerror(boolean failonerror) {
this.failonerror = failonerror;
}
/**
* Save the current values of System.out, System.err and configure output
* streams for FsShell.
*/
protected void pushContext() {
antOut = System.out;
antErr = System.err;
System.setOut(new PrintStream(out));
System.setErr(out == err ? System.out : new PrintStream(err));
}
/**
* Create the appropriate output properties with their respective output,
* restore System.out, System.err and release any resources from created
* ClassLoaders to aid garbage collection.
*/
protected void popContext() {
// write output to property, if applicable
if (outprop != null && !System.out.checkError())
getProject().setNewProperty(outprop, out.toString());
if (out != err && errprop != null && !System.err.checkError())
getProject().setNewProperty(errprop, err.toString());
System.setErr(antErr);
System.setOut(antOut);
confloader.cleanup();
confloader.setParent(null);
}
// in case DfsTask is overridden
protected int postCmd(int exit_code) {
if ("-test".equals(cmd) && exit_code != 0)
outprop = null;
return exit_code;
}
/**
* Invoke {@link org.apache.hadoop.fs.FsShell#doMain FsShell.doMain} after a
* few cursory checks of the configuration.
*/
public void execute() throws BuildException {
if (null == cmd)
throw new BuildException("Missing command (cmd) argument");
argv.add(0, cmd);
if (null == confloader) {
setConf(getProject().getProperty("hadoop.conf.dir"));
}
int exit_code = 0;
try {
pushContext();
Configuration conf = new HdfsConfiguration();
conf.setClassLoader(confloader);
exit_code = ToolRunner.run(conf, shell,
argv.toArray(new String[argv.size()]));
exit_code = postCmd(exit_code);
if (0 > exit_code) {
StringBuilder msg = new StringBuilder();
for (String s : argv)
msg.append(s + " ");
msg.append("failed: " + exit_code);
throw new Exception(msg.toString());
}
} catch (Exception e) {
if (failonerror)
throw new BuildException(e);
} finally {
popContext();
}
}
}

View File

@ -1,29 +0,0 @@
<?xml version="1.0"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<antlib>
<taskdef name="hdfs"
classname="org.apache.hadoop.ant.DfsTask" />
<taskdef name="exists"
classname="org.apache.hadoop.ant.condition.DfsExists" />
<taskdef name="isdir"
classname="org.apache.hadoop.ant.condition.DfsIsDir" />
<taskdef name="sizezero"
classname="org.apache.hadoop.ant.condition.DfsZeroLen" />
</antlib>

View File

@ -1,68 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.ant.condition;
import org.apache.tools.ant.taskdefs.condition.Condition;
/**
* This wrapper around {@link org.apache.hadoop.ant.DfsTask} implements the
* Ant &gt;1.5
* {@link org.apache.tools.ant.taskdefs.condition.Condition Condition}
* interface for HDFS tests. So one can test conditions like this:
* {@code
* <condition property="precond">
* <and>
* <hadoop:exists file="fileA" />
* <hadoop:exists file="fileB" />
* <hadoop:sizezero file="fileB" />
* </and>
* </condition>
* }
* This will define the property precond if fileA exists and fileB has zero
* length.
*/
public abstract class DfsBaseConditional extends org.apache.hadoop.ant.DfsTask
implements Condition {
protected boolean result;
String file;
private void initArgs() {
setCmd("test");
setArgs("-" + getFlag() + "," + file);
}
public void setFile(String file) {
this.file = file;
}
protected abstract char getFlag();
protected int postCmd(int exit_code) {
exit_code = super.postCmd(exit_code);
result = exit_code == 0;
return exit_code;
}
public boolean eval() {
initArgs();
execute();
return result;
}
}

View File

@ -1,24 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.ant.condition;
public class DfsExists extends DfsBaseConditional {
protected final char flag = 'e';
protected char getFlag() { return flag; }
}

View File

@ -1,24 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.ant.condition;
public class DfsIsDir extends DfsBaseConditional {
protected final char flag = 'd';
protected char getFlag() { return flag; }
}

View File

@ -1,24 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.ant.condition;
public class DfsZeroLen extends DfsBaseConditional {
protected final char flag = 'z';
protected char getFlag() { return flag; }
}

View File

@ -297,6 +297,11 @@
<artifactId>hadoop-extras</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-ant</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>

View File

@ -94,6 +94,12 @@
<artifactId>hadoop-sls</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-ant</artifactId>
<version>${project.version}</version>
<scope>compile</scope>
</dependency>
</dependencies>
<build>

View File

@ -37,6 +37,7 @@
<module>hadoop-rumen</module>
<module>hadoop-gridmix</module>
<module>hadoop-datajoin</module>
<module>hadoop-ant</module>
<module>hadoop-tools-dist</module>
<module>hadoop-extras</module>
<module>hadoop-pipes</module>