YARN-5690. Integrate native services modules into maven build. Contributed by Billie Rinaldi

Gour Saha 2016-10-27 08:50:36 -07:00 committed by Jian He
parent ef5a3628c2
commit 09e4b9e897
14 changed files with 343 additions and 42 deletions

View File

@@ -86,6 +86,32 @@
<include>*-sources.jar</include>
</includes>
</fileSet>
<fileSet>
<directory>hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/target</directory>
<outputDirectory>/share/hadoop/${hadoop.component}/sources</outputDirectory>
<includes>
<include>*-sources.jar</include>
</includes>
</fileSet>
<fileSet>
<directory>hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/conf</directory>
<outputDirectory>etc/hadoop</outputDirectory>
</fileSet>
<fileSet>
<directory>hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/target/hadoop-yarn-slider-core-${project.version}</directory>
<outputDirectory>/share/hadoop/${hadoop.component}/lib/slider</outputDirectory>
</fileSet>
<fileSet>
<directory>hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/target</directory>
<outputDirectory>/share/hadoop/${hadoop.component}/sources</outputDirectory>
<includes>
<include>*-sources.jar</include>
</includes>
</fileSet>
<fileSet>
<directory>hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/target/hadoop-yarn-services-api-${project.version}</directory>
<outputDirectory>/share/hadoop/${hadoop.component}/lib/services-api</outputDirectory>
</fileSet>
<fileSet>
<directory>hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-unmanaged-am-launcher/target</directory>
<outputDirectory>/share/hadoop/${hadoop.component}/sources</outputDirectory>

View File

@@ -0,0 +1,36 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
<id>hadoop-yarn-services-api-dist</id>
<formats>
<format>dir</format>
</formats>
<includeBaseDirectory>false</includeBaseDirectory>
<dependencySets>
<dependencySet>
<useProjectArtifact>false</useProjectArtifact>
<includes>
<include>com.fasterxml.jackson.jaxrs:jackson-jaxrs-base</include>
<include>com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider</include>
<include>com.fasterxml.jackson.module:jackson-module-jaxb-annotations</include>
<include>io.swagger:swagger-annotations</include>
</includes>
</dependencySet>
</dependencySets>
</assembly>

View File

@@ -0,0 +1,30 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
<id>hadoop-yarn-slider-dist</id>
<formats>
<format>dir</format>
</formats>
<includeBaseDirectory>false</includeBaseDirectory>
<dependencySets>
<dependencySet>
<useProjectArtifact>false</useProjectArtifact>
</dependencySet>
</dependencySets>
</assembly>

View File

@@ -425,6 +425,12 @@
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-yarn-slider-core</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-jobclient</artifactId>

View File

@@ -49,7 +49,9 @@ function hadoop_usage
hadoop_add_subcommand "router" daemon "run the Router daemon"
hadoop_add_subcommand "schedulerconf" client "Updates scheduler configuration"
hadoop_add_subcommand "scmadmin" admin "SharedCacheManager admin tools"
hadoop_add_subcommand "services-api" "run slider services api"
hadoop_add_subcommand "sharedcachemanager" daemon "run the SharedCacheManager daemon"
hadoop_add_subcommand "slider" "run a slider app"
hadoop_add_subcommand "timelinereader" client "run the timeline reader server"
hadoop_add_subcommand "timelineserver" daemon "run the timeline server"
hadoop_add_subcommand "top" client "view cluster information"
@@ -149,10 +151,38 @@ function yarncmd_case
scmadmin)
HADOOP_CLASSNAME='org.apache.hadoop.yarn.client.SCMAdmin'
;;
services-api)
HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
hadoop_add_classpath "${HADOOP_YARN_HOME}/${YARN_LIB_JARS_DIR}/slider"'/*'
hadoop_add_classpath "${HADOOP_YARN_HOME}/${YARN_LIB_JARS_DIR}/services-api"'/*'
HADOOP_CLASSNAME='org.apache.hadoop.yarn.services.webapp.ApplicationApiWebApp'
hadoop_debug "Append YARN_CLIENT_OPTS onto HADOOP_OPTS"
HADOOP_OPTS="${HADOOP_OPTS} ${YARN_CLIENT_OPTS} \
-Dslider.libdir=${HADOOP_YARN_HOME}/${YARN_DIR},\
${HADOOP_YARN_HOME}/${YARN_LIB_JARS_DIR},\
${HADOOP_YARN_HOME}/${YARN_LIB_JARS_DIR}/slider,\
${HADOOP_HDFS_HOME}/${HDFS_DIR},\
${HADOOP_HDFS_HOME}/${HDFS_LIB_JARS_DIR},\
${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR},\
${HADOOP_COMMON_HOME}/${HADOOP_COMMON_LIB_JARS_DIR}"
;;
sharedcachemanager)
HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
HADOOP_CLASSNAME='org.apache.hadoop.yarn.server.sharedcachemanager.SharedCacheManager'
;;
slider)
hadoop_add_classpath "${HADOOP_YARN_HOME}/${YARN_LIB_JARS_DIR}/slider"'/*'
HADOOP_CLASSNAME='org.apache.slider.Slider'
hadoop_debug "Append YARN_CLIENT_OPTS onto HADOOP_OPTS"
HADOOP_OPTS="${HADOOP_OPTS} ${YARN_CLIENT_OPTS} \
-Dslider.libdir=${HADOOP_YARN_HOME}/${YARN_DIR},\
${HADOOP_YARN_HOME}/${YARN_LIB_JARS_DIR},\
${HADOOP_YARN_HOME}/${YARN_LIB_JARS_DIR}/slider,\
${HADOOP_HDFS_HOME}/${HDFS_DIR},\
${HADOOP_HDFS_HOME}/${HDFS_LIB_JARS_DIR},\
${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR},\
${HADOOP_COMMON_HOME}/${HADOOP_COMMON_LIB_JARS_DIR}"
;;
timelinereader)
HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
HADOOP_CLASSNAME='org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderServer'

View File

@@ -159,12 +159,6 @@
<dependency>
<groupId>com.fasterxml.jackson.jaxrs</groupId>
<artifactId>jackson-jaxrs-json-provider</artifactId>
<exclusions>
<exclusion>
<groupId>com.fasterxml.jackson.jaxrs</groupId>
<artifactId>jackson-jaxrs-base</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
@@ -192,6 +186,44 @@
</dependencies>
<profiles>
<profile>
<id>dist</id>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-assemblies</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies>
<executions>
<execution>
<id>dist</id>
<phase>prepare-package</phase>
<goals>
<goal>single</goal>
</goals>
<configuration>
<appendAssemblyId>false</appendAssemblyId>
<attach>false</attach>
<finalName>${project.artifactId}-${project.version}</finalName>
<descriptorRefs>
<descriptorRef>hadoop-yarn-services-api</descriptorRef>
</descriptorRefs>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>rat</id>

View File

@@ -0,0 +1,68 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This is the log4j configuration for the Slider Application Master
# Log rotation based on size (1MB) with a max of 10 backup files
log4j.rootLogger=INFO, amlog
log4j.threshold=ALL
log4j.appender.amlog=org.apache.log4j.RollingFileAppender
log4j.appender.amlog.layout=org.apache.log4j.PatternLayout
log4j.appender.amlog.File=${LOG_DIR}/slider.log
log4j.appender.amlog.MaxFileSize=1MB
log4j.appender.amlog.MaxBackupIndex=10
# log layout skips stack-trace creation operations by avoiding line numbers and method names
log4j.appender.amlog.layout.ConversionPattern=%d{ISO8601} [%t] %-5p %c{2} - %m%n
# debug edition is much more expensive
#log4j.appender.amlog.layout.ConversionPattern=%d{ISO8601} [%t] %-5p %c{2} (%F:%M(%L)) - %m%n
# configure stderr
# set the conversion pattern of stderr
# Print the date in ISO 8601 format
log4j.appender.stderr=org.apache.log4j.ConsoleAppender
log4j.appender.stderr.Target=System.err
log4j.appender.stderr.layout=org.apache.log4j.PatternLayout
log4j.appender.stderr.layout.ConversionPattern=%d{ISO8601} [%t] %-5p %c{2} - %m%n
log4j.appender.subprocess=org.apache.log4j.ConsoleAppender
log4j.appender.subprocess.layout=org.apache.log4j.PatternLayout
log4j.appender.subprocess.layout.ConversionPattern=[%c{1}]: %m%n
#log4j.logger.org.apache.slider.yarn.appmaster.SliderAppMasterer.master=INFO,subprocess
# for debugging Slider
#log4j.logger.org.apache.slider=DEBUG
# uncomment to debug service lifecycle issues
#log4j.logger.org.apache.hadoop.yarn.service.launcher=DEBUG
#log4j.logger.org.apache.hadoop.yarn.service=DEBUG
# uncomment for YARN operations
#log4j.logger.org.apache.hadoop.yarn.client=DEBUG
# uncomment this to debug security problems
#log4j.logger.org.apache.hadoop.security=DEBUG
# crank back on some noise
log4j.logger.org.apache.hadoop.util.NativeCodeLoader=ERROR
log4j.logger.org.apache.hadoop.hdfs=WARN
log4j.logger.org.apache.hadoop.hdfs.shortcircuit=ERROR
log4j.logger.org.apache.zookeeper=WARN
log4j.logger.org.apache.curator.framework.state=ERROR
log4j.logger.org.apache.curator.framework.imps=WARN

View File

@@ -331,6 +331,44 @@
<profiles>
<profile>
<id>dist</id>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-assemblies</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies>
<executions>
<execution>
<id>dist</id>
<phase>prepare-package</phase>
<goals>
<goal>single</goal>
</goals>
<configuration>
<appendAssemblyId>false</appendAssemblyId>
<attach>false</attach>
<finalName>${project.artifactId}-${project.version}</finalName>
<descriptorRefs>
<descriptorRef>hadoop-yarn-slider-dist</descriptorRef>
</descriptorRefs>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>compile-protobuf</id>
<build>

View File

@@ -2162,6 +2162,24 @@ protected AppMasterLauncher setupAppMasterLauncher(String clustername,
new File(confDir, SliderKeys.LOG4J_SERVER_PROP_FILENAME);
hasServerLog4jProperties = log4jserver.isFile();
}
if (!hasServerLog4jProperties) {
// check for log4j properties in hadoop conf dir
String hadoopConfDir = System.getenv(ApplicationConstants.Environment
.HADOOP_CONF_DIR.name());
if (hadoopConfDir != null) {
File localFile = new File(hadoopConfDir, SliderKeys
.LOG4J_SERVER_PROP_FILENAME);
if (localFile.exists()) {
Path localFilePath = createLocalPath(localFile);
remoteConfPath = new Path(clusterDirectory,
SliderKeys.SUBMITTED_CONF_DIR);
Path remoteFilePath = new Path(remoteConfPath, SliderKeys
.LOG4J_SERVER_PROP_FILENAME);
copy(config, localFilePath, remoteFilePath);
hasServerLog4jProperties = true;
}
}
}
// the assumption here is that minimr cluster => this is a test run
// and the classpath can look after itself
@@ -2300,7 +2318,7 @@ protected AppMasterLauncher setupAppMasterLauncher(String clustername,
// enable asserts
commandLine.enableJavaAssertions();
// if the conf dir has a log4j-server.properties, switch to that
// if the conf dir has a slideram-log4j.properties, switch to that
if (hasServerLog4jProperties) {
commandLine.sysprop(SYSPROP_LOG4J_CONFIGURATION, LOG4J_SERVER_PROP_FILENAME);
commandLine.sysprop(SYSPROP_LOG_DIR, ApplicationConstants.LOG_DIR_EXPANSION_VAR);
@@ -4471,14 +4489,13 @@ public int actionDependency(ActionDependencyArgs args) throws IOException,
return EXIT_SUCCESS;
}
String libDir = System.getProperty(SliderKeys.PROPERTY_LIB_DIR);
if (isSet(libDir)) {
File srcFolder = new File(libDir);
String[] libDirs = SliderUtils.getLibDirs();
if (libDirs.length > 0) {
File tempLibTarGzipFile = File.createTempFile(
SliderKeys.SLIDER_DEPENDENCY_TAR_GZ_FILE_NAME + "_",
SliderKeys.SLIDER_DEPENDENCY_TAR_GZ_FILE_EXT);
// copy all jars except slider-core-<version>.jar
tarGzipFolder(srcFolder, tempLibTarGzipFile, createJarFilter());
// copy all jars
tarGzipFolder(libDirs, tempLibTarGzipFile, createJarFilter());
log.info("Uploading dependency for AM (version {}) from {} to {}",
version, tempLibTarGzipFile.toURI(), dependencyLibTarGzip.toUri());

View File

@@ -182,7 +182,7 @@ public interface SliderKeys extends SliderXmlConfKeys {
/**
* Slider AM log4j file name : {@value}
*/
String LOG4J_SERVER_PROP_FILENAME = "log4j-server.properties";
String LOG4J_SERVER_PROP_FILENAME = "slideram-log4j.properties";
/**
* Standard log4j file name : {@value}

View File

@@ -25,6 +25,7 @@
import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream;
import org.apache.commons.io.output.ByteArrayOutputStream;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
@@ -1399,6 +1400,22 @@ public static void forceLogin() throws IOException {
}
}
public static String getLibDir() {
String[] libDirs = getLibDirs();
if (libDirs == null || libDirs.length == 0) {
return null;
}
return libDirs[0];
}
public static String[] getLibDirs() {
String libDirStr = System.getProperty(SliderKeys.PROPERTY_LIB_DIR);
if (isUnset(libDirStr)) {
return ArrayUtils.EMPTY_STRING_ARRAY;
}
return StringUtils.split(libDirStr, ',');
}
/**
* Submit a JAR containing a specific class and map it
* @param providerResources provider map to build up
@@ -1962,31 +1979,34 @@ public static void zipFolder(File srcFolder, File zipFile) throws IOException {
/**
* Given a source folder create a tar.gz file
*
* @param srcFolder
* @param libDirs
* @param tarGzipFile
*
* @throws IOException
*/
public static void tarGzipFolder(File srcFolder, File tarGzipFile,
public static void tarGzipFolder(String[] libDirs, File tarGzipFile,
FilenameFilter filter) throws IOException {
log.info("Tar-gzipping folder {} to {}", srcFolder.getAbsolutePath(),
log.info("Tar-gzipping folders {} to {}", libDirs,
tarGzipFile.getAbsolutePath());
List<String> files = new ArrayList<>();
generateFileList(files, srcFolder, srcFolder, true, filter);
try(TarArchiveOutputStream taos =
new TarArchiveOutputStream(new GZIPOutputStream(
new BufferedOutputStream(new FileOutputStream(tarGzipFile))))) {
for (String file : files) {
File srcFile = new File(srcFolder, file);
TarArchiveEntry tarEntry = new TarArchiveEntry(
srcFile, file);
taos.putArchiveEntry(tarEntry);
try(FileInputStream in = new FileInputStream(srcFile)) {
org.apache.commons.io.IOUtils.copy(in, taos);
for (String libDir : libDirs) {
File srcFolder = new File(libDir);
List<String> files = new ArrayList<>();
generateFileList(files, srcFolder, srcFolder, true, filter);
for (String file : files) {
File srcFile = new File(srcFolder, file);
TarArchiveEntry tarEntry = new TarArchiveEntry(
srcFile, file);
taos.putArchiveEntry(tarEntry);
try(FileInputStream in = new FileInputStream(srcFile)) {
org.apache.commons.io.IOUtils.copy(in, taos);
}
taos.flush();
taos.closeArchiveEntry();
}
taos.flush();
taos.closeArchiveEntry();
}
}
}
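Taken together, the SliderUtils changes above mean slider.libdir can now name several directories (comma-separated) and the AM dependency tarball is assembled from all of them. A minimal sketch of how the new helpers combine is shown below; it is not part of this commit, the Slider package names are assumed from the existing source layout, and the paths and jar filter are purely illustrative.

// Illustrative sketch only (not from the patch): exercises the new
// getLibDirs()/tarGzipFolder(String[], ...) API added above. Assumes
// slider-core is on the classpath; package names follow the Slider layout.
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;

import org.apache.slider.common.SliderKeys;
import org.apache.slider.common.tools.SliderUtils;

public class LibDirsTarSketch {
  public static void main(String[] args) throws IOException {
    // slider.libdir (SliderKeys.PROPERTY_LIB_DIR) may now hold a comma-separated
    // list, matching the -Dslider.libdir=dir1,dir2,... built up in the yarn script.
    System.setProperty(SliderKeys.PROPERTY_LIB_DIR,
        "/opt/hadoop/share/hadoop/yarn/lib/slider,/opt/hadoop/share/hadoop/yarn/lib");

    String[] libDirs = SliderUtils.getLibDirs();   // splits the property on ','
    File tarGz = File.createTempFile("slider-dependency-", ".tar.gz");

    // keep only jar files, roughly what actionDependency's createJarFilter() does
    FilenameFilter jarsOnly = (dir, name) -> name.endsWith(".jar");

    // each directory in libDirs is walked and written into the single tar.gz
    SliderUtils.tarGzipFolder(libDirs, tarGz, jarsOnly);
  }
}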

View File

@@ -562,8 +562,7 @@ protected void runCommand(
}
private void expandAgentTar(File agentPkgDir) throws IOException {
String libDirProp =
System.getProperty(PROPERTY_LIB_DIR);
String libDirProp = SliderUtils.getLibDir();
File tarFile = new File(libDirProp, AGENT_TAR);
expandTar(tarFile, agentPkgDir);
}

View File

@@ -184,19 +184,20 @@ public void prepareAMAndConfigForLaunch(SliderFileSystem fileSystem,
libdir,
miniClusterTestRun);
String libDirProp =
System.getProperty(SliderKeys.PROPERTY_LIB_DIR);
log.info("Loading all dependencies for AM.");
// If slider.tar.gz is available in hdfs use it, else upload all jars
Path dependencyLibTarGzip = fileSystem.getDependencyTarGzip();
if (fileSystem.isFile(dependencyLibTarGzip)) {
SliderUtils.putAmTarGzipAndUpdate(providerResources, fileSystem);
} else {
ProviderUtils.addAllDependencyJars(providerResources,
fileSystem,
tempPath,
libdir,
libDirProp);
for (String libDirProp : SliderUtils.getLibDirs()) {
ProviderUtils.addAllDependencyJars(providerResources,
fileSystem,
tempPath,
libdir,
libDirProp);
}
}
addKeytabResourceIfNecessary(fileSystem,
instanceDescription,

View File

@@ -82,16 +82,14 @@ public void testXml() throws IOException {
String output = configurationOutputter.asString().replaceAll("( |\\r|\\n)",
"");
assert output.contains(
"<configuration><property><name>key1</name><value>val1</value><source/></property></configuration>");
assert output.contains("<name>key1</name><value>val1</value>");
File file = tmpDir.newFile();
configurationOutputter.save(file);
assert FileUtils.readFileToString(file, Charsets.UTF_8)
.replaceAll("( |\\r|\\n)", "")
.contains(
"<configuration><property><name>key1</name><value>val1</value><source/></property></configuration>");
.contains("<name>key1</name><value>val1</value>");
}
@Test
@@ -103,14 +101,14 @@ public void testHadoopXml() throws IOException {
String output = configurationOutputter.asString().replaceAll("( |\\r|\\n)",
"");
assert output.contains("<configuration><property><name>key1</name><value>val1</value><source/></property></configuration>");
assert output.contains("<name>key1</name><value>val1</value>");
File file = tmpDir.newFile();
configurationOutputter.save(file);
assert FileUtils.readFileToString(file, Charsets.UTF_8)
.replaceAll("( |\\r|\\n)", "")
.contains( "<configuration><property><name>key1</name><value>val1</value><source/></property></configuration>");
.contains("<name>key1</name><value>val1</value>");
}
@Test