YARN-7113. Clean up packaging and dependencies for yarn-native-services. Contributed by Billie Rinaldi
This commit is contained in:
parent 40ab068eab
commit fecf22b2fd
NOTICE.txt | 14
@@ -581,3 +581,17 @@ The binary distribution of this product bundles binaries of
Ehcache 3.3.1,
which has the following notices:
* Ehcache V3 Copyright 2014-2016 Terracotta, Inc.

JCommander (https://github.com/cbeust/jcommander),
which has the following notices:
* Copyright 2010 Cedric Beust cedric@beust.com

The binary distribution of this product bundles binaries of
snakeyaml (https://bitbucket.org/asomov/snakeyaml),
which has the following notices:
* Copyright (c) 2008, http://www.snakeyaml.org

The binary distribution of this product bundles binaries of
swagger-annotations (https://github.com/swagger-api/swagger-core),
which has the following notices:
* Copyright 2016 SmartBear Software

@@ -97,10 +97,6 @@
<directory>hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/conf</directory>
<outputDirectory>etc/hadoop</outputDirectory>
</fileSet>
<fileSet>
<directory>hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/target/hadoop-yarn-services-core-${project.version}</directory>
<outputDirectory>/share/hadoop/${hadoop.component}/lib/services</outputDirectory>
</fileSet>
<fileSet>
<directory>hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/target</directory>
<outputDirectory>/share/hadoop/${hadoop.component}/sources</outputDirectory>

@@ -108,10 +104,6 @@
<include>*-sources.jar</include>
</includes>
</fileSet>
<fileSet>
<directory>hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/target/hadoop-yarn-services-api-${project.version}</directory>
<outputDirectory>/share/hadoop/${hadoop.component}/lib/services-api</outputDirectory>
</fileSet>
<fileSet>
<directory>hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-unmanaged-am-launcher/target</directory>
<outputDirectory>/share/hadoop/${hadoop.component}/sources</outputDirectory>

@@ -1,36 +0,0 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
<id>hadoop-yarn-services-api-dist</id>
<formats>
<format>dir</format>
</formats>
<includeBaseDirectory>false</includeBaseDirectory>
<dependencySets>
<dependencySet>
<useProjectArtifact>false</useProjectArtifact>
<includes>
<include>com.fasterxml.jackson.jaxrs:jackson-jaxrs-base</include>
<include>com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider</include>
<include>com.fasterxml.jackson.module:jackson-module-jaxb-annotations</include>
<include>io.swagger:swagger-annotations</include>
</includes>
</dependencySet>
</dependencySets>
</assembly>

@@ -1,30 +0,0 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
<id>hadoop-yarn-services-dist</id>
<formats>
<format>dir</format>
</formats>
<includeBaseDirectory>false</includeBaseDirectory>
<dependencySets>
<dependencySet>
<useProjectArtifact>false</useProjectArtifact>
</dependencySet>
</dependencySets>
</assembly>

@@ -145,7 +145,8 @@
<declared.hadoop.version>${project.version}</declared.hadoop.version>

<swagger-annotations-version>1.5.4</swagger-annotations-version>
<maven-doxia-module-markdown.version>1.4</maven-doxia-module-markdown.version>
<snakeyaml.version>1.16</snakeyaml.version>
<jcommander.version>1.30</jcommander.version>
</properties>

<dependencyManagement>

@@ -607,6 +608,11 @@
<artifactId>javax.servlet-api</artifactId>
<version>3.1.0</version>
</dependency>
<dependency>
<groupId>javax.ws.rs</groupId>
<artifactId>jsr311-api</artifactId>
<version>1.1.1</version>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-server</artifactId>

@@ -1343,9 +1349,14 @@
<version>${jackson2.version}</version>
</dependency>
<dependency>
<groupId>org.apache.maven.doxia</groupId>
<artifactId>doxia-module-markdown</artifactId>
<version>${maven-doxia-module-markdown.version}</version>
<groupId>org.yaml</groupId>
<artifactId>snakeyaml</artifactId>
<version>${snakeyaml.version}</version>
</dependency>
<dependency>
<groupId>com.beust</groupId>
<artifactId>jcommander</artifactId>
<version>${jcommander.version}</version>
</dependency>

</dependencies>

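Note that hadoop-project/pom.xml only pins these versions under <dependencyManagement>; a module still has to declare each dependency to pull it in. As a quick check, the stock maven-dependency-plugin can confirm which managed versions actually resolve for a module (a sketch; the module path is an assumption about the source layout):

  # Show where snakeyaml and jcommander enter the services-core
  # dependency tree, and at which managed versions.
  mvn -q dependency:tree \
    -pl hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core \
    -Dincludes=org.yaml:snakeyaml,com.beust:jcommander
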
@@ -31,6 +31,7 @@ function hadoop_usage
hadoop_add_option "--hosts filename" "list of hosts to use in worker mode"
hadoop_add_option "--workers" "turn on worker mode"

hadoop_add_subcommand "apiserver" "run yarn-native-service rest server"
hadoop_add_subcommand "application" client "prints application(s) report/kill application"
hadoop_add_subcommand "applicationattempt" client "prints applicationattempt(s) report"
hadoop_add_subcommand "classpath" client "prints the class path needed to get the hadoop jar and the required libraries"

@@ -52,6 +53,7 @@ function hadoop_usage
hadoop_add_subcommand "apiserver" "run yarn-native-service rest server"
hadoop_add_subcommand "sharedcachemanager" daemon "run the SharedCacheManager daemon"
hadoop_add_subcommand "service" "run a service"
hadoop_add_subcommand "sharedcachemanager" admin "run the SharedCacheManager daemon"
hadoop_add_subcommand "timelinereader" client "run the timeline reader server"
hadoop_add_subcommand "timelineserver" daemon "run the timeline server"
hadoop_add_subcommand "top" client "view cluster information"

@@ -70,6 +72,18 @@ function yarncmd_case
shift

case ${subcmd} in
apiserver)
HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
HADOOP_CLASSNAME='org.apache.hadoop.yarn.service.webapp.ApiServerWebApp'
local sld="${HADOOP_YARN_HOME}/${YARN_DIR},\
${HADOOP_YARN_HOME}/${YARN_LIB_JARS_DIR},\
${HADOOP_HDFS_HOME}/${HDFS_DIR},\
${HADOOP_HDFS_HOME}/${HDFS_LIB_JARS_DIR},\
${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR},\
${HADOOP_COMMON_HOME}/${HADOOP_COMMON_LIB_JARS_DIR}"
hadoop_translate_cygwin_path sld
hadoop_add_param HADOOP_OPTS service.libdir "-Dservice.libdir=${sld}"
;;
application|applicationattempt|container)
HADOOP_CLASSNAME=org.apache.hadoop.yarn.client.cli.ApplicationCLI
set -- "${subcmd}" "$@"

@@ -151,14 +165,10 @@ function yarncmd_case
scmadmin)
HADOOP_CLASSNAME='org.apache.hadoop.yarn.client.SCMAdmin'
;;
apiserver)
HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
hadoop_add_classpath "${HADOOP_YARN_HOME}/${YARN_LIB_JARS_DIR}/services"'/*'
hadoop_add_classpath "${HADOOP_YARN_HOME}/${YARN_LIB_JARS_DIR}/services-api"'/*'
HADOOP_CLASSNAME='org.apache.hadoop.yarn.service.webapp.ApiServerWebApp'
service)
HADOOP_CLASSNAME='org.apache.hadoop.yarn.service.client.ServiceCLI'
local sld="${HADOOP_YARN_HOME}/${YARN_DIR},\
${HADOOP_YARN_HOME}/${YARN_LIB_JARS_DIR},\
${HADOOP_YARN_HOME}/${YARN_LIB_JARS_DIR}/services,\
${HADOOP_HDFS_HOME}/${HDFS_DIR},\
${HADOOP_HDFS_HOME}/${HDFS_LIB_JARS_DIR},\
${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR},\

@@ -170,19 +180,6 @@ ${HADOOP_COMMON_HOME}/${HADOOP_COMMON_LIB_JARS_DIR}"
HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
HADOOP_CLASSNAME='org.apache.hadoop.yarn.server.sharedcachemanager.SharedCacheManager'
;;
service)
hadoop_add_classpath "${HADOOP_YARN_HOME}/${YARN_LIB_JARS_DIR}/services"'/*'
HADOOP_CLASSNAME='org.apache.hadoop.yarn.service.client.ServiceCLI'
local sld="${HADOOP_YARN_HOME}/${YARN_DIR},\
${HADOOP_YARN_HOME}/${YARN_LIB_JARS_DIR},\
${HADOOP_YARN_HOME}/${YARN_LIB_JARS_DIR}/services,\
${HADOOP_HDFS_HOME}/${HDFS_DIR},\
${HADOOP_HDFS_HOME}/${HDFS_LIB_JARS_DIR},\
${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR},\
${HADOOP_COMMON_HOME}/${HADOOP_COMMON_LIB_JARS_DIR}"
hadoop_translate_cygwin_path sld
hadoop_add_param HADOOP_OPTS service.libdir "-Dservice.libdir=${sld}"
;;
timelinereader)
HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
HADOOP_CLASSNAME='org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderServer'

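Net effect of the script changes: "apiserver" and "service" become regular yarn subcommands, each assembling a service.libdir from the YARN/HDFS/common lib directories, and apiserver supports daemonization. A usage sketch (assuming a built distribution on the PATH; the service action is illustrative):

  # run the services REST server in the foreground, or as a daemon
  yarn apiserver
  yarn --daemon start apiserver

  # invoke the service CLI (actions depend on ServiceCLI)
  yarn service <action>
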
@@ -23,7 +23,6 @@
</parent>
<artifactId>hadoop-yarn-services-api</artifactId>
<name>Apache Hadoop YARN Services API</name>
<version>3.0.0-beta1-SNAPSHOT</version>
<packaging>jar</packaging>
<description>Hadoop YARN REST APIs for services</description>

@@ -46,7 +45,6 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>${maven-jar-plugin.version}</version>
<!-- The configuration of the plugin -->
<configuration>
<!-- Configuration of the archiver -->

@@ -59,9 +57,6 @@
<manifest>
</manifest>
</archive>
<excludes>
<exclude>**/run_rest_service.sh</exclude>
</excludes>
</configuration>
<executions>
<execution>

@@ -92,97 +87,34 @@
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-yarn-services-core</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>io.swagger</groupId>
<artifactId>swagger-annotations</artifactId>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-yarn-api</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-yarn-common</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.jaxrs</groupId>
<artifactId>jackson-jaxrs-json-provider</artifactId>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-webapp</artifactId>
</dependency>
<dependency>
<groupId>com.google.inject</groupId>
<artifactId>guice</artifactId>
</dependency>
<dependency>
<groupId>javax.ws.rs</groupId>
<artifactId>jsr311-api</artifactId>
</dependency>
</dependencies>

<profiles>
<profile>
<id>dist</id>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-assemblies</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies>
<executions>
<execution>
<id>dist</id>
<phase>prepare-package</phase>
<goals>
<goal>single</goal>
</goals>
<configuration>
<appendAssemblyId>false</appendAssemblyId>
<attach>false</attach>
<finalName>${project.artifactId}-${project.version}</finalName>
<descriptorRefs>
<descriptorRef>hadoop-yarn-services-api</descriptorRef>
</descriptorRefs>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>

<profile>
<id>rat</id>
<build>
<plugins>
<plugin>
<groupId>org.apache.rat</groupId>
<artifactId>apache-rat-plugin</artifactId>
<version>${apache-rat-plugin.version}</version>
<executions>
<execution>
<id>check-licenses</id>
<goals>
<goal>check</goal>
</goals>
</execution>
</executions>
<configuration>
<excludes>
<exclude>**/*.json</exclude>
<exclude>**/THIRD-PARTY.properties</exclude>
</excludes>
</configuration>
</plugin>
</plugins>
</build>
</profile>

</profiles>

</project>

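With the per-module assembly descriptor deleted, packaging now goes through the shared hadoop-assemblies descriptorRef declared in the dist profile above. A build sketch (stock Maven flags; only the profile id comes from this pom):

  # run the maven-assembly-plugin execution bound to prepare-package
  mvn package -Pdist -DskipTests
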
@@ -59,8 +59,10 @@ public class ApiServerWebApp extends AbstractService {
public static void main(String[] args) throws IOException {
ApiServerWebApp apiWebApp = new ApiServerWebApp();
try {
apiWebApp.startWebApp();
apiWebApp.init(new YarnConfiguration());
apiWebApp.serviceStart();
} catch (Exception e) {
logger.error("Got exception starting", e);
apiWebApp.close();
}
}

@@ -104,7 +104,6 @@
<dependency>
<groupId>com.beust</groupId>
<artifactId>jcommander</artifactId>
<version>1.30</version>
</dependency>

<dependency>

@@ -126,37 +125,16 @@
<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
<scope>compile</scope>
</dependency>

<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-jaxrs</artifactId>
<scope>compile</scope>
</dependency>

<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
<scope>compile</scope>
</dependency>

<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-xc</artifactId>
<scope>compile</scope>
</dependency>

<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>

<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
</dependency>

<dependency>

@@ -167,25 +145,31 @@
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-yarn-client</artifactId>
<scope>compile</scope>
</dependency>

<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-yarn-server-web-proxy</artifactId>
<scope>compile</scope>
</dependency>

<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-yarn-registry</artifactId>
<scope>compile</scope>
</dependency>

<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-yarn-common</artifactId>
</dependency>

<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
</dependency>

<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-annotations</artifactId>
</dependency>

<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-yarn-api</artifactId>
</dependency>

<dependency>

@@ -195,13 +179,12 @@

<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-compress</artifactId>
<artifactId>commons-configuration2</artifactId>
</dependency>

<dependency>
<groupId>commons-digester</groupId>
<artifactId>commons-digester</artifactId>
<version>1.8</version>
<groupId>org.apache.commons</groupId>
<artifactId>commons-compress</artifactId>
</dependency>

<dependency>

@@ -215,37 +198,13 @@
</dependency>

<dependency>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
<groupId>org.apache.curator</groupId>
<artifactId>curator-client</artifactId>
</dependency>

<dependency>
<groupId>com.codahale.metrics</groupId>
<artifactId>metrics-core</artifactId>
</dependency>

<dependency>
<groupId>com.codahale.metrics</groupId>
<artifactId>metrics-servlets</artifactId>
<version>3.0.1</version>
</dependency>

<!-- ======================================================== -->
<!-- service registry -->
<!-- ======================================================== -->

<dependency>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
</dependency>

<!-- ======================================================== -->
<!-- Jersey and webapp support -->
<!-- ======================================================== -->

<dependency>
<groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId>
<groupId>org.apache.curator</groupId>
<artifactId>curator-framework</artifactId>
</dependency>

<dependency>

@@ -254,38 +213,23 @@
</dependency>

<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-client</artifactId>
<groupId>org.yaml</groupId>
<artifactId>snakeyaml</artifactId>
</dependency>

<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-json</artifactId>
<groupId>io.swagger</groupId>
<artifactId>swagger-annotations</artifactId>
</dependency>

<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-server</artifactId>
</dependency>
<!-- ======================================================== -->
<!-- Test dependencies -->
<!-- ======================================================== -->

<dependency>
<groupId>com.google.inject</groupId>
<artifactId>guice</artifactId>
</dependency>

<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
</dependency>

<dependency>
<groupId>com.google.inject.extensions</groupId>
<artifactId>guice-servlet</artifactId>
</dependency>

<dependency>
<groupId>com.sun.jersey.contribs</groupId>
<artifactId>jersey-guice</artifactId>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>

<dependency>

@@ -294,115 +238,24 @@
<scope>test</scope>
</dependency>

<dependency>
<groupId>org.easymock</groupId>
<artifactId>easymock</artifactId>
<version>3.1</version>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.objenesis</groupId>
<artifactId>objenesis</artifactId>
</exclusion>
</exclusions>
</dependency>

<dependency>
<groupId>org.powermock</groupId>
<artifactId>powermock-api-easymock</artifactId>
<version>1.6.5</version>
<scope>test</scope>
</dependency>

<dependency>
<groupId>org.powermock</groupId>
<artifactId>powermock-module-junit4</artifactId>
<version>1.6.5</version>
<exclusions>
<exclusion>
<groupId>org.javassist</groupId>
<artifactId>javassist</artifactId>
</exclusion>
<exclusion>
<groupId>org.objenesis</groupId>
<artifactId>objenesis</artifactId>
</exclusion>
</exclusions>
</dependency>

<dependency>
<groupId>javax.servlet.jsp</groupId>
<artifactId>jsp-api</artifactId>
<scope>runtime</scope>
</dependency>

<dependency>
<groupId>org.codehaus.jettison</groupId>
<artifactId>jettison</artifactId>
</dependency>

<dependency>
<groupId>org.yaml</groupId>
<artifactId>snakeyaml</artifactId>
<version>1.16</version>
<scope>compile</scope>
</dependency>

<dependency>
<groupId>io.swagger</groupId>
<artifactId>swagger-annotations</artifactId>
<version>1.5.4</version>
</dependency>

<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minicluster</artifactId>
<scope>test</scope>
</dependency>

<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-yarn-server-resourcemanager</artifactId>
<scope>test</scope>
</dependency>

<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-test</artifactId>
<scope>test</scope>
</dependency>

</dependencies>


<profiles>
<profile>
<id>dist</id>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-assemblies</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies>
<executions>
<execution>
<id>dist</id>
<phase>prepare-package</phase>
<goals>
<goal>single</goal>
</goals>
<configuration>
<appendAssemblyId>false</appendAssemblyId>
<attach>false</attach>
<finalName>${project.artifactId}-${project.version}</finalName>
<descriptorRefs>
<descriptorRef>hadoop-yarn-services-dist</descriptorRef>
</descriptorRefs>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>

</profiles>

</project>

@@ -1,76 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.hadoop.yarn.service.client.params;

import com.beust.jcommander.Parameter;
import com.beust.jcommander.Parameters;
import org.apache.hadoop.yarn.service.utils.SliderUtils;
import org.apache.hadoop.yarn.service.exceptions.BadCommandArgumentsException;
import org.apache.hadoop.yarn.service.exceptions.UsageException;

import java.io.File;
import java.util.ArrayList;
import java.util.List;

@Parameters(commandNames = { SliderActions.ACTION_KDIAG},
commandDescription = SliderActions.DESCRIBE_ACTION_KDIAG)

public class ActionKDiagArgs extends AbstractActionArgs {

@Override
public String getActionName() {
return SliderActions.ACTION_KDIAG;
}

@Parameter(names = {ARG_SERVICES}, variableArity = true,
description =" list of services to check")
public List<String> services = new ArrayList<>();

@Parameter(names = {ARG_OUTPUT, ARG_OUTPUT_SHORT},
description = "output file for report")
public File out;

@Parameter(names = {ARG_KEYTAB}, description = "keytab to use")
public File keytab;

@Parameter(names = {ARG_KEYLEN}, description = "minimum key length")
public int keylen = 256;

@Parameter(names = {ARG_PRINCIPAL}, description = "principal to log in from a keytab")
public String principal;

@Parameter(names = {ARG_SECURE}, description = "Is security required")
public boolean secure = false;

@Override
public int getMinParams() {
return 0;
}

@Override
public void validate() throws BadCommandArgumentsException, UsageException {
super.validate();
if (keytab != null && SliderUtils.isUnset(principal)) {
throw new UsageException("Missing argument " + ARG_PRINCIPAL);
}
if (keytab == null && SliderUtils.isSet(principal)) {
throw new UsageException("Missing argument " + ARG_KEYTAB);
}
}
}

@@ -47,7 +47,6 @@ public class ClientArgs extends CommonArgs {
private final ActionFlexArgs actionFlexArgs = new ActionFlexArgs();
private final ActionFreezeArgs actionFreezeArgs = new ActionFreezeArgs();
private final ActionHelpArgs actionHelpArgs = new ActionHelpArgs();
private final ActionKDiagArgs actionKDiagArgs = new ActionKDiagArgs();
private final ActionKeytabArgs actionKeytabArgs = new ActionKeytabArgs();
private final ActionListArgs actionListArgs = new ActionListArgs();
private final ActionRegistryArgs actionRegistryArgs = new ActionRegistryArgs();

@@ -207,10 +206,6 @@ public class ClientArgs extends CommonArgs {
bindCoreAction(actionHelpArgs);
break;

case ACTION_KDIAG:
bindCoreAction(actionKDiagArgs);
break;

case ACTION_KEYTAB:
bindCoreAction(actionKeytabArgs);
break;

@@ -19,8 +19,6 @@
package org.apache.hadoop.yarn.service.registry;

import com.google.common.base.Preconditions;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.PathNotFoundException;
import org.apache.hadoop.registry.client.api.RegistryConstants;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;

@@ -32,6 +30,8 @@ import org.apache.hadoop.registry.client.binding.RegistryPathUtils;
import org.apache.hadoop.registry.client.types.ServiceRecord;
import org.apache.hadoop.yarn.service.component.instance.ComponentInstanceId;
import org.apache.hadoop.yarn.service.utils.SliderUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.List;

@@ -43,8 +43,8 @@ import static org.apache.hadoop.registry.client.binding.RegistryPathUtils.join;
* is registered, offers access to the record and other things.
*/
public class YarnRegistryViewForProviders {
private static final Log LOG =
LogFactory.getLog(YarnRegistryViewForProviders.class);
private static final Logger LOG =
LoggerFactory.getLogger(YarnRegistryViewForProviders.class);

private final RegistryOperations registryOperations;
private final String user;

@@ -1,680 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.hadoop.yarn.service.utils;

import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.SaslPropertiesResolver;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.util.ExitUtil;
import org.apache.hadoop.util.Shell;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.crypto.Cipher;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.lang.reflect.InvocationTargetException;
import java.net.InetAddress;
import java.security.NoSuchAlgorithmException;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.regex.Pattern;

import static org.apache.hadoop.security.UserGroupInformation.*;
import static org.apache.hadoop.security.authentication.util.KerberosUtil.*;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.*;

/**
* Kerberos diagnostics
* At some point this may move to hadoop core, so please keep use of slider
* methods and classes to ~0.
*
* This operation expands some of the diagnostic output of the security code,
* but not all. For completeness
*
* Set the environment variable {@code HADOOP_JAAS_DEBUG=true}
* Set the log level for {@code org.apache.hadoop.security=DEBUG}
*/
public class KerberosDiags implements Closeable {

private static final Logger LOG = LoggerFactory.getLogger(KerberosDiags.class);
public static final String KRB5_CCNAME = "KRB5CCNAME";
public static final String JAVA_SECURITY_KRB5_CONF
= "java.security.krb5.conf";
public static final String JAVA_SECURITY_KRB5_REALM
= "java.security.krb5.realm";
public static final String SUN_SECURITY_KRB5_DEBUG
= "sun.security.krb5.debug";
public static final String SUN_SECURITY_SPNEGO_DEBUG
= "sun.security.spnego.debug";
public static final String SUN_SECURITY_JAAS_FILE
= "java.security.auth.login.config";
public static final String KERBEROS_KINIT_COMMAND
= "hadoop.kerberos.kinit.command";
public static final String HADOOP_AUTHENTICATION_IS_DISABLED
= "Hadoop authentication is disabled";
public static final String UNSET = "(unset)";
public static final String NO_DEFAULT_REALM = "Cannot locate default realm";

private final Configuration conf;
private final List<String> services;
private final PrintStream out;
private final File keytab;
private final String principal;
private final long minKeyLength;
private final boolean securityRequired;

public static final String CAT_JVM = "JVM";
public static final String CAT_JAAS = "JAAS";
public static final String CAT_CONFIG = "CONFIG";
public static final String CAT_LOGIN = "LOGIN";
public static final String CAT_KERBEROS = "KERBEROS";
public static final String CAT_SASL = "SASL";

@SuppressWarnings("IOResourceOpenedButNotSafelyClosed")
public KerberosDiags(Configuration conf,
PrintStream out,
List<String> services,
File keytab,
String principal,
long minKeyLength,
boolean securityRequired) {
this.conf = conf;
this.services = services;
this.keytab = keytab;
this.principal = principal;
this.out = out;
this.minKeyLength = minKeyLength;
this.securityRequired = securityRequired;
}

@Override
public void close() throws IOException {
flush();
}

/**
* Execute diagnostics.
* <p>
* Things it would be nice if UGI made accessible
* <ol>
* <li>A way to enable JAAS debug programatically</li>
* <li>Access to the TGT</li>
* </ol>
* @return true if security was enabled and all probes were successful
* @throws KerberosDiagsFailure explicitly raised failure
* @throws Exception other security problems
*/
@SuppressWarnings("deprecation")
public boolean execute() throws Exception {

title("Kerberos Diagnostics scan at %s",
new Date(System.currentTimeMillis()));

// check that the machine has a name
println("Hostname: %s",
InetAddress.getLocalHost().getCanonicalHostName());

// Fail fast on a JVM without JCE installed.
validateKeyLength();

// look at realm
println("JVM Kerberos Login Module = %s", getKrb5LoginModuleName());
printDefaultRealm();

title("System Properties");
for (String prop : new String[]{
JAVA_SECURITY_KRB5_CONF,
JAVA_SECURITY_KRB5_REALM,
SUN_SECURITY_KRB5_DEBUG,
SUN_SECURITY_SPNEGO_DEBUG,
SUN_SECURITY_JAAS_FILE
}) {
printSysprop(prop);
}

title("Environment Variables");
for (String env : new String[]{
"HADOOP_JAAS_DEBUG",
KRB5_CCNAME,
"HADOOP_USER_NAME",
"HADOOP_PROXY_USER",
HADOOP_TOKEN_FILE_LOCATION,
}) {
printEnv(env);
}

for (String prop : new String[]{
KERBEROS_KINIT_COMMAND,
HADOOP_SECURITY_AUTHENTICATION,
HADOOP_SECURITY_AUTHORIZATION,
"hadoop.kerberos.min.seconds.before.relogin", // not in 2.6
"hadoop.security.dns.interface", // not in 2.6
"hadoop.security.dns.nameserver", // not in 2.6
HADOOP_RPC_PROTECTION,
HADOOP_SECURITY_SASL_PROPS_RESOLVER_CLASS,
HADOOP_SECURITY_CRYPTO_CODEC_CLASSES_KEY_PREFIX,
HADOOP_SECURITY_GROUP_MAPPING,
"hadoop.security.impersonation.provider.class", // not in 2.6
"dfs.data.transfer.protection" // HDFS
}) {
printConfOpt(prop);
}

// check that authentication is enabled
if (SecurityUtil.getAuthenticationMethod(conf)
.equals(AuthenticationMethod.SIMPLE)) {
println(HADOOP_AUTHENTICATION_IS_DISABLED);
failif(securityRequired, CAT_CONFIG, HADOOP_AUTHENTICATION_IS_DISABLED);
// no security, skip rest of test
return false;
}

validateKrb5File();
validateSasl(HADOOP_SECURITY_SASL_PROPS_RESOLVER_CLASS);
validateSasl("dfs.data.transfer.saslproperties.resolver.class");
validateKinitExecutable();
validateJAAS();
// now the big test: login, then try again
boolean krb5Debug = getAndSet(SUN_SECURITY_KRB5_DEBUG);
boolean spnegoDebug = getAndSet(SUN_SECURITY_SPNEGO_DEBUG);
try {
title("Logging in");

if (keytab != null) {
dumpKeytab(keytab);
loginFromKeytab();
} else {
UserGroupInformation loginUser = getLoginUser();
dumpUGI("Log in user", loginUser);
validateUGI("Login user", loginUser);
println("Ticket based login: %b", isLoginTicketBased());
println("Keytab based login: %b", isLoginKeytabBased());
}

return true;
} finally {
// restore original system properties
System.setProperty(SUN_SECURITY_KRB5_DEBUG,
Boolean.toString(krb5Debug));
System.setProperty(SUN_SECURITY_SPNEGO_DEBUG,
Boolean.toString(spnegoDebug));
}
}

/**
* Fail fast on a JVM without JCE installed.
*
* This is a recurrent problem
* (that is: it keeps creeping back with JVM updates);
* a fast failure is the best tactic
* @throws NoSuchAlgorithmException
*/

protected void validateKeyLength() throws NoSuchAlgorithmException {
int aesLen = Cipher.getMaxAllowedKeyLength("AES");
println("Maximum AES encryption key length %d bits", aesLen);
failif (aesLen < minKeyLength,
CAT_JVM,
"Java Cryptography Extensions are not installed on this JVM."
+" Maximum supported key length %s - minimum required %d",
aesLen, minKeyLength);
}

/**
* Get the default realm.
* <p>
* Not having a default realm may be harmless, so is noted at info.
* All other invocation failures are downgraded to warn, as
* follow-on actions may still work.
* failure to invoke the method via introspection is rejected,
* as it's a sign of JVM compatibility issues that may have other
* consequences
*/
protected void printDefaultRealm() {
try {
println("Default Realm = %s",
getDefaultRealm());
} catch (ClassNotFoundException
| IllegalAccessException
| NoSuchMethodException e) {

throw new KerberosDiagsFailure(CAT_JVM, e,
"Failed to invoke krb5.Config.getDefaultRealm: %s", e);
} catch (InvocationTargetException e) {
Throwable cause = e.getCause() != null ? e.getCause() : e;
if (cause.toString().contains(NO_DEFAULT_REALM)) {
// exception raised if there is no default realm. This is not
// always a problem, so downgrade to a message.
println("Host has no default realm");
LOG.debug(cause.toString(), cause);
} else {
println("Kerberos.getDefaultRealm() failed: %s\n%s",
cause,
org.apache.hadoop.util.StringUtils.stringifyException(cause));
}
}
}

/**
* Locate the krb5.conf file and dump it.
* No-op on windows.
* @throws IOException
*/
private void validateKrb5File() throws IOException {
if (!Shell.WINDOWS) {
title("Locating Kerberos configuration file");
String krbPath = "/etc/krb5.conf";
String jvmKrbPath = System.getProperty(JAVA_SECURITY_KRB5_CONF);
if (jvmKrbPath != null) {
println("Setting kerberos path from sysprop %s: %s",
JAVA_SECURITY_KRB5_CONF, jvmKrbPath);
krbPath = jvmKrbPath;
}

String krb5name = System.getenv(KRB5_CCNAME);
if (krb5name != null) {
println("Setting kerberos path from environment variable %s: %s",
KRB5_CCNAME, krb5name);
krbPath = krb5name;
if (jvmKrbPath != null) {
println("Warning - both %s and %s were set - %s takes priority",
JAVA_SECURITY_KRB5_CONF, KRB5_CCNAME, KRB5_CCNAME);
}
}

File krbFile = new File(krbPath);
println("Kerberos configuration file = %s", krbFile);
failif(!krbFile.exists(),
CAT_KERBEROS,
"Kerberos configuration file %s not found", krbFile);
dump(krbFile);
}
}

/**
* Dump a keytab: list all principals.
* @param keytabFile the keytab file
* @throws IOException IO problems
*/
public void dumpKeytab(File keytabFile) throws IOException {
title("Examining keytab %s", keytabFile);
File kt = keytabFile.getCanonicalFile();
failif(!kt.exists(), CAT_CONFIG, "Keytab not found: %s", kt);
failif(!kt.isFile(), CAT_CONFIG, "Keytab is not a valid file: %s", kt);

String[] names = getPrincipalNames(keytabFile.getCanonicalPath(),
Pattern.compile(".*"));
println("keytab entry count: %d", names.length);
for (String name : names) {
println(" %s", name);
}
println("-----");
}

/**
* Log in from a keytab, dump the UGI, validate it, then try and log in again.
* That second-time login catches JVM/Hadoop compatibility problems.
* @throws IOException
*/
private void loginFromKeytab() throws IOException {
UserGroupInformation ugi;
String identity;
if (keytab != null) {
File kt = keytab.getCanonicalFile();
println("Using keytab %s principal %s", kt, principal);
identity = principal;

failif(StringUtils.isEmpty(principal), CAT_KERBEROS,
"No principal defined");
ugi = loginUserFromKeytabAndReturnUGI(principal, kt.getPath());
dumpUGI(identity, ugi);
validateUGI(principal, ugi);

title("Attempting to log in from keytab again");
// package scoped -hence the reason why this class must be in the
// hadoop.security package
setShouldRenewImmediatelyForTests(true);
// attempt a new login
ugi.reloginFromKeytab();
} else {
println("No keytab: logging is as current user");
}
}

/**
* Dump a UGI.
* @param title title of this section
* @param ugi UGI to dump
* @throws IOException
*/
private void dumpUGI(String title, UserGroupInformation ugi)
throws IOException {
title(title);
println("UGI instance = %s", ugi);
println("Has kerberos credentials: %b", ugi.hasKerberosCredentials());
println("Authentication method: %s", ugi.getAuthenticationMethod());
println("Real Authentication method: %s",
ugi.getRealAuthenticationMethod());
title("Group names");
for (String name : ugi.getGroupNames()) {
println(name);
}
title("Credentials");
Credentials credentials = ugi.getCredentials();
List<Text> secretKeys = credentials.getAllSecretKeys();
title("Secret keys");
if (!secretKeys.isEmpty()) {
for (Text secret: secretKeys) {
println("%s", secret);
}
} else {
println("(none)");
}

dumpTokens(ugi);
}

/**
* Validate the UGI: verify it is kerberized.
* @param messagePrefix message in exceptions
* @param user user to validate
*/
private void validateUGI(String messagePrefix, UserGroupInformation user) {
failif(!user.hasKerberosCredentials(),
CAT_LOGIN, "%s: No kerberos credentials for %s", messagePrefix, user);
failif(user.getAuthenticationMethod() == null,
CAT_LOGIN, "%s: Null AuthenticationMethod for %s", messagePrefix, user);
}

/**
* A cursory look at the {@code kinit} executable.
* If it is an absolute path: it must exist with a size > 0.
* If it is just a command, it has to be on the path. There's no check
* for that -but the PATH is printed out.
*/
private void validateKinitExecutable() {
String kinit = conf.getTrimmed(KERBEROS_KINIT_COMMAND, "");
if (!kinit.isEmpty()) {
File kinitPath = new File(kinit);
println("%s = %s", KERBEROS_KINIT_COMMAND, kinitPath);
if (kinitPath.isAbsolute()) {
failif(!kinitPath.exists(), CAT_KERBEROS,
"%s executable does not exist: %s",
KERBEROS_KINIT_COMMAND, kinitPath);
failif(!kinitPath.isFile(), CAT_KERBEROS,
"%s path does not refer to a file: %s",
KERBEROS_KINIT_COMMAND, kinitPath);
failif(kinitPath.length() == 0, CAT_KERBEROS,
"%s file is empty: %s",
KERBEROS_KINIT_COMMAND, kinitPath);
} else {
println("Executable %s is relative -must be on the PATH", kinit);
printEnv("PATH");
}
}
}

/**
* Try to load the SASL resolver.
* @param saslPropsResolverKey key for the SASL resolver
*/
private void validateSasl(String saslPropsResolverKey) {
title("Resolving SASL property %s", saslPropsResolverKey);
String saslPropsResolver = conf.getTrimmed(saslPropsResolverKey);
try {
Class<? extends SaslPropertiesResolver> resolverClass = conf.getClass(
saslPropsResolverKey,
SaslPropertiesResolver.class, SaslPropertiesResolver.class);
println("Resolver is %s", resolverClass);
} catch (RuntimeException e) {
throw new KerberosDiagsFailure(CAT_SASL, e,
"Failed to load %s class %s",
saslPropsResolverKey, saslPropsResolver);
}
}

/**
* Validate any JAAS entry referenced in the {@link #SUN_SECURITY_JAAS_FILE}
* property.
*/
private void validateJAAS() {
String jaasFilename = System.getProperty(SUN_SECURITY_JAAS_FILE);
if (jaasFilename != null) {
title("JAAS");
File jaasFile = new File(jaasFilename);
println("JAAS file is defined in %s: %s",
SUN_SECURITY_JAAS_FILE, jaasFile);
failif(!jaasFile.exists(), CAT_JAAS,
"JAAS file does not exist: %s", jaasFile);
failif(!jaasFile.isFile(), CAT_JAAS,
"Specified JAAS file is not a file: %s", jaasFile);
}
}

/**
* Dump all tokens of a user
* @param user user
*/
public void dumpTokens(UserGroupInformation user) {
Collection<Token<? extends TokenIdentifier>> tokens
= user.getCredentials().getAllTokens();
title("Token Count: %d", tokens.size());
for (Token<? extends TokenIdentifier> token : tokens) {
println("Token %s", token.getKind());
}
}

/**
* Set the System property to true; return the old value for caching
* @param sysprop property
* @return the previous value
*/
private boolean getAndSet(String sysprop) {
boolean old = Boolean.getBoolean(sysprop);
System.setProperty(sysprop, "true");
return old;
}

/**
* Flush all active output channels, including {@Code System.err},
* so as to stay in sync with any JRE log messages.
*/
private void flush() {
if (out != null) {
out.flush();
} else {
System.out.flush();
}
System.err.flush();
}

/**
* Format and print a line of output.
* This goes to any output file, or
* is logged at info. The output is flushed before and after, to
* try and stay in sync with JRE logging.
* @param format format string
* @param args any arguments
*/
@VisibleForTesting
public void println(String format, Object... args) {
println(format(format, args));
}

/**
* Print a line of output. This goes to any output file, or
* is logged at info. The output is flushed before and after, to
* try and stay in sync with JRE logging.
* @param msg message string
*/
@VisibleForTesting
private void println(String msg) {
flush();
if (out != null) {
out.println(msg);
} else {
LOG.info(msg);
}
flush();
}

/**
* Print a title entry
* @param format format string
* @param args any arguments
*/
private void title(String format, Object... args) {
println("");
println("");
String msg = "== " + format(format, args) + " ==";
println(msg);
println("");
}

/**
* Print a system property, or {@link #UNSET} if unset.
* @param property property to print
*/
private void printSysprop(String property) {
println("%s = \"%s\"", property,
System.getProperty(property, UNSET));
}

/**
* Print a configuration option, or {@link #UNSET} if unset.
* @param option option to print
*/
private void printConfOpt(String option) {
println("%s = \"%s\"", option, conf.get(option, UNSET));
}

/**
* Print an environment variable's name and value; printing
* {@link #UNSET} if it is not set
* @param variable environment variable
*/
private void printEnv(String variable) {
String env = System.getenv(variable);
println("%s = \"%s\"", variable, env != null ? env : UNSET);
}

/**
* Dump any file to standard out; add a trailing newline
* @param file file to dump
* @throws IOException IO problems
*/
public void dump(File file) throws IOException {
try (FileInputStream in = new FileInputStream(file)) {
for (String line : IOUtils.readLines(in)) {
println("%s", line);
}
}
println("");
}

/**
* Format and raise a failure
*
* @param category category for exception
* @param message string formatting message
* @param args any arguments for the formatting
* @throws KerberosDiagsFailure containing the formatted text
*/
private void fail(String category, String message, Object... args)
throws KerberosDiagsFailure {
throw new KerberosDiagsFailure(category, message, args);
}

/**
* Conditional failure with string formatted arguments
* @param condition failure condition
* @param category category for exception
* @param message string formatting message
* @param args any arguments for the formatting
* @throws KerberosDiagsFailure containing the formatted text
* if the condition was met
*/
private void failif(boolean condition,
String category,
String message,
Object... args)
throws KerberosDiagsFailure {
if (condition) {
fail(category, message, args);
}
}

/**
* Format a string, treating a call where there are no varags values
* as a string to pass through unformatted.
* @param message message, which is either a format string + args, or
* a general string
* @param args argument array
* @return a string for printing.
*/
public static String format(String message, Object... args) {
if (args.length == 0) {
return message;
} else {
return String.format(message, args);
}
}

/**
* Diagnostics failures return the exit code 41, "unauthorized".
*
* They have a category, initially for testing: the category can be
* validated without having to match on the entire string.
*/
public static class KerberosDiagsFailure extends ExitUtil.ExitException {
private final String category;

public KerberosDiagsFailure(String category, String message) {
super(41, category + ": " + message);
this.category = category;
}

public KerberosDiagsFailure(String category, String message, Object... args) {
this(category, format(message, args));
}

public KerberosDiagsFailure(String category, Throwable throwable,
String message, Object... args) {
this(category, message, args);
initCause(throwable);
}

public String getCategory() {
return category;
}
}
}

File diff suppressed because it is too large
|
@ -18,12 +18,22 @@
|
|||
|
||||
package org.apache.hadoop.yarn.service;
|
||||
|
||||
import org.apache.hadoop.fs.FileSystem;
|
||||
import org.apache.hadoop.fs.Path;
|
||||
import org.apache.hadoop.yarn.service.api.records.Service;
|
||||
import org.apache.hadoop.yarn.service.api.records.Component;
|
||||
import org.apache.hadoop.yarn.service.api.records.Resource;
|
||||
import org.apache.hadoop.yarn.service.utils.JsonSerDeser;
|
||||
import org.apache.hadoop.yarn.service.utils.ServiceApiUtil;
|
||||
import org.apache.hadoop.yarn.service.utils.SliderFileSystem;
|
||||
import org.codehaus.jackson.map.PropertyNamingStrategy;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import static org.mockito.Matchers.anyObject;
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
public class ServiceTestUtils {
|
||||
|
||||
public static final JsonSerDeser<Service> JSON_SER_DESER =
|
||||
|
@@ -56,4 +66,22 @@ public class ServiceTestUtils {
    resource.setCpus(1);
    return comp1;
  }
+
+  public static SliderFileSystem initMockFs() throws IOException {
+    return initMockFs(null);
+  }
+
+  public static SliderFileSystem initMockFs(Service ext) throws IOException {
+    SliderFileSystem sfs = mock(SliderFileSystem.class);
+    FileSystem mockFs = mock(FileSystem.class);
+    JsonSerDeser<Service> jsonSerDeser = mock(JsonSerDeser.class);
+    when(sfs.getFileSystem()).thenReturn(mockFs);
+    when(sfs.buildClusterDirPath(anyObject())).thenReturn(
+        new Path("cluster_dir_path"));
+    if (ext != null) {
+      when(jsonSerDeser.load(anyObject(), anyObject())).thenReturn(ext);
+    }
+    ServiceApiUtil.setJsonSerDeser(jsonSerDeser);
+    return sfs;
+  }
}
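With the stubbing centralized here, a caller-side sketch (JUnit 4 assumed; the test name is hypothetical) looks like:

    @Test
    public void testResolveAgainstExternalService() throws IOException {
      Service ext = createValidApplication("comp1");            // helper from the tests below
      SliderFileSystem sfs = ServiceTestUtils.initMockFs(ext);  // mocked FS; load() yields ext
      Service app = createValidApplication(null);
      ServiceApiUtil.validateAndResolveService(app, sfs, new YarnConfiguration());
    }

Because initMockFs(ext) also installs the mocked JsonSerDeser through ServiceApiUtil.setJsonSerDeser, a component that references the external service resolves to ext without touching a real filesystem.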
@@ -18,8 +18,6 @@
package org.apache.hadoop.yarn.service;

import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
import org.apache.hadoop.registry.client.api.RegistryConstants;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.service.api.records.Service;
@@ -27,7 +25,6 @@ import org.apache.hadoop.yarn.service.exceptions.RestApiErrorMessages;
import org.apache.hadoop.yarn.service.api.records.Artifact;
import org.apache.hadoop.yarn.service.api.records.Component;
import org.apache.hadoop.yarn.service.api.records.Resource;
-import org.apache.hadoop.yarn.service.utils.JsonSerDeser;
import org.apache.hadoop.yarn.service.utils.ServiceApiUtil;
import org.apache.hadoop.yarn.service.utils.SliderFileSystem;
import org.junit.Assert;
@@ -44,10 +41,8 @@ import java.util.List;
import static org.apache.hadoop.yarn.service.conf.RestApiConstants.DEFAULT_COMPONENT_NAME;
import static org.apache.hadoop.yarn.service.conf.RestApiConstants.DEFAULT_UNLIMITED_LIFETIME;
import static org.apache.hadoop.yarn.service.exceptions.RestApiErrorMessages.*;
-import static org.easymock.EasyMock.*;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

/**
 * Test for ServiceApiUtil helper methods.
@@ -78,7 +73,7 @@ public class TestServiceApiUtil {
    assertEquals(RegistryConstants.MAX_FQDN_LABEL_LENGTH + 1, LEN_64_STR
        .length());

-    SliderFileSystem sfs = initMock(null);
+    SliderFileSystem sfs = ServiceTestUtils.initMockFs();

    Service app = new Service();
@@ -230,7 +225,7 @@ public class TestServiceApiUtil {

  @Test
  public void testArtifacts() throws IOException {
-    SliderFileSystem sfs = initMock(null);
+    SliderFileSystem sfs = ServiceTestUtils.initMockFs();

    Service app = new Service();
    app.setName("name");
@@ -309,27 +304,10 @@ public class TestServiceApiUtil {
    return app;
  }

-  private static SliderFileSystem initMock(Service ext) throws IOException {
-    SliderFileSystem sfs = createNiceMock(SliderFileSystem.class);
-    FileSystem mockFs = createNiceMock(FileSystem.class);
-    JsonSerDeser<Service> jsonSerDeser = createNiceMock(JsonSerDeser
-        .class);
-    expect(sfs.getFileSystem()).andReturn(mockFs).anyTimes();
-    expect(sfs.buildClusterDirPath(anyObject())).andReturn(
-        new Path("cluster_dir_path")).anyTimes();
-    if (ext != null) {
-      expect(jsonSerDeser.load(anyObject(), anyObject())).andReturn(ext)
-          .anyTimes();
-    }
-    replay(sfs, mockFs, jsonSerDeser);
-    ServiceApiUtil.setJsonSerDeser(jsonSerDeser);
-    return sfs;
-  }
-
  @Test
  public void testExternalApplication() throws IOException {
    Service ext = createValidApplication("comp1");
-    SliderFileSystem sfs = initMock(ext);
+    SliderFileSystem sfs = ServiceTestUtils.initMockFs(ext);

    Service app = createValidApplication(null);
@@ -350,7 +328,7 @@ public class TestServiceApiUtil {

  @Test
  public void testDuplicateComponents() throws IOException {
-    SliderFileSystem sfs = initMock(null);
+    SliderFileSystem sfs = ServiceTestUtils.initMockFs();

    String compName = "comp1";
    Service app = createValidApplication(compName);
@@ -368,7 +346,7 @@ public class TestServiceApiUtil {
  @Test
  public void testExternalDuplicateComponent() throws IOException {
    Service ext = createValidApplication("comp1");
-    SliderFileSystem sfs = initMock(ext);
+    SliderFileSystem sfs = ServiceTestUtils.initMockFs(ext);

    Service app = createValidApplication("comp1");
    Artifact artifact = new Artifact();
@@ -387,7 +365,7 @@ public class TestServiceApiUtil {
  @Test
  public void testExternalComponent() throws IOException {
    Service ext = createValidApplication("comp1");
-    SliderFileSystem sfs = initMock(ext);
+    SliderFileSystem sfs = ServiceTestUtils.initMockFs(ext);

    Service app = createValidApplication("comp2");
    Artifact artifact = new Artifact();
@@ -454,7 +432,7 @@ public class TestServiceApiUtil {
          e)), ex.getMessage());
    }

-    SliderFileSystem sfs = initMock(null);
+    SliderFileSystem sfs = ServiceTestUtils.initMockFs();
    Service service = createValidApplication(null);
    service.setComponents(Arrays.asList(c, d, e));
    try {
@@ -470,7 +448,7 @@ public class TestServiceApiUtil {

  @Test
  public void testInvalidComponent() throws IOException {
-    SliderFileSystem sfs = initMock(null);
+    SliderFileSystem sfs = ServiceTestUtils.initMockFs();
    testComponent(sfs);
  }

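The mechanical translation applied across these tests, side by side (a sketch; fs is an arbitrary mock name, and the usual static imports of createNiceMock/expect/replay and mock/when/anyObject are assumed): EasyMock's record/replay protocol collapses into a single Mockito stubbing, and anyTimes() disappears because Mockito stubs are not call-count constrained by default.

    // EasyMock (before):
    FileSystem fs = createNiceMock(FileSystem.class);
    expect(fs.exists(anyObject(Path.class))).andReturn(true).anyTimes();
    replay(fs);

    // Mockito (after):
    FileSystem fs = mock(FileSystem.class);
    when(fs.exists(anyObject())).thenReturn(true);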
@@ -19,8 +19,6 @@
package org.apache.hadoop.yarn.service;

import org.apache.commons.io.FileUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.curator.test.TestingCluster;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
@@ -51,6 +49,8 @@ import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

import java.io.ByteArrayOutputStream;
import java.io.File;
@@ -78,8 +78,8 @@ import static org.apache.hadoop.yarn.service.conf.YarnServiceConf.YARN_SERVICE_BASE_PATH;
 */
public class TestYarnNativeServices extends ServiceTestUtils{

-  private static final Log LOG =
-      LogFactory.getLog(TestYarnNativeServices.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestYarnNativeServices.class);

  private MiniYARNCluster yarnCluster = null;
  private MiniDFSCluster hdfsCluster = null;
@@ -416,7 +416,7 @@ public class TestYarnNativeServices extends ServiceTestUtils{
    LOG.info("Num Components " + retrievedApp.getComponents().size());
    for (Component component : retrievedApp.getComponents()) {
      LOG.info("looking for " + component.getName());
-      LOG.info(component);
+      LOG.info(component.toString());
      if (component.getContainers() != null) {
        if (component.getContainers().size() == exampleApp
            .getComponent(component.getName()).getNumberOfContainers()) {
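The LOG.info(component) change above is forced by the logging switch: commons-logging's Log.info(Object) accepts any Object, while slf4j's Logger.info takes a String message. Either an explicit toString() (as in the patch) or slf4j's parameterized form works; a sketch:

    // Explicit conversion, as done above:
    LOG.info(component.toString());
    // Idiomatic slf4j alternative; formatting is deferred until INFO is enabled:
    LOG.info("component: {}", component);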
@@ -45,7 +44,6 @@ import static org.apache.hadoop.yarn.conf.YarnConfiguration.RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_MS;
import static org.apache.hadoop.yarn.conf.YarnConfiguration.RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_MS;
import static org.apache.hadoop.yarn.service.client.params.Arguments.ARG_APPDEF;
import static org.apache.hadoop.yarn.service.conf.YarnServiceConf.YARN_SERVICE_BASE_PATH;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.*;

public class TestServiceCLI {
@@ -18,13 +18,11 @@

package org.apache.hadoop.yarn.service.conf;

-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.service.ServiceTestUtils;
import org.apache.hadoop.yarn.service.api.records.Service;
import org.apache.hadoop.yarn.service.api.records.ConfigFile;
import org.apache.hadoop.yarn.service.api.records.Configuration;
-import org.apache.hadoop.yarn.service.utils.JsonSerDeser;
import org.apache.hadoop.yarn.service.utils.ServiceApiUtil;
import org.apache.hadoop.yarn.service.utils.SliderFileSystem;
import org.junit.Assert;
@@ -40,7 +38,6 @@ import java.util.Map;
import java.util.Set;

import static org.apache.hadoop.yarn.service.conf.ExampleAppJson.*;
-import static org.easymock.EasyMock.*;

/**
 * Test global configuration resolution.
@@ -78,12 +75,7 @@ public class TestAppJsonResolve extends Assert {
    assertEquals("1000", worker.getProperty("timeout"));

    // here is the resolution
-    SliderFileSystem sfs = createNiceMock(SliderFileSystem.class);
-    FileSystem mockFs = createNiceMock(FileSystem.class);
-    expect(sfs.getFileSystem()).andReturn(mockFs).anyTimes();
-    expect(sfs.buildClusterDirPath(anyObject())).andReturn(
-        new Path("cluster_dir_path")).anyTimes();
-    replay(sfs, mockFs);
+    SliderFileSystem sfs = ServiceTestUtils.initMockFs();
    ServiceApiUtil.validateAndResolveService(orig, sfs, new
        YarnConfiguration());
@@ -162,27 +154,13 @@ public class TestAppJsonResolve extends Assert {
    assertEquals(0, other.getProperties().size());

    // load the external service
-    SliderFileSystem sfs = createNiceMock(SliderFileSystem.class);
-    FileSystem mockFs = createNiceMock(FileSystem.class);
-    expect(sfs.getFileSystem()).andReturn(mockFs).anyTimes();
-    expect(sfs.buildClusterDirPath(anyObject())).andReturn(
-        new Path("cluster_dir_path")).anyTimes();
-    replay(sfs, mockFs);
+    SliderFileSystem sfs = ServiceTestUtils.initMockFs();
    Service ext = ExampleAppJson.loadResource(APP_JSON);
    ServiceApiUtil.validateAndResolveService(ext, sfs, new
        YarnConfiguration());
-    reset(sfs, mockFs);

    // perform the resolution on original service
-    JsonSerDeser<Service> jsonSerDeser = createNiceMock(JsonSerDeser
-        .class);
-    expect(sfs.getFileSystem()).andReturn(mockFs).anyTimes();
-    expect(sfs.buildClusterDirPath(anyObject())).andReturn(
-        new Path("cluster_dir_path")).anyTimes();
-    expect(jsonSerDeser.load(anyObject(), anyObject())).andReturn(ext)
-        .anyTimes();
-    replay(sfs, mockFs, jsonSerDeser);
-    ServiceApiUtil.setJsonSerDeser(jsonSerDeser);
+    sfs = ServiceTestUtils.initMockFs(ext);
    ServiceApiUtil.validateAndResolveService(orig, sfs, new
        YarnConfiguration());

@@ -18,9 +18,8 @@

package org.apache.hadoop.yarn.service.conf;

-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.service.ServiceTestUtils;
import org.apache.hadoop.yarn.service.api.records.Service;
import org.apache.hadoop.yarn.service.utils.ServiceApiUtil;
import org.apache.hadoop.yarn.service.utils.SliderFileSystem;
@@ -33,7 +32,6 @@ import java.util.Arrays;
import java.util.Collection;

import static org.apache.hadoop.yarn.service.ServiceTestUtils.JSON_SER_DESER;
-import static org.easymock.EasyMock.*;

/**
 * Test loading example resources.
@@ -62,12 +60,7 @@ public class TestLoadExampleAppJson extends Assert {
    try {
      Service service = JSON_SER_DESER.fromResource(resource);

-      SliderFileSystem sfs = createNiceMock(SliderFileSystem.class);
-      FileSystem mockFs = createNiceMock(FileSystem.class);
-      expect(sfs.getFileSystem()).andReturn(mockFs).anyTimes();
-      expect(sfs.buildClusterDirPath(anyObject())).andReturn(
-          new Path("cluster_dir_path")).anyTimes();
-      replay(sfs, mockFs);
+      SliderFileSystem sfs = ServiceTestUtils.initMockFs();

      ServiceApiUtil.validateAndResolveService(service, sfs,
          new YarnConfiguration());
@@ -18,7 +18,6 @@
package org.apache.hadoop.yarn.service.providers;

import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.service.api.records.Artifact;
import org.apache.hadoop.yarn.service.api.records.ConfigFile;
import org.apache.hadoop.yarn.service.provider.AbstractClientProvider;
@@ -29,7 +28,9 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

-import static org.easymock.EasyMock.*;
+import static org.mockito.Matchers.anyObject;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;

/**
 * Test the AbstractClientProvider shared methods.
@@ -55,9 +56,8 @@ public class TestAbstractClientProvider {
  @Test
  public void testConfigFiles() throws IOException {
    ClientProvider clientProvider = new ClientProvider();
-    FileSystem mockFs = createNiceMock(FileSystem.class);
-    expect(mockFs.exists(anyObject(Path.class))).andReturn(true).anyTimes();
-    replay(mockFs);
+    FileSystem mockFs = mock(FileSystem.class);
+    when(mockFs.exists(anyObject())).thenReturn(true);

    ConfigFile configFile = new ConfigFile();
    List<ConfigFile> configFiles = new ArrayList<>();
@@ -82,6 +82,10 @@
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-yarn-server-router</artifactId>
    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-services-core</artifactId>
+    </dependency>
  </dependencies>

  <build>
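The new dependency is declared without a version, so the version is expected to come from dependencyManagement in a parent pom (in Hadoop, the hadoop-project pom). A standalone consumer outside this build would have to pin it explicitly, e.g. (hypothetical snippet; ${hadoop.version} is a placeholder property):

    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-yarn-services-core</artifactId>
      <version>${hadoop.version}</version>
    </dependency>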