HDDS-547. Fix secure docker and configs. Contributed by Xiaoyu Yao.
This commit is contained in:
parent
2d269440b0
commit
9920506b3d
|
@ -14,4 +14,5 @@
|
||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
OZONEDIR=../../../../../../hadoop-dist/target/ozone-0.3.0-SNAPSHOT
|
HDDS_VERSION=${hdds.version}
|
||||||
|
SRC_VOLUME=../../
|
|
@ -16,47 +16,38 @@
|
||||||
|
|
||||||
version: "3"
|
version: "3"
|
||||||
services:
|
services:
|
||||||
ozone.kdc:
|
kdc:
|
||||||
image: ahadoop/kdc:v1
|
image: ahadoop/kdc:v1
|
||||||
namenode:
|
hostname: kdc
|
||||||
image: ahadoop/ozone:v1
|
|
||||||
hostname: namenode
|
|
||||||
volumes:
|
volumes:
|
||||||
- ${OZONEDIR}:/opt/hadoop
|
- $SRC_VOLUME:/opt/hadoop
|
||||||
ports:
|
|
||||||
- 9000:9000
|
|
||||||
environment:
|
|
||||||
ENSURE_NAMENODE_DIR: /data/namenode
|
|
||||||
env_file:
|
|
||||||
- ./docker-config
|
|
||||||
command: ["/opt/hadoop/bin/hdfs","namenode"]
|
|
||||||
datanode:
|
datanode:
|
||||||
image: ahadoop/ozone:v1
|
image: ahadoop/runner:latest
|
||||||
hostname: datanode
|
|
||||||
volumes:
|
volumes:
|
||||||
- ${OZONEDIR}:/opt/hadoop
|
- $SRC_VOLUME:/opt/hadoop
|
||||||
|
hostname: datanode
|
||||||
ports:
|
ports:
|
||||||
- 9874
|
- 9864
|
||||||
|
command: ["/opt/hadoop/bin/ozone","datanode"]
|
||||||
env_file:
|
env_file:
|
||||||
- ./docker-config
|
- ./docker-config
|
||||||
command: ["/opt/hadoop/bin/ozone","datanode"]
|
ozoneManager:
|
||||||
om:
|
image: ahadoop/runner:latest
|
||||||
image: ahadoop/ozone:v1
|
|
||||||
hostname: om
|
hostname: om
|
||||||
volumes:
|
volumes:
|
||||||
- ${OZONEDIR}:/opt/hadoop
|
- $SRC_VOLUME:/opt/hadoop
|
||||||
ports:
|
ports:
|
||||||
- 9874:9874
|
- 9874:9874
|
||||||
environment:
|
environment:
|
||||||
ENSURE_KSM_INITIALIZED: /data/metadata/om/current/VERSION
|
ENSURE_OM_INITIALIZED: /data/metadata/ozoneManager/current/VERSION
|
||||||
env_file:
|
env_file:
|
||||||
- ./docker-config
|
- ./docker-config
|
||||||
command: ["/opt/hadoop/bin/ozone","ksm"]
|
command: ["/opt/hadoop/bin/ozone","om"]
|
||||||
scm:
|
scm:
|
||||||
image: ahadoop/ozone:v1
|
image: ahadoop/runner:latest
|
||||||
hostname: scm
|
hostname: scm
|
||||||
volumes:
|
volumes:
|
||||||
- ${OZONEDIR}:/opt/hadoop
|
- $SRC_VOLUME:/opt/hadoop
|
||||||
ports:
|
ports:
|
||||||
- 9876:9876
|
- 9876:9876
|
||||||
env_file:
|
env_file:
|
|
@ -14,25 +14,26 @@
|
||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
OZONE-SITE.XML_ozone.ksm.address=om
|
OZONE-SITE.XML_ozone.om.address=om
|
||||||
|
OZONE-SITE.XML_ozone.om.http-address=om:9874
|
||||||
OZONE-SITE.XML_ozone.scm.names=scm
|
OZONE-SITE.XML_ozone.scm.names=scm
|
||||||
OZONE-SITE.XML_ozone.enabled=True
|
OZONE-SITE.XML_ozone.enabled=True
|
||||||
OZONE-SITE.XML_hdds.scm.datanode.id=/data/datanode.id
|
OZONE-SITE.XML_ozone.scm.datanode.id=/data/datanode.id
|
||||||
OZONE-SITE.XML_hdds.scm.block.client.address=scm
|
OZONE-SITE.XML_ozone.scm.block.client.address=scm
|
||||||
OZONE-SITE.XML_ozone.metadata.dirs=/data/metadata
|
OZONE-SITE.XML_ozone.metadata.dirs=/data/metadata
|
||||||
OZONE-SITE.XML_ozone.handler.type=distributed
|
OZONE-SITE.XML_ozone.handler.type=distributed
|
||||||
OZONE-SITE.XML_hdds.scm.client.address=scm
|
OZONE-SITE.XML_ozone.scm.client.address=scm
|
||||||
OZONE-SITE.XML_ozone.replication=1
|
OZONE-SITE.XML_ozone.replication=1
|
||||||
OZONE-SITE.XML_hdds.scm.kerberos.principal=scm/scm@EXAMPLE.COM
|
OZONE-SITE.XML_hdds.scm.kerberos.principal=scm/scm@EXAMPLE.COM
|
||||||
OZONE-SITE.XML_hdds.scm.kerberos.keytab.file=/etc/security/keytabs/scm.keytab
|
OZONE-SITE.XML_hdds.scm.kerberos.keytab.file=/etc/security/keytabs/scm.keytab
|
||||||
OZONE-SITE.XML_ozone.om.kerberos.principal=om/om@EXAMPLE.COM
|
OZONE-SITE.XML_ozone.om.kerberos.principal=om/om@EXAMPLE.COM
|
||||||
OZONE-SITE.XML_ozone.om.kerberos.keytab.file=/etc/security/keytabs/om.keytab
|
OZONE-SITE.XML_ozone.om.kerberos.keytab.file=/etc/security/keytabs/om.keytab
|
||||||
OZONE-SITE.XML_ozone.security.enabled=true
|
OZONE-SITE.XML_ozone.security.enabled=true
|
||||||
OZONE-SITE.XML_hdds.scm.web.authentication.kerberos.principal=HTTP/scm@EXAMPLE.COM
|
OZONE-SITE.XML_hdds.scm.http.kerberos.principal=HTTP/scm@EXAMPLE.COM
|
||||||
OZONE-SITE.XML_hdds.scm.web.authentication.kerberos.keytab=/etc/security/keytabs/HTTP.keytab
|
OZONE-SITE.XML_hdds.scm.http.kerberos.keytab=/etc/security/keytabs/HTTP.keytab
|
||||||
OZONE-SITE.XML_ozone.om.web.authentication.kerberos.principal=HTTP/om@EXAMPLE.COM
|
OZONE-SITE.XML_ozone.om.http.kerberos.principal=HTTP/om@EXAMPLE.COM
|
||||||
OZONE-SITE.XML_ozone.om.web.authentication.kerberos.keytab=/etc/security/keytabs/HTTP.keytab
|
OZONE-SITE.XML_ozone.om.http.kerberos.keytab=/etc/security/keytabs/HTTP.keytab
|
||||||
HDFS-SITE.XML_dfs.datanode.kerberos.principal=dn/datanode@EXAMPLE.COM
|
HDFS-SITE.XML_dfs.datanode.kerberos.principal=dn/_HOST@EXAMPLE.COM
|
||||||
HDFS-SITE.XML_dfs.datanode.keytab.file=/etc/security/keytabs/dn.keytab
|
HDFS-SITE.XML_dfs.datanode.keytab.file=/etc/security/keytabs/dn.keytab
|
||||||
HDFS-SITE.XML_dfs.web.authentication.kerberos.principal=HTTP/_HOST@EXAMPLE.COM
|
HDFS-SITE.XML_dfs.web.authentication.kerberos.principal=HTTP/_HOST@EXAMPLE.COM
|
||||||
HDFS-SITE.XML_dfs.web.authentication.kerberos.keytab=/etc/security/keytabs/HTTP.keytab
|
HDFS-SITE.XML_dfs.web.authentication.kerberos.keytab=/etc/security/keytabs/HTTP.keytab
|
||||||
|
@ -41,11 +42,15 @@ HDFS-SITE.XML_dfs.datanode.http.address=0.0.0.0:1012
|
||||||
CORE-SITE.XML_dfs.data.transfer.protection=authentication
|
CORE-SITE.XML_dfs.data.transfer.protection=authentication
|
||||||
CORE-SITE.XML_hadoop.security.authentication=kerberos
|
CORE-SITE.XML_hadoop.security.authentication=kerberos
|
||||||
CORE-SITE.XML_hadoop.security.auth_to_local=RULE:[2:$1@$0](.*)s/.*/root/
|
CORE-SITE.XML_hadoop.security.auth_to_local=RULE:[2:$1@$0](.*)s/.*/root/
|
||||||
|
HDFS-SITE.XML_rpc.metrics.quantile.enable=true
|
||||||
|
HDFS-SITE.XML_rpc.metrics.percentiles.intervals=60,300
|
||||||
LOG4J.PROPERTIES_log4j.rootLogger=INFO, stdout
|
LOG4J.PROPERTIES_log4j.rootLogger=INFO, stdout
|
||||||
LOG4J.PROPERTIES_log4j.appender.stdout=org.apache.log4j.ConsoleAppender
|
LOG4J.PROPERTIES_log4j.appender.stdout=org.apache.log4j.ConsoleAppender
|
||||||
LOG4J.PROPERTIES_log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
|
LOG4J.PROPERTIES_log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
|
||||||
LOG4J.PROPERTIES_log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
|
LOG4J.PROPERTIES_log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
|
||||||
|
LOG4J.PROPERTIES_log4j.logger.org.apache.hadoop.util.NativeCodeLoader=ERROR
|
||||||
|
LOG4J.PROPERTIES_log4j.logger.org.apache.ratis.conf.ConfUtils=WARN
|
||||||
|
LOG4J.PROPERTIES_log4j.logger.org.apache.hadoop.security.ShellBasedUnixGroupsMapping=ERROR
|
||||||
|
|
||||||
#Enable this variable to print out all hadoop rpc traffic to the stdout. See http://byteman.jboss.org/ to define your own instrumentation.
|
#Enable this variable to print out all hadoop rpc traffic to the stdout. See http://byteman.jboss.org/ to define your own instrumentation.
|
||||||
#BYTEMAN_SCRIPT_URL=https://raw.githubusercontent.com/apache/hadoop/trunk/dev-support/byteman/hadooprpc.btm
|
#BYTEMAN_SCRIPT_URL=https://raw.githubusercontent.com/apache/hadoop/trunk/dev-support/byteman/hadooprpc.btm
|
||||||
|
@ -87,13 +92,12 @@ LOG4J2.PROPERTIES_rootLogger.level=INFO
|
||||||
LOG4J2.PROPERTIES_rootLogger.appenderRefs=stdout
|
LOG4J2.PROPERTIES_rootLogger.appenderRefs=stdout
|
||||||
LOG4J2.PROPERTIES_rootLogger.appenderRef.stdout.ref=STDOUT
|
LOG4J2.PROPERTIES_rootLogger.appenderRef.stdout.ref=STDOUT
|
||||||
|
|
||||||
|
|
||||||
OZONE_DATANODE_SECURE_USER=root
|
OZONE_DATANODE_SECURE_USER=root
|
||||||
CONF_DIR=/etc/security/keytabs
|
CONF_DIR=/etc/security/keytabs
|
||||||
KERBEROS_KEYTABS=dn om scm HTTP testuser
|
KERBEROS_KEYTABS=dn om scm HTTP testuser
|
||||||
KERBEROS_KEYSTORES=hadoop
|
KERBEROS_KEYSTORES=hadoop
|
||||||
KERBEROS_SERVER=ozone.kdc
|
KERBEROS_SERVER=kdc
|
||||||
JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64/
|
JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64/
|
||||||
JSVC_HOME=/usr/bin
|
JSVC_HOME=/usr/bin
|
||||||
SLEEP_SECONDS=10
|
SLEEP_SECONDS=5
|
||||||
KERBEROS_ENABLED=true
|
KERBEROS_ENABLED=true
|
|
@ -0,0 +1,39 @@
|
||||||
|
# Licensed to the Apache Software Foundation (ASF) under one
|
||||||
|
# or more contributor license agreements. See the NOTICE file
|
||||||
|
# distributed with this work for additional information
|
||||||
|
# regarding copyright ownership. The ASF licenses this file
|
||||||
|
# to you under the Apache License, Version 2.0 (the
|
||||||
|
# "License"); you may not use this file except in compliance
|
||||||
|
# with the License. You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
# Base image providing JDK 8 for running Ozone/HDFS components.
FROM openjdk:8-jdk
# Tooling used by the runner scripts: jq/curl (KDC readiness loop in
# starter.sh), python (envtoconf.py), sudo (fixing /data permissions).
RUN apt-get update && apt-get install -y jq curl python sudo && apt-get clean

# dumb-init acts as PID 1 so signals are forwarded and zombies are reaped.
RUN wget -O /usr/local/bin/dumb-init https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64
RUN chmod +x /usr/local/bin/dumb-init
# World-writable keytab directory: starter.sh downloads keytabs at runtime.
RUN mkdir -p /etc/security/keytabs && chmod -R a+wr /etc/security/keytabs
ENV JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64/
ENV PATH $PATH:/opt/hadoop/bin

# jsvc is needed to start the secure (privileged-port) datanode.
RUN apt-get install -y jsvc
ENV JSVC_HOME=/usr/bin
# Template krb5.conf; starter.sh substitutes the real KDC hostname.
ADD scripts/krb5.conf /etc/
# Kerberos client utilities (kinit/klist) for fetching and using keytabs.
RUN apt-get install -y krb5-user

# Unprivileged 'hadoop' user (uid/gid 1000) with passwordless sudo.
RUN addgroup --gid 1000 hadoop
RUN adduser --disabled-password --gecos "" --uid 1000 hadoop --gid 1000 --home /opt/hadoop
RUN echo "hadoop ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers
ADD scripts /opt/

WORKDIR /opt/hadoop

VOLUME /data
ENTRYPOINT ["/usr/local/bin/dumb-init", "--", "/opt/starter.sh"]
|
|
@ -0,0 +1,26 @@
|
||||||
|
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Run the Apache RAT license check over this directory, then build the
# ahadoop/runner image.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
set -e
# BUG FIX: create the build directory where it is actually used (next to the
# script), not relative to the caller's working directory.
mkdir -p "$DIR/build"
if [ ! -d "$DIR/build/apache-rat-0.12" ]; then
   # BUG FIX: '-O <file>' must be a separate wget argument; the original had
   # it inside the URL quotes, so wget treated the whole string as the URL.
   wget "http://xenia.sote.hu/ftp/mirrors/www.apache.org/creadur/apache-rat-0.12/apache-rat-0.12-bin.tar.gz" -O "$DIR/build/apache-rat.tar.gz"
   cd "$DIR"/build || exit
   tar zvxf apache-rat.tar.gz
fi
java -jar "$DIR"/build/apache-rat-0.12/apache-rat-0.12.jar "$DIR" -e public -e apache-rat-0.12 -e .git -e .gitignore
docker build -t ahadoop/runner .
|
|
@ -0,0 +1,115 @@
|
||||||
|
#!/usr/bin/python
|
||||||
|
#
|
||||||
|
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
# contributor license agreements. See the NOTICE file distributed with
|
||||||
|
# this work for additional information regarding copyright ownership.
|
||||||
|
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||||
|
# (the "License"); you may not use this file except in compliance with
|
||||||
|
# the License. You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
#
|
||||||
|
|
||||||
|
"""convert environment variables to config"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import transformation
|
||||||
|
|
||||||
|
class Simple(object):
    """Convert environment variables into Hadoop-style configuration files.

    Variables are named like ``CORE-SITE.XML_fs.defaultFS=...``: the part
    before the first ``_``/``.`` separator selects the destination file, the
    next part selects the extension/format, and the remainder is the config
    key.  Entries are accumulated into ``<name>.<ext>.raw`` files and then
    rendered by :meth:`transform` via the ``transformation`` module.
    """

    def __init__(self, args):
        parser = argparse.ArgumentParser()
        parser.add_argument("--destination", help="Destination directory", required=True)
        self.args = parser.parse_args(args=args)
        # copy the default files to file.raw in destination directory

        # Output formats the transformation module knows how to render.
        self.known_formats = ['xml', 'properties', 'yaml', 'yml', 'env', "sh", "cfg", 'conf']
        self.output_dir = self.args.destination

        # file name -> (file extension, output format)
        self.configurables = {}

    def destination_file_path(self, name, extension):
        """Path of the generated config file for ``name.extension``."""
        return os.path.join(self.output_dir, "{}.{}".format(name, extension))

    def write_env_var(self, name, extension, key, value):
        """Append one ``key: value`` line to the raw intermediate file."""
        with open(self.destination_file_path(name, extension) + ".raw", "a") as myfile:
            myfile.write("{}: {}\n".format(key, value))

    def process_envs(self):
        """Scan os.environ and collect configuration entries into .raw files."""
        for key in os.environ.keys():
            pattern = re.compile("[_\\.]")
            parts = pattern.split(key)
            extension = None
            name = parts[0].lower()
            if len(parts) > 1:
                extension = parts[1].lower()
                config_key = key[len(name) + len(extension) + 2:].strip()
            if extension and "!" in extension:
                # "EXT!FMT" selects a file extension different from the format.
                splitted = extension.split("!")
                extension = splitted[0]
                fmt = splitted[1]
                config_key = key[len(name) + len(extension) + len(fmt) + 3:].strip()
            else:
                fmt = extension

            if extension and extension in self.known_formats:
                if name not in self.configurables.keys():
                    # Truncate the raw file the first time this name is seen.
                    with open(self.destination_file_path(name, extension) + ".raw", "w") as myfile:
                        myfile.write("")
                self.configurables[name] = (extension, fmt)
                self.write_env_var(name, extension, config_key, os.environ[key])
            else:
                for configurable_name in self.configurables:
                    if key.lower().startswith(configurable_name.lower()):
                        # BUG FIX: configurables stores an (extension, fmt)
                        # tuple; write_env_var needs just the extension, and
                        # passing the tuple produced a bogus file name.
                        self.write_env_var(configurable_name,
                                           self.configurables[configurable_name][0],
                                           key[len(configurable_name) + 1:],
                                           os.environ[key])

    def transform(self):
        """Render every collected .raw file into its final format."""
        for configurable_name in self.configurables:
            name = configurable_name
            extension, fmt = self.configurables[name]

            destination_path = self.destination_file_path(name, extension)

            with open(destination_path + ".raw", "r") as myfile:
                content = myfile.read()
                # Dispatch to transformation.to_<fmt> (to_xml, to_env, ...).
                transformer_func = getattr(transformation, "to_" + fmt)
                content = transformer_func(content)
                with open(destination_path, "w") as myfile:
                    myfile.write(content)

    def main(self):
        """Collect env vars, then render them to the destination directory."""
        self.process_envs()

        # copy file.ext.raw to file.ext in the destination directory, and
        # transform to the right format (eg. key: value ===> XML)
        self.transform()
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Command-line entry point: run the conversion with CLI arguments."""
    Simple(sys.argv[1:]).main()
|
||||||
|
|
||||||
|
|
||||||
|
# Script entry point (the module also exposes main() for programmatic use).
if __name__ == '__main__':
    Simple(sys.argv[1:]).main()
|
|
@ -0,0 +1,38 @@
|
||||||
|
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
# contributor license agreements. See the NOTICE file distributed with
|
||||||
|
# this work for additional information regarding copyright ownership.
|
||||||
|
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||||
|
# (the "License"); you may not use this file except in compliance with
|
||||||
|
# the License. You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
# Kerberos client configuration template.  "SERVER" is replaced with the real
# KDC hostname by starter.sh (sed) before installing to /etc/krb5.conf.
[logging]
 default = FILE:/var/log/krb5libs.log
 kdc = FILE:/var/log/krb5kdc.log
 admin_server = FILE:/var/log/kadmind.log

[libdefaults]
 # Hostname canonicalization is disabled: docker-compose service names must
 # be used as-is when matching service principals.
 dns_canonicalize_hostname = false
 dns_lookup_realm = false
 ticket_lifetime = 24h
 renew_lifetime = 7d
 forwardable = true
 rdns = false
 default_realm = EXAMPLE.COM

[realms]
 EXAMPLE.COM = {
  kdc = SERVER
  admin_server = SERVER
 }

[domain_realm]
 .example.com = EXAMPLE.COM
 example.com = EXAMPLE.COM
|
|
@ -0,0 +1,100 @@
|
||||||
|
#!/usr/bin/env bash
##
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
set -e

# Container entrypoint: optionally fetch Kerberos keytabs from the KDC
# sidecar, generate the Hadoop/Ozone configuration from environment
# variables, run one-time initialization (namenode format, scm/om init),
# then exec the real command passed as arguments.

DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
echo "Setting up environment!!"

if [ -n "$KERBEROS_ENABLED" ]; then
  echo "Setting up kerberos!!"
  KERBEROS_SERVER=${KERBEROS_SERVER:-krb5}
  ISSUER_SERVER=${ISSUER_SERVER:-$KERBEROS_SERVER\:8081}

  echo "KDC ISSUER_SERVER => $ISSUER_SERVER"

  if [ -n "$SLEEP_SECONDS" ]; then
    # BUG FIX: ${SLEEP_SECONDS} is a parameter expansion; the original
    # $(SLEEP_SECONDS) is command substitution and tried to RUN
    # "SLEEP_SECONDS" as a command.
    echo "Sleeping for ${SLEEP_SECONDS} seconds"
    sleep "$SLEEP_SECONDS"
  fi

  # Wait until the KDC sidecar is able to serve keytabs.
  while true
  do
    STATUS=$(curl -s -o /dev/null -w '%{http_code}' http://"$ISSUER_SERVER"/keytab/test/test)
    if [ "$STATUS" -eq 200 ]; then
      echo "Got 200, KDC service ready!!"
      break
    else
      echo "Got $STATUS :( KDC service not ready yet..."
    fi
    sleep 5
  done

  HOST_NAME=$(hostname -f)
  export HOST_NAME
  for NAME in ${KERBEROS_KEYTABS}; do
    # BUG FIX: log $HOST_NAME (the FQDN set above and used in the URL);
    # the original echoed the unrelated shell variable $HOSTNAME.
    echo "Download $NAME/$HOST_NAME@EXAMPLE.COM keytab file to $CONF_DIR/$NAME.keytab"
    wget "http://$ISSUER_SERVER/keytab/$HOST_NAME/$NAME" -O "$CONF_DIR/$NAME.keytab"
    klist -kt "$CONF_DIR/$NAME.keytab"
    KERBEROS_ENABLED=true
  done

  # Point the Kerberos client config at the real KDC host.
  sed "s/SERVER/$KERBEROS_SERVER/g" "$DIR"/krb5.conf | sudo tee /etc/krb5.conf
fi

#To avoid docker volume permission problems
sudo chmod o+rwx /data

# Generate the Hadoop/Ozone config files from environment variables.
"$DIR"/envtoconf.py --destination /opt/hadoop/etc/hadoop

if [ -n "$ENSURE_NAMENODE_DIR" ]; then
  CLUSTERID_OPTS=""
  if [ -n "$ENSURE_NAMENODE_CLUSTERID" ]; then
    CLUSTERID_OPTS="-clusterid $ENSURE_NAMENODE_CLUSTERID"
  fi
  if [ ! -d "$ENSURE_NAMENODE_DIR" ]; then
    # BUG FIX: $CLUSTERID_OPTS must stay unquoted: when it is empty, a
    # quoted expansion passes a spurious empty argument to the namenode,
    # and when set it must split into two arguments.
    # shellcheck disable=SC2086
    /opt/hadoop/bin/hdfs namenode -format -force $CLUSTERID_OPTS
  fi
fi

if [ -n "$ENSURE_STANDBY_NAMENODE_DIR" ]; then
  if [ ! -d "$ENSURE_STANDBY_NAMENODE_DIR" ]; then
    /opt/hadoop/bin/hdfs namenode -bootstrapStandby
  fi
fi

if [ -n "$ENSURE_SCM_INITIALIZED" ]; then
  if [ ! -f "$ENSURE_SCM_INITIALIZED" ]; then
    /opt/hadoop/bin/ozone scm -init
  fi
fi

if [ -n "$ENSURE_OM_INITIALIZED" ]; then
  if [ ! -f "$ENSURE_OM_INITIALIZED" ]; then
    #To make sure SCM is running in dockerized environment we will sleep
    # Could be removed after HDFS-13203
    echo "Waiting 15 seconds for SCM startup"
    sleep 15
    /opt/hadoop/bin/ozone om -createObjectStore
  fi
fi

echo 'setup finished'
"$@"
|
|
@ -0,0 +1,150 @@
|
||||||
|
#!/usr/bin/python
|
||||||
|
#
|
||||||
|
# Licensed to the Apache Software Foundation (ASF) under one
|
||||||
|
# or more contributor license agreements. See the NOTICE file
|
||||||
|
# distributed with this work for additional information
|
||||||
|
# regarding copyright ownership. The ASF licenses this file
|
||||||
|
# to you under the Apache License, Version 2.0 (the
|
||||||
|
# "License"); you may not use this file except in compliance
|
||||||
|
# with the License. You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
#
|
||||||
|
|
||||||
|
"""This module transform properties into different format"""
|
||||||
|
def render_yaml(yaml_root, prefix=""):
    """Render a nested dict/list/scalar tree as indented YAML text."""
    if isinstance(yaml_root, dict):
        # A nested mapping starts on a fresh line; the top-level one does not.
        rendered = "\n" if prefix else ""
        for key in yaml_root:
            rendered += "{}{}: {}".format(
                prefix, key, render_yaml(yaml_root[key], prefix + "   "))
        return rendered
    if isinstance(yaml_root, list):
        rendered = "\n"
        for item in yaml_root:
            rendered += prefix + " - " + render_yaml(item, prefix + "   ")
        return rendered
    # Scalar leaf: its own line.
    return "{}\n".format(yaml_root)
|
||||||
|
|
||||||
|
|
||||||
|
def to_yaml(content):
    """Transform properties-style text into YAML text.

    Dotted keys become nested mappings; a numeric path segment turns the
    parent container into a list (padded with empty dicts as needed).
    """
    props = process_properties(content)

    keys = props.keys()
    yaml_props = {}
    for key in keys:
        parts = key.split(".")
        node = yaml_props
        prev_part = None
        parent_node = {}
        for part in parts[:-1]:
            if part.isdigit():
                if isinstance(node, dict):
                    # Replace the dict placeholder with a list in the parent.
                    parent_node[prev_part] = []
                    node = parent_node[prev_part]
                while len(node) <= int(part):
                    node.append({})
                parent_node = node
                # BUG FIX: index by the current path segment; the original
                # `node[int(node)]` called int() on the list itself and
                # raised TypeError for any list-valued key path.
                node = node[int(part)]
            else:
                if part not in node:
                    node[part] = {}
                parent_node = node
                node = node[part]
            prev_part = part
        if parts[-1].isdigit():
            if isinstance(node, dict):
                parent_node[prev_part] = []
                node = parent_node[prev_part]
            node.append(props[key])
        else:
            node[parts[-1]] = props[key]

    return render_yaml(yaml_props)
|
||||||
|
|
||||||
|
|
||||||
|
def to_yml(content):
    """Transform to yml (alias for the yaml renderer)."""
    return to_yaml(content)
|
||||||
|
|
||||||
|
|
||||||
|
def to_properties(content):
    """Render the parsed entries back as 'key: value' properties lines."""
    parsed = process_properties(content)
    return "".join("{}: {}\n".format(k, v) for k, v in parsed.items())
|
||||||
|
|
||||||
|
|
||||||
|
def to_env(content):
    """Transform to KEY=value environment-variable lines."""
    result = ""
    props = process_properties(content)
    # BUG FIX: iterate items(); iterating a dict directly yields only keys,
    # so `for key, val in props` mis-unpacked each key string.
    for key, val in props.items():
        result += "{}={}\n".format(key, val)
    return result
|
||||||
|
|
||||||
|
|
||||||
|
def to_sh(content):
    """Transform to shell 'export KEY="value"' lines."""
    result = ""
    props = process_properties(content)
    # BUG FIX: iterate items(); iterating a dict directly yields only keys,
    # so `for key, val in props` mis-unpacked each key string.
    for key, val in props.items():
        result += "export {}=\"{}\"\n".format(key, val)
    return result
|
||||||
|
|
||||||
|
|
||||||
|
def to_cfg(content):
    """Transform to KEY=value config lines."""
    result = ""
    props = process_properties(content)
    # BUG FIX: iterate items(); iterating a dict directly yields only keys,
    # so `for key, val in props` mis-unpacked each key string.
    for key, val in props.items():
        result += "{}={}\n".format(key, val)
    return result
|
||||||
|
|
||||||
|
|
||||||
|
def to_conf(content):
    """Transform to 'export KEY=value' configuration lines."""
    result = ""
    props = process_properties(content)
    # BUG FIX: iterate items(); iterating a dict directly yields only keys,
    # so `for key, val in props` mis-unpacked each key string.
    for key, val in props.items():
        result += "export {}={}\n".format(key, val)
    return result
|
||||||
|
|
||||||
|
|
||||||
|
def to_xml(content):
    """Render a Hadoop-style <configuration> XML document.

    NOTE(review): values are interpolated without XML escaping — confirm
    upstream whether values containing &/< are expected here.
    """
    entries = process_properties(content)
    body = "".join(
        "<property><name>{0}</name><value>{1}</value></property>\n".format(
            key, entries[key])
        for key in entries)
    return "<configuration>\n" + body + "</configuration>"
|
||||||
|
|
||||||
|
|
||||||
|
def process_properties(content, sep=': ', comment_char='#'):
    """Parse properties-style text into a dict.

    Blank lines and lines starting with ``comment_char`` are skipped; the
    first occurrence of ``sep`` splits key from value, and surrounding
    whitespace plus double quotes are stripped from the value.
    """
    props = {}
    for raw_line in content.split("\n"):
        stripped = raw_line.strip()
        if not stripped or stripped.startswith(comment_char):
            continue
        pieces = stripped.split(sep)
        props[pieces[0].strip()] = sep.join(pieces[1:]).strip().strip('"')
    return props
|
|
@ -118,6 +118,12 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||||
<artifactId>bcprov-jdk15on</artifactId>
|
<artifactId>bcprov-jdk15on</artifactId>
|
||||||
<version>1.54</version>
|
<version>1.54</version>
|
||||||
</dependency>
|
</dependency>
|
||||||
|
|
||||||
|
<dependency>
|
||||||
|
<groupId>org.bouncycastle</groupId>
|
||||||
|
<artifactId>bcpkix-jdk15on</artifactId>
|
||||||
|
<version>1.54</version>
|
||||||
|
</dependency>
|
||||||
</dependencies>
|
</dependencies>
|
||||||
|
|
||||||
<build>
|
<build>
|
||||||
|
|
|
@ -327,11 +327,11 @@ public final class ScmConfigKeys {
|
||||||
"10m";
|
"10m";
|
||||||
|
|
||||||
public static final String
|
public static final String
|
||||||
HDDS_SCM_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY =
|
HDDS_SCM_HTTP_KERBEROS_PRINCIPAL_KEY =
|
||||||
"hdds.scm.web.authentication.kerberos.principal";
|
"hdds.scm.http.kerberos.principal";
|
||||||
public static final String
|
public static final String
|
||||||
HDDS_SCM_WEB_AUTHENTICATION_KERBEROS_KEYTAB_FILE_KEY =
|
HDDS_SCM_HTTP_KERBEROS_KEYTAB_FILE_KEY =
|
||||||
"hdds.scm.web.authentication.kerberos.keytab";
|
"hdds.scm.http.kerberos.keytab";
|
||||||
/**
|
/**
|
||||||
* Never constructed.
|
* Never constructed.
|
||||||
*/
|
*/
|
||||||
|
|
|
@ -48,7 +48,7 @@ import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_X509_MAX_DURATION;
|
||||||
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_X509_MAX_DURATION_DEFAULT;
|
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_X509_MAX_DURATION_DEFAULT;
|
||||||
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_X509_SIGNATURE_ALGO;
|
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_X509_SIGNATURE_ALGO;
|
||||||
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_X509_SIGNATURE_ALGO_DEFAULT;
|
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_X509_SIGNATURE_ALGO_DEFAULT;
|
||||||
import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_METADATA_DIRS;
|
import static org.apache.hadoop.hdds.HddsConfigKeys.OZONE_METADATA_DIRS;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* A class that deals with all Security related configs in HDDS.
|
* A class that deals with all Security related configs in HDDS.
|
||||||
|
|
|
@ -329,9 +329,6 @@ public final class OzoneConfigKeys {
|
||||||
DFS_RATIS_SERVER_FAILURE_DURATION_DEFAULT =
|
DFS_RATIS_SERVER_FAILURE_DURATION_DEFAULT =
|
||||||
ScmConfigKeys.DFS_RATIS_SERVER_FAILURE_DURATION_DEFAULT;
|
ScmConfigKeys.DFS_RATIS_SERVER_FAILURE_DURATION_DEFAULT;
|
||||||
|
|
||||||
public static final String OZONE_SCM_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL =
|
|
||||||
"ozone.web.authentication.kerberos.principal";
|
|
||||||
|
|
||||||
public static final String HDDS_DATANODE_PLUGINS_KEY =
|
public static final String HDDS_DATANODE_PLUGINS_KEY =
|
||||||
"hdds.datanode.plugins";
|
"hdds.datanode.plugins";
|
||||||
|
|
||||||
|
|
|
@ -1629,11 +1629,11 @@
|
||||||
</property>
|
</property>
|
||||||
|
|
||||||
<property>
|
<property>
|
||||||
<name>hdds.scm.web.authentication.kerberos.principal</name>
|
<name>hdds.scm.http.kerberos.principal</name>
|
||||||
<value>HTTP/_HOST@EXAMPLE.COM</value>
|
<value>HTTP/_HOST@EXAMPLE.COM</value>
|
||||||
</property>
|
</property>
|
||||||
<property>
|
<property>
|
||||||
<name>hdds.scm.web.authentication.kerberos.keytab</name>
|
<name>hdds.scm.http.kerberos.keytab</name>
|
||||||
<value>/etc/security/keytabs/HTTP.keytab</value>
|
<value>/etc/security/keytabs/HTTP.keytab</value>
|
||||||
</property>
|
</property>
|
||||||
|
|
||||||
|
@ -1645,7 +1645,7 @@
|
||||||
</description>
|
</description>
|
||||||
</property>
|
</property>
|
||||||
<property>
|
<property>
|
||||||
<name>ozone.om.http.kerberos.keytab.file</name>
|
<name>ozone.om.http.kerberos.keytab</name>
|
||||||
<value>/etc/security/keytabs/HTTP.keytab</value>
|
<value>/etc/security/keytabs/HTTP.keytab</value>
|
||||||
<description>
|
<description>
|
||||||
OzoneManager http server kerberos keytab.
|
OzoneManager http server kerberos keytab.
|
||||||
|
|
|
@ -46,7 +46,7 @@ import java.time.Instant;
|
||||||
import java.util.Date;
|
import java.util.Date;
|
||||||
import java.util.UUID;
|
import java.util.UUID;
|
||||||
|
|
||||||
import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_METADATA_DIRS;
|
import static org.apache.hadoop.hdds.HddsConfigKeys.OZONE_METADATA_DIRS;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Test Class for Root Certificate generation.
|
* Test Class for Root Certificate generation.
|
||||||
|
|
|
@ -19,7 +19,7 @@
|
||||||
|
|
||||||
package org.apache.hadoop.hdds.security.x509.keys;
|
package org.apache.hadoop.hdds.security.x509.keys;
|
||||||
|
|
||||||
import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_METADATA_DIRS;
|
import static org.apache.hadoop.hdds.HddsConfigKeys.OZONE_METADATA_DIRS;
|
||||||
import java.security.KeyPair;
|
import java.security.KeyPair;
|
||||||
import java.security.NoSuchAlgorithmException;
|
import java.security.NoSuchAlgorithmException;
|
||||||
import java.security.NoSuchProviderException;
|
import java.security.NoSuchProviderException;
|
||||||
|
|
|
@ -30,6 +30,10 @@ import org.apache.hadoop.hdds.scm.ScmConfigKeys;
|
||||||
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
|
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
|
||||||
import org.apache.hadoop.ozone.container.common.helpers.ContainerUtils;
|
import org.apache.hadoop.ozone.container.common.helpers.ContainerUtils;
|
||||||
import org.apache.hadoop.ozone.container.common.statemachine.DatanodeStateMachine;
|
import org.apache.hadoop.ozone.container.common.statemachine.DatanodeStateMachine;
|
||||||
|
import org.apache.hadoop.hdfs.DFSConfigKeys;
|
||||||
|
import org.apache.hadoop.security.SecurityUtil;
|
||||||
|
import org.apache.hadoop.security.UserGroupInformation;
|
||||||
|
import org.apache.hadoop.security.authentication.client.AuthenticationException;
|
||||||
import org.apache.hadoop.util.ServicePlugin;
|
import org.apache.hadoop.util.ServicePlugin;
|
||||||
import org.apache.hadoop.util.StringUtils;
|
import org.apache.hadoop.util.StringUtils;
|
||||||
import org.slf4j.Logger;
|
import org.slf4j.Logger;
|
||||||
|
@ -147,12 +151,38 @@ public class HddsDatanodeService extends GenericCli implements ServicePlugin {
|
||||||
datanodeDetails = initializeDatanodeDetails();
|
datanodeDetails = initializeDatanodeDetails();
|
||||||
datanodeDetails.setHostName(hostname);
|
datanodeDetails.setHostName(hostname);
|
||||||
datanodeDetails.setIpAddress(ip);
|
datanodeDetails.setIpAddress(ip);
|
||||||
|
LOG.info("HddsDatanodeService host:{} ip:{}", hostname, ip);
|
||||||
|
// Authenticate Hdds Datanode service if security is enabled
|
||||||
|
if (conf.getBoolean(OzoneConfigKeys.OZONE_SECURITY_ENABLED_KEY,
|
||||||
|
true)) {
|
||||||
|
if (SecurityUtil.getAuthenticationMethod(conf).equals(
|
||||||
|
UserGroupInformation.AuthenticationMethod.KERBEROS)) {
|
||||||
|
LOG.debug("Ozone security is enabled. Attempting login for Hdds " +
|
||||||
|
"Datanode user. "
|
||||||
|
+ "Principal: {},keytab: {}", conf.get(
|
||||||
|
DFSConfigKeys.DFS_DATANODE_KERBEROS_PRINCIPAL_KEY),
|
||||||
|
conf.get(DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY));
|
||||||
|
|
||||||
|
UserGroupInformation.setConfiguration(conf);
|
||||||
|
|
||||||
|
SecurityUtil.login(conf, DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY,
|
||||||
|
DFSConfigKeys.DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hostname);
|
||||||
|
} else {
|
||||||
|
throw new AuthenticationException(SecurityUtil.
|
||||||
|
getAuthenticationMethod(conf) + " authentication method not " +
|
||||||
|
"supported. Datanode user" + " login " + "failed.");
|
||||||
|
}
|
||||||
|
LOG.info("Hdds Datanode login successful.");
|
||||||
|
}
|
||||||
datanodeStateMachine = new DatanodeStateMachine(datanodeDetails, conf);
|
datanodeStateMachine = new DatanodeStateMachine(datanodeDetails, conf);
|
||||||
startPlugins();
|
startPlugins();
|
||||||
// Starting HDDS Daemons
|
// Starting HDDS Daemons
|
||||||
datanodeStateMachine.startDaemon();
|
datanodeStateMachine.startDaemon();
|
||||||
} catch (IOException e) {
|
} catch (IOException e) {
|
||||||
throw new RuntimeException("Can't start the HDDS datanode plugin", e);
|
throw new RuntimeException("Can't start the HDDS datanode plugin", e);
|
||||||
|
} catch (AuthenticationException ex) {
|
||||||
|
throw new RuntimeException("Fail to authentication when starting" +
|
||||||
|
" HDDS datanode plugin", ex);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -62,11 +62,11 @@ public class StorageContainerManagerHttpServer extends BaseHttpServer {
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override protected String getKeytabFile() {
|
@Override protected String getKeytabFile() {
|
||||||
return ScmConfigKeys.HDDS_SCM_WEB_AUTHENTICATION_KERBEROS_KEYTAB_FILE_KEY;
|
return ScmConfigKeys.HDDS_SCM_HTTP_KERBEROS_KEYTAB_FILE_KEY;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override protected String getSpnegoPrincipal() {
|
@Override protected String getSpnegoPrincipal() {
|
||||||
return ScmConfigKeys.HDDS_SCM_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY;
|
return ScmConfigKeys.HDDS_SCM_HTTP_KERBEROS_PRINCIPAL_KEY;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override protected String getEnabledKey() {
|
@Override protected String getEnabledKey() {
|
||||||
|
|
|
@ -180,8 +180,8 @@ public final class OMConfigKeys {
|
||||||
+ "kerberos.keytab.file";
|
+ "kerberos.keytab.file";
|
||||||
public static final String OZONE_OM_KERBEROS_PRINCIPAL_KEY = "ozone.om"
|
public static final String OZONE_OM_KERBEROS_PRINCIPAL_KEY = "ozone.om"
|
||||||
+ ".kerberos.principal";
|
+ ".kerberos.principal";
|
||||||
public static final String OZONE_OM_WEB_AUTHENTICATION_KERBEROS_KEYTAB_FILE =
|
public static final String OZONE_OM_HTTP_KERBEROS_KEYTAB_FILE =
|
||||||
"ozone.om.http.kerberos.keytab.file";
|
"ozone.om.http.kerberos.keytab.file";
|
||||||
public static final String OZONE_OM_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY
|
public static final String OZONE_OM_HTTP_KERBEROS_PRINCIPAL_KEY
|
||||||
= "ozone.om.http.kerberos.principal";
|
= "ozone.om.http.kerberos.principal";
|
||||||
}
|
}
|
||||||
|
|
|
@ -119,9 +119,9 @@ public final class TestSecureOzoneCluster {
|
||||||
conf.get(ScmConfigKeys.HDDS_SCM_KERBEROS_PRINCIPAL_KEY));
|
conf.get(ScmConfigKeys.HDDS_SCM_KERBEROS_PRINCIPAL_KEY));
|
||||||
createPrincipal(spnegoKeytab,
|
createPrincipal(spnegoKeytab,
|
||||||
conf.get(ScmConfigKeys
|
conf.get(ScmConfigKeys
|
||||||
.HDDS_SCM_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY));
|
.HDDS_SCM_HTTP_KERBEROS_PRINCIPAL_KEY));
|
||||||
conf.get(OMConfigKeys
|
conf.get(OMConfigKeys
|
||||||
.OZONE_OM_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY);
|
.OZONE_OM_HTTP_KERBEROS_PRINCIPAL_KEY);
|
||||||
createPrincipal(omKeyTab,
|
createPrincipal(omKeyTab,
|
||||||
conf.get(OMConfigKeys.OZONE_OM_KERBEROS_PRINCIPAL_KEY));
|
conf.get(OMConfigKeys.OZONE_OM_KERBEROS_PRINCIPAL_KEY));
|
||||||
}
|
}
|
||||||
|
@ -155,12 +155,12 @@ public final class TestSecureOzoneCluster {
|
||||||
|
|
||||||
conf.set(ScmConfigKeys.HDDS_SCM_KERBEROS_PRINCIPAL_KEY,
|
conf.set(ScmConfigKeys.HDDS_SCM_KERBEROS_PRINCIPAL_KEY,
|
||||||
"scm/" + host + "@" + realm);
|
"scm/" + host + "@" + realm);
|
||||||
conf.set(ScmConfigKeys.HDDS_SCM_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY,
|
conf.set(ScmConfigKeys.HDDS_SCM_HTTP_KERBEROS_PRINCIPAL_KEY,
|
||||||
"HTTP_SCM/" + host + "@" + realm);
|
"HTTP_SCM/" + host + "@" + realm);
|
||||||
|
|
||||||
conf.set(OMConfigKeys.OZONE_OM_KERBEROS_PRINCIPAL_KEY,
|
conf.set(OMConfigKeys.OZONE_OM_KERBEROS_PRINCIPAL_KEY,
|
||||||
"om/" + host + "@" + realm);
|
"om/" + host + "@" + realm);
|
||||||
conf.set(OMConfigKeys.OZONE_OM_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY,
|
conf.set(OMConfigKeys.OZONE_OM_HTTP_KERBEROS_PRINCIPAL_KEY,
|
||||||
"HTTP_OM/" + host + "@" + realm);
|
"HTTP_OM/" + host + "@" + realm);
|
||||||
|
|
||||||
scmKeytab = new File(workDir, "scm.keytab");
|
scmKeytab = new File(workDir, "scm.keytab");
|
||||||
|
@ -169,7 +169,8 @@ public final class TestSecureOzoneCluster {
|
||||||
|
|
||||||
conf.set(ScmConfigKeys.HDDS_SCM_KERBEROS_KEYTAB_FILE_KEY,
|
conf.set(ScmConfigKeys.HDDS_SCM_KERBEROS_KEYTAB_FILE_KEY,
|
||||||
scmKeytab.getAbsolutePath());
|
scmKeytab.getAbsolutePath());
|
||||||
conf.set(ScmConfigKeys.HDDS_SCM_WEB_AUTHENTICATION_KERBEROS_KEYTAB_FILE_KEY,
|
conf.set(
|
||||||
|
ScmConfigKeys.HDDS_SCM_HTTP_KERBEROS_KEYTAB_FILE_KEY,
|
||||||
spnegoKeytab.getAbsolutePath());
|
spnegoKeytab.getAbsolutePath());
|
||||||
conf.set(OMConfigKeys.OZONE_OM_KERBEROS_KEYTAB_FILE_KEY,
|
conf.set(OMConfigKeys.OZONE_OM_KERBEROS_KEYTAB_FILE_KEY,
|
||||||
omKeyTab.getAbsolutePath());
|
omKeyTab.getAbsolutePath());
|
||||||
|
|
|
@ -64,11 +64,11 @@ public class OzoneManagerHttpServer extends BaseHttpServer {
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override protected String getKeytabFile() {
|
@Override protected String getKeytabFile() {
|
||||||
return OMConfigKeys.OZONE_OM_WEB_AUTHENTICATION_KERBEROS_KEYTAB_FILE;
|
return OMConfigKeys.OZONE_OM_HTTP_KERBEROS_KEYTAB_FILE;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override protected String getSpnegoPrincipal() {
|
@Override protected String getSpnegoPrincipal() {
|
||||||
return OMConfigKeys.OZONE_OM_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY;
|
return OMConfigKeys.OZONE_OM_HTTP_KERBEROS_PRINCIPAL_KEY;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override protected String getEnabledKey() {
|
@Override protected String getEnabledKey() {
|
||||||
|
|
Loading…
Reference in New Issue