HADOOP-17055. Remove residual code of Ozone (#2039)

Wanqiang Ji 2020-05-29 15:49:18 +08:00 committed by GitHub
parent b2200a33a6
commit d9838f2d42
8 changed files with 1 addition and 100 deletions

.gitignore

@@ -52,15 +52,10 @@ patchprocess/
 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-ui/src/main/webapp/package-lock.json
 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-ui/src/main/webapp/yarn-error.log
-# Ignore files generated by HDDS acceptance tests.
-hadoop-ozone/acceptance-test/docker-compose.log
-hadoop-ozone/acceptance-test/junit-results.xml
 #robotframework outputs
 log.html
 output.xml
 report.html
-hadoop-hdds/docs/public
 .mvn


@@ -21,9 +21,6 @@ VERSION=$1
 # project.build.directory
 BASEDIR=$2
-#hdds.version
-HDDS_VERSION=$3
 function run()
 {
 declare res
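For orientation, the script in this hunk consumes its inputs as positional parameters; with the HDDS argument gone it only needs the release version and the build directory. A minimal sketch of that calling convention, with an added usage guard (the argument values and error message are illustrative, not part of the commit):

#!/usr/bin/env bash
# Illustrative sketch: mirrors the two remaining positional parameters.
VERSION=$1      # release version, e.g. the Maven project.version
BASEDIR=$2      # build output directory, e.g. project.build.directory

if [[ $# -lt 2 ]]; then
  echo "Usage: $0 <version> <build-directory>" >&2
  exit 1
fi

echo "stitching distribution layout for ${VERSION} under ${BASEDIR}"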


@@ -189,7 +189,7 @@ ENV MAVEN_OPTS -Xms256m -Xmx1536m
 # YETUS CUT HERE
 ###
-# Hugo static website generator (for new hadoop site and Ozone docs)
+# Hugo static website generator for new hadoop site
 RUN curl -L -o hugo.deb https://github.com/gohugoio/hugo/releases/download/v0.58.3/hugo_0.58.3_Linux-64bit.deb \
 && dpkg --install hugo.deb \
 && rm hugo.deb


@@ -596,11 +596,6 @@ function hadoop_bootstrap
 YARN_LIB_JARS_DIR=${YARN_LIB_JARS_DIR:-"share/hadoop/yarn/lib"}
 MAPRED_DIR=${MAPRED_DIR:-"share/hadoop/mapreduce"}
 MAPRED_LIB_JARS_DIR=${MAPRED_LIB_JARS_DIR:-"share/hadoop/mapreduce/lib"}
-HDDS_DIR=${HDDS_DIR:-"share/hadoop/hdds"}
-HDDS_LIB_JARS_DIR=${HDDS_LIB_JARS_DIR:-"share/hadoop/hdds/lib"}
-OZONE_DIR=${OZONE_DIR:-"share/hadoop/ozone"}
-OZONE_LIB_JARS_DIR=${OZONE_LIB_JARS_DIR:-"share/hadoop/ozone/lib"}
-OZONEFS_DIR=${OZONEFS_DIR:-"share/hadoop/ozonefs"}
 HADOOP_TOOLS_HOME=${HADOOP_TOOLS_HOME:-${HADOOP_HOME}}
 HADOOP_TOOLS_DIR=${HADOOP_TOOLS_DIR:-"share/hadoop/tools"}
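All of the directory variables in this hunk, the kept and the removed ones alike, rely on Bash's ${VAR:-default} expansion: a value exported by the operator wins, otherwise the bundled layout path is used. A minimal sketch of the idiom, using a hypothetical variable name that is not part of the commit:

#!/usr/bin/env bash
# ${VAR:-default} keeps a non-empty existing value and falls back otherwise.
unset EXAMPLE_DIR
EXAMPLE_DIR=${EXAMPLE_DIR:-"share/hadoop/example"}
echo "${EXAMPLE_DIR}"    # prints: share/hadoop/example

EXAMPLE_DIR="/opt/custom/layout"
EXAMPLE_DIR=${EXAMPLE_DIR:-"share/hadoop/example"}
echo "${EXAMPLE_DIR}"    # prints: /opt/custom/layout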


@@ -390,15 +390,6 @@ export HADOOP_OS_TYPE=${HADOOP_OS_TYPE:-$(uname -s)}
 #
 # export HDFS_DFSROUTER_OPTS=""
-###
-# Ozone Manager specific parameters
-###
-# Specify the JVM options to be used when starting the Ozone Manager.
-# These options will be appended to the options specified as HADOOP_OPTS
-# and therefore may override any similar flags set in HADOOP_OPTS
-#
-# export HDFS_OM_OPTS=""
 ###
 # HDFS StorageContainerManager specific parameters
 ###
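The deleted comment block followed the same convention as the surviving *_OPTS sections in this file: the service-specific variable is appended after HADOOP_OPTS on the java command line, so a JVM flag duplicated there wins because the last occurrence takes effect. A rough sketch of that ordering, using HDFS_DFSROUTER_OPTS from the surrounding context with illustrative values:

#!/usr/bin/env bash
# Illustrative values only; shows why an appended per-service option overrides.
HADOOP_OPTS="-Xms256m -Xmx1g"
HDFS_DFSROUTER_OPTS="-Xmx4g"

# The launchers assemble roughly: java $HADOOP_OPTS $<SERVICE>_OPTS <main class>.
# -Xmx appears twice; the JVM honours the last one, so the router gets 4g.
echo java ${HADOOP_OPTS} ${HDFS_DFSROUTER_OPTS} '<router main class>'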


@@ -282,13 +282,6 @@ log4j.appender.NMAUDIT.MaxBackupIndex=${nm.audit.log.maxbackupindex}
 #log4j.appender.nodemanagerrequestlog.Filename=${hadoop.log.dir}/jetty-nodemanager-yyyy_mm_dd.log
 #log4j.appender.nodemanagerrequestlog.RetainDays=3
-#Http Server request logs for Ozone S3Gateway
-log4j.logger.http.requests.s3gateway=INFO,s3gatewayrequestlog
-log4j.appender.s3gatewayrequestlog=org.apache.hadoop.http.HttpRequestLogAppender
-log4j.appender.s3gatewayrequestlog.Filename=${hadoop.log.dir}/jetty-s3gateway-yyyy_mm_dd.log
-log4j.appender.s3gatewayrequestlog.RetainDays=3
 # WebHdfs request log on datanodes
 # Specify -Ddatanode.webhdfs.logger=INFO,HTTPDRFA on datanode startup to
 # direct the log to a separate file.


@@ -99,52 +99,6 @@
 {/dn.BPServiceActorInfo}
 </table>
-{#ozone.enabled}
-<div class="page-header"><h1>Ozone: SCM Connections</h1></div>
-<table class="table">
-<thead>
-<tr>
-<th>SCM Address</th>
-<th>Status</th>
-<th>Version</th>
-<th>Missed count</th>
-<th>Last heartbeat</th>
-</tr>
-</thead>
-{#ozone.SCMServers}
-<tr>
-<td>{addressString}</td>
-<td>{state}</td>
-<td>{versionNumber}</td>
-<td>{missedCount}s</td>
-<td>{lastSuccessfulHeartbeat|elapsed|fmt_time}</td>
-</tr>
-{/ozone.SCMServers}
-</table>
-<div class="page-header"><h1>Ozone: Storage locations</h1></div>
-<table class="table">
-<thead>
-<tr>
-<th>ID</th>
-<th>Capacity</th>
-<th>Remaining</th>
-<th>SCM used</th>
-<th>failed</th>
-</tr>
-</thead>
-{#ozone.LocationReport}
-<tr>
-<td>{id}</td>
-<td>{capacity|fmt_bytes}</td>
-<td>{remaining|fmt_bytes}</td>
-<td>{scmUsed|fmt_bytes}</td>
-<td>{failed}</td>
-</tr>
-{/ozone.LocationReport}
-</table>
-{/ozone.enabled}
 <div class="page-header"><h1>Volume Information</h1></div>
 <table class="table">
 <thead>


@@ -48,29 +48,5 @@ log4j.appender.DNMETRICSRFA.layout.ConversionPattern=%d{ISO8601} %m%n
 log4j.appender.DNMETRICSRFA.MaxBackupIndex=1
 log4j.appender.DNMETRICSRFA.MaxFileSize=64MB
-#
-# Add a logger for ozone that is separate from the Datanode.
-#
-log4j.logger.org.apache.hadoop.ozone=INFO,OZONE,FILE
-# Do not log into datanode logs. Remove this line to have single log.
-log4j.additivity.org.apache.hadoop.ozone=false
-# For development purposes, log both to console and log file.
-log4j.appender.OZONE=org.apache.log4j.ConsoleAppender
-log4j.appender.OZONE.Threshold=ALL
-log4j.appender.OZONE.layout=org.apache.log4j.PatternLayout
-log4j.appender.OZONE.layout.ConversionPattern=%d{ISO8601} [%t] %-5p %c{2} (%F:%M(%L)) \
-%X{component} %X{function} %X{resource} %X{user} %X{request} - %m%n
-# Real ozone logger that writes to ozone.log
-log4j.appender.FILE=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.FILE.File=${hadoop.log.dir}/ozone.log
-log4j.appender.FILE.Threshold=debug
-log4j.appender.FILE.layout=org.apache.log4j.PatternLayout
-log4j.appender.FILE.layout.ConversionPattern=%d{ISO8601} [%t] %-5p \
-(%F:%L) %X{function} %X{resource} %X{user} %X{request} - \
-%m%n
 # Supress KMS error log
 log4j.logger.com.sun.jersey.server.wadl.generators.WadlGeneratorJAXBGrammarGenerator=OFF