Commit d9838f2

HADOOP-17055. Remove residual code of Ozone (#2039)
1 parent b2200a3 commit d9838f2

File tree

8 files changed: 1 addition, 100 deletions

.gitignore
dev-support/bin/dist-layout-stitching
dev-support/docker/Dockerfile
hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh
hadoop-common-project/hadoop-common/src/main/conf/log4j.properties
hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/datanode/datanode.html
hadoop-hdfs-project/hadoop-hdfs/src/test/resources/log4j.properties


.gitignore

Lines changed: 0 additions & 5 deletions
@@ -52,15 +52,10 @@ patchprocess/
 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-ui/src/main/webapp/package-lock.json
 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-ui/src/main/webapp/yarn-error.log
 
-# Ignore files generated by HDDS acceptance tests.
-hadoop-ozone/acceptance-test/docker-compose.log
-hadoop-ozone/acceptance-test/junit-results.xml
-
 #robotframework outputs
 log.html
 output.xml
 report.html
 
-hadoop-hdds/docs/public
 
 .mvn

dev-support/bin/dist-layout-stitching

Lines changed: 0 additions & 3 deletions
@@ -21,9 +21,6 @@ VERSION=$1
 # project.build.directory
 BASEDIR=$2
 
-#hdds.version
-HDDS_VERSION=$3
-
 function run()
 {
 declare res

dev-support/docker/Dockerfile

Lines changed: 1 addition & 1 deletion
@@ -189,7 +189,7 @@ ENV MAVEN_OPTS -Xms256m -Xmx1536m
 # YETUS CUT HERE
 ###
 
-# Hugo static website generator (for new hadoop site and Ozone docs)
+# Hugo static website generator for new hadoop site
 RUN curl -L -o hugo.deb https://github.com/gohugoio/hugo/releases/download/v0.58.3/hugo_0.58.3_Linux-64bit.deb \
 && dpkg --install hugo.deb \
 && rm hugo.deb

hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh

Lines changed: 0 additions & 5 deletions
@@ -596,11 +596,6 @@ function hadoop_bootstrap
 YARN_LIB_JARS_DIR=${YARN_LIB_JARS_DIR:-"share/hadoop/yarn/lib"}
 MAPRED_DIR=${MAPRED_DIR:-"share/hadoop/mapreduce"}
 MAPRED_LIB_JARS_DIR=${MAPRED_LIB_JARS_DIR:-"share/hadoop/mapreduce/lib"}
-HDDS_DIR=${HDDS_DIR:-"share/hadoop/hdds"}
-HDDS_LIB_JARS_DIR=${HDDS_LIB_JARS_DIR:-"share/hadoop/hdds/lib"}
-OZONE_DIR=${OZONE_DIR:-"share/hadoop/ozone"}
-OZONE_LIB_JARS_DIR=${OZONE_LIB_JARS_DIR:-"share/hadoop/ozone/lib"}
-OZONEFS_DIR=${OZONEFS_DIR:-"share/hadoop/ozonefs"}
 
 HADOOP_TOOLS_HOME=${HADOOP_TOOLS_HOME:-${HADOOP_HOME}}
 HADOOP_TOOLS_DIR=${HADOOP_TOOLS_DIR:-"share/hadoop/tools"}

hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh

Lines changed: 0 additions & 9 deletions
@@ -390,15 +390,6 @@ export HADOOP_OS_TYPE=${HADOOP_OS_TYPE:-$(uname -s)}
 #
 # export HDFS_DFSROUTER_OPTS=""
 
-###
-# Ozone Manager specific parameters
-###
-# Specify the JVM options to be used when starting the Ozone Manager.
-# These options will be appended to the options specified as HADOOP_OPTS
-# and therefore may override any similar flags set in HADOOP_OPTS
-#
-# export HDFS_OM_OPTS=""
-
 ###
 # HDFS StorageContainerManager specific parameters
 ###

hadoop-common-project/hadoop-common/src/main/conf/log4j.properties

Lines changed: 0 additions & 7 deletions
@@ -282,13 +282,6 @@ log4j.appender.NMAUDIT.MaxBackupIndex=${nm.audit.log.maxbackupindex}
 #log4j.appender.nodemanagerrequestlog.Filename=${hadoop.log.dir}/jetty-nodemanager-yyyy_mm_dd.log
 #log4j.appender.nodemanagerrequestlog.RetainDays=3
 
-#Http Server request logs for Ozone S3Gateway
-log4j.logger.http.requests.s3gateway=INFO,s3gatewayrequestlog
-log4j.appender.s3gatewayrequestlog=org.apache.hadoop.http.HttpRequestLogAppender
-log4j.appender.s3gatewayrequestlog.Filename=${hadoop.log.dir}/jetty-s3gateway-yyyy_mm_dd.log
-log4j.appender.s3gatewayrequestlog.RetainDays=3
-
-
 # WebHdfs request log on datanodes
 # Specify -Ddatanode.webhdfs.logger=INFO,HTTPDRFA on datanode startup to
 # direct the log to a separate file.

hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/datanode/datanode.html

Lines changed: 0 additions & 46 deletions
@@ -99,52 +99,6 @@
 {/dn.BPServiceActorInfo}
 </table>
 
-{#ozone.enabled}
-<div class="page-header"><h1>Ozone: SCM Connections</h1></div>
-<table class="table">
-<thead>
-<tr>
-<th>SCM Address</th>
-<th>Status</th>
-<th>Version</th>
-<th>Missed count</th>
-<th>Last heartbeat</th>
-</tr>
-</thead>
-{#ozone.SCMServers}
-<tr>
-<td>{addressString}</td>
-<td>{state}</td>
-<td>{versionNumber}</td>
-<td>{missedCount}s</td>
-<td>{lastSuccessfulHeartbeat|elapsed|fmt_time}</td>
-</tr>
-{/ozone.SCMServers}
-</table>
-
-<div class="page-header"><h1>Ozone: Storage locations</h1></div>
-<table class="table">
-<thead>
-<tr>
-<th>ID</th>
-<th>Capacity</th>
-<th>Remaining</th>
-<th>SCM used</th>
-<th>failed</th>
-</tr>
-</thead>
-{#ozone.LocationReport}
-<tr>
-<td>{id}</td>
-<td>{capacity|fmt_bytes}</td>
-<td>{remaining|fmt_bytes}</td>
-<td>{scmUsed|fmt_bytes}</td>
-<td>{failed}</td>
-</tr>
-{/ozone.LocationReport}
-</table>
-{/ozone.enabled}
-
 <div class="page-header"><h1>Volume Information</h1></div>
 <table class="table">
 <thead>

hadoop-hdfs-project/hadoop-hdfs/src/test/resources/log4j.properties

Lines changed: 0 additions & 24 deletions
@@ -48,29 +48,5 @@ log4j.appender.DNMETRICSRFA.layout.ConversionPattern=%d{ISO8601} %m%n
 log4j.appender.DNMETRICSRFA.MaxBackupIndex=1
 log4j.appender.DNMETRICSRFA.MaxFileSize=64MB
 
-#
-# Add a logger for ozone that is separate from the Datanode.
-#
-log4j.logger.org.apache.hadoop.ozone=INFO,OZONE,FILE
-
-# Do not log into datanode logs. Remove this line to have single log.
-log4j.additivity.org.apache.hadoop.ozone=false
-
-# For development purposes, log both to console and log file.
-log4j.appender.OZONE=org.apache.log4j.ConsoleAppender
-log4j.appender.OZONE.Threshold=ALL
-log4j.appender.OZONE.layout=org.apache.log4j.PatternLayout
-log4j.appender.OZONE.layout.ConversionPattern=%d{ISO8601} [%t] %-5p %c{2} (%F:%M(%L)) \
-%X{component} %X{function} %X{resource} %X{user} %X{request} - %m%n
-
-# Real ozone logger that writes to ozone.log
-log4j.appender.FILE=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.FILE.File=${hadoop.log.dir}/ozone.log
-log4j.appender.FILE.Threshold=debug
-log4j.appender.FILE.layout=org.apache.log4j.PatternLayout
-log4j.appender.FILE.layout.ConversionPattern=%d{ISO8601} [%t] %-5p \
-(%F:%L) %X{function} %X{resource} %X{user} %X{request} - \
-%m%n
-
 # Supress KMS error log
 log4j.logger.com.sun.jersey.server.wadl.generators.WadlGeneratorJAXBGrammarGenerator=OFF
