HDDS-906. Display the ozone version on SCM/OM web ui instead of Hadoop version.

Contributed by Doroszlai, Attila.
This commit is contained in:
Nanda kumar 2019-01-25 12:16:29 +05:30
parent 8ff9578126
commit 45c4cfe790
8 changed files with 143 additions and 154 deletions

View File

@ -20,86 +20,24 @@ package org.apache.hadoop.utils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.ClassUtil;
import org.apache.hadoop.util.ThreadUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
/**
* This class returns build information about Hadoop components.
*/
@InterfaceAudience.Public
@InterfaceStability.Stable
public class HddsVersionInfo {
public final class HddsVersionInfo {
private static final Logger LOG = LoggerFactory.getLogger(
HddsVersionInfo.class);
public static final HddsVersionInfo HDDS_VERSION_INFO =
new HddsVersionInfo("hdds");
public static final VersionInfo HDDS_VERSION_INFO =
new VersionInfo("hdds");
private Properties info;
protected HddsVersionInfo(String component) {
info = new Properties();
String versionInfoFile = component + "-version-info.properties";
InputStream is = null;
try {
is = ThreadUtil.getResourceAsStream(
HddsVersionInfo.class.getClassLoader(),
versionInfoFile);
info.load(is);
} catch (IOException ex) {
LoggerFactory.getLogger(getClass()).warn("Could not read '" +
versionInfoFile + "', " + ex.toString(), ex);
} finally {
IOUtils.closeStream(is);
}
}
protected String getVersion() {
return info.getProperty("version", "Unknown");
}
protected String getRevision() {
return info.getProperty("revision", "Unknown");
}
protected String getBranch() {
return info.getProperty("branch", "Unknown");
}
protected String getDate() {
return info.getProperty("date", "Unknown");
}
protected String getUser() {
return info.getProperty("user", "Unknown");
}
protected String getUrl() {
return info.getProperty("url", "Unknown");
}
protected String getSrcChecksum() {
return info.getProperty("srcChecksum", "Unknown");
}
public String getBuildVersion() {
return HDDS_VERSION_INFO.getVersion() +
" from " + HDDS_VERSION_INFO.getRevision() +
" by " + getUser() +
" source checksum " + getSrcChecksum();
}
protected String getProtocVersion() {
return info.getProperty("protocVersion", "Unknown");
}
private HddsVersionInfo() {}
public static void main(String[] args) {
System.out.println("Using HDDS " + HDDS_VERSION_INFO.getVersion());

View File

@ -0,0 +1,97 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.utils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.ThreadUtil;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
/**
 * Provides build information (version, revision, branch and other build
 * metadata) for a component, loaded from a build-time generated
 * {@code <component>-version-info.properties} classpath resource.
 *
 * Every getter returns the string {@code "Unknown"} when the underlying
 * property is missing or the resource could not be read.
 */
@InterfaceAudience.Public
@InterfaceStability.Stable
public class VersionInfo {

  /** Build-time properties; left empty when the resource cannot be read. */
  private final Properties info = new Properties();

  /**
   * Loads version information for the given component.
   *
   * @param component resource name prefix, e.g. {@code "hdds"} resolves to
   *                  {@code hdds-version-info.properties}
   */
  public VersionInfo(String component) {
    String versionInfoFile = component + "-version-info.properties";
    // try-with-resources closes the stream even when load() throws,
    // replacing the manual null-init/finally/IOUtils.closeStream pattern.
    // ThreadUtil.getResourceAsStream throws IOException (never returns
    // null) when the resource is missing, so the behavior is unchanged.
    try (InputStream is = ThreadUtil.getResourceAsStream(
        getClass().getClassLoader(), versionInfoFile)) {
      info.load(is);
    } catch (IOException ex) {
      // Best effort: log and fall back to "Unknown" values rather than
      // failing component startup over missing build metadata.
      LoggerFactory.getLogger(getClass()).warn("Could not read '" +
          versionInfoFile + "', " + ex.toString(), ex);
    }
  }

  /** @return the release name, or "Unknown". */
  public String getRelease() {
    return info.getProperty("release", "Unknown");
  }

  /** @return the version string, or "Unknown". */
  public String getVersion() {
    return info.getProperty("version", "Unknown");
  }

  /** @return the source control revision, or "Unknown". */
  public String getRevision() {
    return info.getProperty("revision", "Unknown");
  }

  /** @return the source control branch, or "Unknown". */
  public String getBranch() {
    return info.getProperty("branch", "Unknown");
  }

  /** @return the build date, or "Unknown". */
  public String getDate() {
    return info.getProperty("date", "Unknown");
  }

  /** @return the user who performed the build, or "Unknown". */
  public String getUser() {
    return info.getProperty("user", "Unknown");
  }

  /** @return the source control URL, or "Unknown". */
  public String getUrl() {
    return info.getProperty("url", "Unknown");
  }

  /** @return the source checksum, or "Unknown". */
  public String getSrcChecksum() {
    return info.getProperty("srcChecksum", "Unknown");
  }

  /** @return the protoc version used for the build, or "Unknown". */
  public String getProtocVersion() {
    return info.getProperty("protocVersion", "Unknown");
  }

  /** @return a single human-readable build description line. */
  public String getBuildVersion() {
    return getVersion() +
        " from " + getRevision() +
        " by " + getUser() +
        " source checksum " + getSrcChecksum();
  }
}

View File

@ -17,7 +17,7 @@
package org.apache.hadoop.hdds.server;
import org.apache.hadoop.util.VersionInfo;
import org.apache.hadoop.utils.VersionInfo;
/**
* Helper base class to report the standard version and runtime information.
@ -26,21 +26,26 @@ import org.apache.hadoop.util.VersionInfo;
public class ServiceRuntimeInfoImpl implements ServiceRuntimeInfo {
private long startedTimeInMillis;
private final VersionInfo versionInfo;
protected ServiceRuntimeInfoImpl(VersionInfo versionInfo) {
this.versionInfo = versionInfo;
}
@Override
public String getVersion() {
return VersionInfo.getVersion() + ", r" + VersionInfo.getRevision();
return versionInfo.getVersion() + ", r" + versionInfo.getRevision();
}
@Override
public String getSoftwareVersion() {
return VersionInfo.getVersion();
return versionInfo.getVersion();
}
@Override
public String getCompileInfo() {
return VersionInfo.getDate() + " by " + VersionInfo.getUser() + " from "
+ VersionInfo.getBranch();
return versionInfo.getDate() + " by " + versionInfo.getUser() + " from "
+ versionInfo.getBranch();
}
@Override

View File

@ -44,20 +44,17 @@ import org.apache.hadoop.hdds.scm.command.CommandStatusReportHandler;
import org.apache.hadoop.hdds.scm.container.CloseContainerEventHandler;
import org.apache.hadoop.hdds.scm.container.ContainerActionsHandler;
import org.apache.hadoop.hdds.scm.container.ContainerID;
import org.apache.hadoop.hdds.scm.container.ContainerInfo;
import org.apache.hadoop.hdds.scm.container.ContainerManager;
import org.apache.hadoop.hdds.scm.container.ContainerReportHandler;
import org.apache.hadoop.hdds.scm.container.IncrementalContainerReportHandler;
import org.apache.hadoop.hdds.scm.container.SCMContainerManager;
import org.apache.hadoop.hdds.scm.container.ContainerReportHandler;
import org.apache.hadoop.hdds.scm.container.replication
.ReplicationActivityStatus;
import org.apache.hadoop.hdds.scm.container.replication.ReplicationManager;
import org.apache.hadoop.hdds.scm.container.ContainerInfo;
import org.apache.hadoop.hdds.scm.container.placement.algorithms
.ContainerPlacementPolicy;
import org.apache.hadoop.hdds.scm.container.placement.algorithms
.SCMContainerPlacementCapacity;
import org.apache.hadoop.hdds.scm.container.placement.algorithms.ContainerPlacementPolicy;
import org.apache.hadoop.hdds.scm.container.placement.algorithms.SCMContainerPlacementCapacity;
import org.apache.hadoop.hdds.scm.container.placement.metrics.ContainerStat;
import org.apache.hadoop.hdds.scm.container.placement.metrics.SCMMetrics;
import org.apache.hadoop.hdds.scm.container.replication.ReplicationActivityStatus;
import org.apache.hadoop.hdds.scm.container.replication.ReplicationManager;
import org.apache.hadoop.hdds.scm.events.SCMEvents;
import org.apache.hadoop.hdds.scm.exceptions.SCMException;
import org.apache.hadoop.hdds.scm.exceptions.SCMException.ResultCodes;
@ -95,8 +92,7 @@ import org.apache.hadoop.security.authentication.client.AuthenticationException;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.JvmPauseMonitor;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.utils.HddsVersionInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -110,10 +106,11 @@ import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import static org.apache.hadoop.hdds.scm.ScmConfigKeys.HDDS_SCM_KERBEROS_KEYTAB_FILE_KEY;
import static org.apache.hadoop.hdds.scm.ScmConfigKeys.HDDS_SCM_KERBEROS_PRINCIPAL_KEY;
import static org.apache.hadoop.hdds.scm.ScmConfigKeys.HDDS_SCM_WATCHER_TIMEOUT_DEFAULT;
import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_ENABLED;
import static org.apache.hadoop.hdds.scm.ScmConfigKeys.HDDS_SCM_KERBEROS_PRINCIPAL_KEY;
import static org.apache.hadoop.hdds.scm.ScmConfigKeys.HDDS_SCM_KERBEROS_KEYTAB_FILE_KEY;
import static org.apache.hadoop.util.ExitUtil.terminate;
/**
@ -209,6 +206,7 @@ public final class StorageContainerManager extends ServiceRuntimeInfoImpl
*/
private StorageContainerManager(OzoneConfiguration conf)
throws IOException, AuthenticationException {
super(HddsVersionInfo.HDDS_VERSION_INFO);
configuration = conf;
StorageContainerManager.initMetrics();

View File

@ -20,90 +20,25 @@ package org.apache.hadoop.ozone.util;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.ClassUtil;
import org.apache.hadoop.util.ThreadUtil;
import org.apache.hadoop.utils.HddsVersionInfo;
import org.apache.hadoop.utils.VersionInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
/**
* This class returns build information about Hadoop components.
*/
@InterfaceAudience.Public
@InterfaceStability.Stable
public class OzoneVersionInfo {
public final class OzoneVersionInfo {
private static final Logger LOG =
LoggerFactory.getLogger(OzoneVersionInfo.class);
private Properties info;
public static final VersionInfo OZONE_VERSION_INFO =
new VersionInfo("ozone");
protected OzoneVersionInfo(String component) {
info = new Properties();
String versionInfoFile = component + "-version-info.properties";
InputStream is = null;
try {
is = ThreadUtil
.getResourceAsStream(OzoneVersionInfo.class.getClassLoader(),
versionInfoFile);
info.load(is);
} catch (IOException ex) {
LoggerFactory.getLogger(getClass()).warn("Could not read '" +
versionInfoFile + "', " + ex.toString(), ex);
} finally {
IOUtils.closeStream(is);
}
}
protected String getVersion() {
return info.getProperty("version", "Unknown");
}
protected String getRelease() {
return info.getProperty("release", "Unknown");
}
protected String getRevision() {
return info.getProperty("revision", "Unknown");
}
protected String getBranch() {
return info.getProperty("branch", "Unknown");
}
protected String getDate() {
return info.getProperty("date", "Unknown");
}
protected String getUser() {
return info.getProperty("user", "Unknown");
}
protected String getUrl() {
return info.getProperty("url", "Unknown");
}
protected String getSrcChecksum() {
return info.getProperty("srcChecksum", "Unknown");
}
protected String getBuildVersion() {
return getVersion() +
" from " + getRevision() +
" by " + getUser() +
" source checksum " + getSrcChecksum();
}
protected String getProtocVersion() {
return info.getProperty("protocVersion", "Unknown");
}
private static final OzoneVersionInfo OZONE_VERSION_INFO =
new OzoneVersionInfo("ozone");
private OzoneVersionInfo() {}
public static void main(String[] args) {
System.out.println(

View File

@ -62,6 +62,7 @@ import org.apache.hadoop.ozone.protocol.commands.SCMCommand;
import org.apache.hadoop.security.authentication.client.AuthenticationException;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.ExitUtil;
import org.apache.hadoop.utils.HddsVersionInfo;
import org.junit.Assert;
import org.junit.Rule;
@ -474,5 +475,10 @@ public class TestStorageContainerManager {
ScmInfo scmInfo = scm.getClientProtocolServer().getScmInfo();
Assert.assertEquals(clusterId, scmInfo.getClusterId());
Assert.assertEquals(scmId, scmInfo.getScmId());
String expectedVersion = HddsVersionInfo.HDDS_VERSION_INFO.getVersion();
String actualVersion = scm.getSoftwareVersion();
Assert.assertEquals(expectedVersion, actualVersion);
}
}

View File

@ -38,6 +38,7 @@ import org.apache.hadoop.ozone.protocol.proto
.OzoneManagerProtocolProtos.ServicePort;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.VolumeList;
import org.apache.hadoop.ozone.util.OzoneVersionInfo;
import org.apache.hadoop.ozone.web.handlers.BucketArgs;
import org.apache.hadoop.ozone.web.handlers.KeyArgs;
import org.apache.hadoop.ozone.web.handlers.UserArgs;
@ -1395,4 +1396,11 @@ public class TestOzoneManager {
LifeCycle.State.RUNNING,
cluster.getOzoneManager().getOmRatisServerState());
}
// Verifies the OzoneManager reports the Ozone build version (HDDS-906)
// through its ServiceRuntimeInfo interface, instead of the Hadoop version.
@Test
public void testVersion() {
String expectedVersion = OzoneVersionInfo.OZONE_VERSION_INFO.getVersion();
String actualVersion = cluster.getOzoneManager().getSoftwareVersion();
Assert.assertEquals(expectedVersion, actualVersion);
}
}

View File

@ -99,6 +99,7 @@ import org.apache.hadoop.ozone.security.acl.OzoneObjInfo;
import org.apache.hadoop.ozone.security.acl.RequestContext;
import org.apache.hadoop.ozone.security.OzoneBlockTokenSecretManager;
import org.apache.hadoop.ozone.security.OzoneDelegationTokenSecretManager;
import org.apache.hadoop.ozone.util.OzoneVersionInfo;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
@ -223,6 +224,7 @@ public final class OzoneManager extends ServiceRuntimeInfoImpl
private volatile boolean isOmRpcServerRunning = false;
private OzoneManager(OzoneConfiguration conf) throws IOException {
super(OzoneVersionInfo.OZONE_VERSION_INFO);
Preconditions.checkNotNull(conf);
configuration = conf;
omStorage = new OMStorage(conf);