HBASE-13310 Fix high priority findbugs warnings
parent 1428a59caf
commit 6e9ded51fb
@@ -734,7 +734,7 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
     String compressTagsStr = getValue(COMPRESS_TAGS);
     boolean compressTags = DEFAULT_COMPRESS_TAGS;
     if (compressTagsStr != null) {
-      compressTags = Boolean.valueOf(compressTagsStr);
+      compressTags = Boolean.parseBoolean(compressTagsStr);
     }
     return compressTags;
   }
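Most hunks in this commit make the same substitution: `Boolean.valueOf(String)`, `Integer.valueOf(String)` and `Long.valueOf(String)` box the parsed value into a wrapper object that the surrounding code immediately unboxes, which findbugs flags as boxing a primitive just to parse it; `parseBoolean`/`parseInt`/`parseLong` return the primitive directly. A minimal sketch of the difference, using only the JDK:

```java
// Standalone sketch of the parse-versus-valueOf pattern fixed throughout
// this commit. Only the JDK is required.
public class ParseDemo {
  public static void main(String[] args) {
    String s = "42";

    // Before: valueOf returns a boxed Integer that auto-unboxing immediately
    // converts back to an int -- a needless wrapper on the way to a primitive.
    int viaBoxing = Integer.valueOf(s);

    // After: parseInt returns the primitive directly, no wrapper involved.
    int direct = Integer.parseInt(s);

    // The same holds for Boolean.parseBoolean and Long.parseLong.
    boolean flag = Boolean.parseBoolean("true");

    System.out.println(viaBoxing + " " + direct + " " + flag); // 42 42 true
  }
}
```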
@@ -747,7 +747,7 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
     String compressTagsStr = getValue(COMPRESS_TAGS);
     boolean compressTags = DEFAULT_COMPRESS_TAGS;
     if (compressTagsStr != null) {
-      compressTags = Boolean.valueOf(compressTagsStr);
+      compressTags = Boolean.parseBoolean(compressTagsStr);
     }
     return compressTags;
   }
@@ -778,8 +778,9 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
    */
   public boolean isInMemory() {
     String value = getValue(HConstants.IN_MEMORY);
-    if (value != null)
-      return Boolean.valueOf(value).booleanValue();
+    if (value != null) {
+      return Boolean.parseBoolean(value);
+    }
     return DEFAULT_IN_MEMORY;
   }
 
@@ -827,7 +828,7 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
    */
   public int getTimeToLive() {
     String value = getValue(TTL);
-    return (value != null)? Integer.valueOf(value).intValue(): DEFAULT_TTL;
+    return (value != null)? Integer.parseInt(value) : DEFAULT_TTL;
   }
 
   /**
@@ -843,7 +844,7 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
    */
   public int getMinVersions() {
     String value = getValue(MIN_VERSIONS);
-    return (value != null)? Integer.valueOf(value).intValue(): 0;
+    return (value != null)? Integer.parseInt(value) : 0;
   }
 
   /**
@@ -861,8 +862,9 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
    */
   public boolean isBlockCacheEnabled() {
     String value = getValue(BLOCKCACHE);
-    if (value != null)
-      return Boolean.valueOf(value).booleanValue();
+    if (value != null) {
+      return Boolean.parseBoolean(value);
+    }
     return DEFAULT_BLOCKCACHE;
   }
 
@@ -900,7 +902,7 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
   public int getScope() {
     byte[] value = getValue(REPLICATION_SCOPE_BYTES);
     if (value != null) {
-      return Integer.valueOf(Bytes.toString(value));
+      return Integer.parseInt(Bytes.toString(value));
     }
     return DEFAULT_REPLICATION_SCOPE;
   }
@@ -966,7 +968,9 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
 
   private boolean setAndGetBoolean(final String key, final boolean defaultSetting) {
     String value = getValue(key);
-    if (value != null) return Boolean.valueOf(value).booleanValue();
+    if (value != null) {
+      return Boolean.parseBoolean(value);
+    }
     return defaultSetting;
   }
 
@@ -1201,7 +1205,7 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
   @Override
   public int hashCode() {
     int result = Bytes.hashCode(this.name);
-    result ^= Byte.valueOf(COLUMN_DESCRIPTOR_VERSION).hashCode();
+    result ^= (int) COLUMN_DESCRIPTOR_VERSION;
     result ^= values.hashCode();
     result ^= configuration.hashCode();
     return result;
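This hashCode fix works because `Byte.hashCode()` is specified to return the byte's value, so boxing `COLUMN_DESCRIPTOR_VERSION` just to call `hashCode()` on it is equivalent to a plain int cast. A small sketch verifying the equivalence over the whole byte range:

```java
// Sketch showing the boxed call and the cast agree for every byte value.
public class ByteHashDemo {
  public static void main(String[] args) {
    for (int i = Byte.MIN_VALUE; i <= Byte.MAX_VALUE; i++) {
      byte b = (byte) i;
      // Byte.hashCode() is defined to return the byte's value, so the
      // wrapper allocation buys nothing over the cast.
      if (Byte.valueOf(b).hashCode() != (int) b) {
        throw new AssertionError("mismatch at " + b);
      }
    }
    System.out.println("Byte.valueOf(b).hashCode() == (int) b for all bytes");
  }
}
```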
@@ -34,7 +34,7 @@ public class PrettyPrinter {
     StringBuilder human = new StringBuilder();
     switch (unit) {
       case TIME_INTERVAL:
-        human.append(humanReadableTTL(Long.valueOf(value)));
+        human.append(humanReadableTTL(Long.parseLong(value)));
         break;
       default:
         human.append(value);
@@ -35,6 +35,8 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.StringUtils;
 
+import com.google.common.base.Preconditions;
+
 /**
  * Thread Utility
  */
@@ -266,8 +268,66 @@ public class Threads {
     t.setUncaughtExceptionHandler(LOGGING_EXCEPTION_HANDLER);
   }
 
-  private static Method printThreadInfoMethod = null;
-  private static boolean printThreadInfoMethodWithPrintStream = true;
+  private static interface PrintThreadInfoHelper {
+
+    void printThreadInfo(PrintStream stream, String title);
+
+  }
+
+  private static class PrintThreadInfoLazyHolder {
+
+    public static final PrintThreadInfoHelper HELPER = initHelper();
+
+    private static PrintThreadInfoHelper initHelper() {
+      Method method = null;
+      try {
+        // Hadoop 2.7+ declares printThreadInfo(PrintStream, String)
+        method = ReflectionUtils.class.getMethod("printThreadInfo", PrintStream.class,
+          String.class);
+        method.setAccessible(true);
+        final Method hadoop27Method = method;
+        return new PrintThreadInfoHelper() {
+
+          @Override
+          public void printThreadInfo(PrintStream stream, String title) {
+            try {
+              hadoop27Method.invoke(null, stream, title);
+            } catch (IllegalAccessException | IllegalArgumentException e) {
+              throw new RuntimeException(e);
+            } catch (InvocationTargetException e) {
+              throw new RuntimeException(e.getCause());
+            }
+          }
+        };
+      } catch (NoSuchMethodException e) {
+        LOG.info(
+          "Cannot find Hadoop 2.7+ printThreadInfo method, trying Hadoop 2.6 and earlier", e);
+      }
+      try {
+        // Hadoop 2.6 and earlier declares printThreadInfo(PrintWriter, String)
+        method = ReflectionUtils.class.getMethod("printThreadInfo", PrintWriter.class,
+          String.class);
+        method.setAccessible(true);
+        final Method hadoop26Method = method;
+        return new PrintThreadInfoHelper() {
+
+          @Override
+          public void printThreadInfo(PrintStream stream, String title) {
+            try {
+              hadoop26Method.invoke(null, new PrintWriter(stream), title);
+            } catch (IllegalAccessException | IllegalArgumentException e) {
+              throw new RuntimeException(e);
+            } catch (InvocationTargetException e) {
+              throw new RuntimeException(e.getCause());
+            }
+          }
+        };
+      } catch (NoSuchMethodException e) {
+        LOG.warn("Cannot find printThreadInfo method. Check hadoop jars linked", e);
+      }
+      return null;
+    }
+  }
+
   /**
    * Print all of the thread's information and stack traces. Wrapper around Hadoop's method.
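The new code replaces two mutable static fields, lazily assigned on first call, with the initialization-on-demand holder idiom: the JVM initializes a class at most once, under an internal lock, so `HELPER` is published safely without explicit synchronization. A stripped-down sketch of the idiom, where ExpensiveThing is a hypothetical stand-in for the reflective method lookup:

```java
// Minimal sketch of the initialization-on-demand holder idiom used above.
public class HolderDemo {

  static class ExpensiveThing {
    ExpensiveThing() {
      System.out.println("initialized once, on first use");
    }
  }

  // Class initialization happens exactly once, under the JVM's own lock,
  // so INSTANCE is safely published without volatile or synchronized --
  // unlike the racy check-then-assign on static fields it replaces.
  private static class LazyHolder {
    static final ExpensiveThing INSTANCE = new ExpensiveThing();
  }

  static ExpensiveThing get() {
    return LazyHolder.INSTANCE; // triggers LazyHolder init on first call only
  }

  public static void main(String[] args) {
    get();
    get(); // prints nothing the second time
  }
}
```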
@@ -276,33 +336,7 @@ public class Threads {
    * @param title a string title for the stack trace
    */
   public static void printThreadInfo(PrintStream stream, String title) {
-
-    if (printThreadInfoMethod == null) {
-      try {
-        // Hadoop 2.7+ declares printThreadInfo(PrintStream, String)
-        printThreadInfoMethod = ReflectionUtils.class.getMethod("printThreadInfo",
-          PrintStream.class, String.class);
-      } catch (NoSuchMethodException e) {
-        // Hadoop 2.6 and earlier declares printThreadInfo(PrintWriter, String)
-        printThreadInfoMethodWithPrintStream = false;
-        try {
-          printThreadInfoMethod = ReflectionUtils.class.getMethod("printThreadInfo",
-            PrintWriter.class, String.class);
-        } catch (NoSuchMethodException e1) {
-          throw new RuntimeException("Cannot find method. Check hadoop jars linked", e1);
-        }
-      }
-      printThreadInfoMethod.setAccessible(true);
-    }
-
-    try {
-      if (printThreadInfoMethodWithPrintStream) {
-        printThreadInfoMethod.invoke(null, stream, title);
-      } else {
-        printThreadInfoMethod.invoke(null, new PrintWriter(stream), title);
-      }
-    } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
-      throw new RuntimeException(e.getCause());
-    }
+    Preconditions.checkNotNull(PrintThreadInfoLazyHolder.HELPER,
+      "Cannot find method. Check hadoop jars linked").printThreadInfo(stream, title);
   }
 }
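The replacement body relies on Guava's `Preconditions.checkNotNull(ref, message)` returning `ref` when it is non-null, which is what allows chaining `.printThreadInfo(stream, title)` directly onto the check. A short sketch of that call shape, requiring only Guava on the classpath:

```java
// checkNotNull returns its argument when non-null, so the result can be
// dereferenced inline; on null it throws NPE carrying the message.
import com.google.common.base.Preconditions;

public class CheckNotNullDemo {
  public static void main(String[] args) {
    String s = Preconditions.checkNotNull("hello", "must not be null");
    System.out.println(s.toUpperCase()); // HELLO

    try {
      Preconditions.checkNotNull(null, "must not be null");
    } catch (NullPointerException e) {
      System.out.println(e.getMessage()); // must not be null
    }
  }
}
```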
@@ -127,8 +127,7 @@ public class RESTServer implements Constants {
     // check for user-defined port setting, if so override the conf
     if (commandLine != null && commandLine.hasOption("port")) {
       String val = commandLine.getOptionValue("port");
-      servlet.getConfiguration()
-          .setInt("hbase.rest.port", Integer.valueOf(val));
+      servlet.getConfiguration().setInt("hbase.rest.port", Integer.parseInt(val));
       LOG.debug("port set to " + val);
     }
 
@@ -141,8 +140,7 @@ public class RESTServer implements Constants {
     // check for user-defined info server port setting, if so override the conf
     if (commandLine != null && commandLine.hasOption("infoport")) {
       String val = commandLine.getOptionValue("infoport");
-      servlet.getConfiguration()
-          .setInt("hbase.rest.info.port", Integer.valueOf(val));
+      servlet.getConfiguration().setInt("hbase.rest.info.port", Integer.parseInt(val));
       LOG.debug("Web UI port set to " + val);
     }
 
@@ -76,7 +76,7 @@ public class RowResource extends ResourceBase {
     this.tableResource = tableResource;
     this.rowspec = new RowSpec(rowspec);
     if (versions != null) {
-      this.rowspec.setMaxVersions(Integer.valueOf(versions));
+      this.rowspec.setMaxVersions(Integer.parseInt(versions));
     }
     this.check = check;
   }
@@ -271,7 +271,7 @@ public class RowResource extends ResourceBase {
     }
     vals = headers.getRequestHeader("X-Timestamp");
     if (vals != null && !vals.isEmpty()) {
-      timestamp = Long.valueOf(vals.get(0));
+      timestamp = Long.parseLong(vals.get(0));
     }
     if (column == null) {
       servlet.getMetrics().incrementFailedPutRequests(1);
@@ -164,7 +164,7 @@ public class RowSpec {
       i++;
     }
     try {
-      time0 = Long.valueOf(URLDecoder.decode(stamp.toString(),
+      time0 = Long.parseLong(URLDecoder.decode(stamp.toString(),
         HConstants.UTF8_ENCODING));
     } catch (NumberFormatException e) {
       throw new IllegalArgumentException(e);
@@ -177,7 +177,7 @@ public class RowSpec {
       i++;
     }
     try {
-      time1 = Long.valueOf(URLDecoder.decode(stamp.toString(),
+      time1 = Long.parseLong(URLDecoder.decode(stamp.toString(),
         HConstants.UTF8_ENCODING));
     } catch (NumberFormatException e) {
       throw new IllegalArgumentException(e);
@@ -245,7 +245,7 @@ public class RowSpec {
         }
         sb.append(c);
       }
-      maxVersions = Integer.valueOf(sb.toString());
+      maxVersions = Integer.parseInt(sb.toString());
     } break;
     case 'n': {
       StringBuilder sb = new StringBuilder();
@@ -257,7 +257,7 @@ public class RowSpec {
         }
         sb.append(c);
       }
-      maxValues = Integer.valueOf(sb.toString());
+      maxValues = Integer.parseInt(sb.toString());
     } break;
     default:
       throw new IllegalArgumentException("unknown parameter '" + c + "'");
@@ -138,7 +138,7 @@ public class ColumnSchemaModel implements Serializable {
   public boolean __getBlockcache() {
     Object o = attrs.get(BLOCKCACHE);
     return o != null ?
-      Boolean.valueOf(o.toString()) : HColumnDescriptor.DEFAULT_BLOCKCACHE;
+      Boolean.parseBoolean(o.toString()) : HColumnDescriptor.DEFAULT_BLOCKCACHE;
   }
 
   /**
@@ -147,7 +147,7 @@ public class ColumnSchemaModel implements Serializable {
   public int __getBlocksize() {
     Object o = attrs.get(BLOCKSIZE);
     return o != null ?
-      Integer.valueOf(o.toString()) : HColumnDescriptor.DEFAULT_BLOCKSIZE;
+      Integer.parseInt(o.toString()) : HColumnDescriptor.DEFAULT_BLOCKSIZE;
   }
 
   /**
@@ -172,7 +172,7 @@ public class ColumnSchemaModel implements Serializable {
   public boolean __getInMemory() {
     Object o = attrs.get(IN_MEMORY);
     return o != null ?
-      Boolean.valueOf(o.toString()) : HColumnDescriptor.DEFAULT_IN_MEMORY;
+      Boolean.parseBoolean(o.toString()) : HColumnDescriptor.DEFAULT_IN_MEMORY;
   }
 
   /**
@@ -181,7 +181,7 @@ public class ColumnSchemaModel implements Serializable {
   public int __getTTL() {
     Object o = attrs.get(TTL);
     return o != null ?
-      Integer.valueOf(o.toString()) : HColumnDescriptor.DEFAULT_TTL;
+      Integer.parseInt(o.toString()) : HColumnDescriptor.DEFAULT_TTL;
   }
 
   /**
@@ -190,7 +190,7 @@ public class ColumnSchemaModel implements Serializable {
   public int __getVersions() {
     Object o = attrs.get(VERSIONS);
     return o != null ?
-      Integer.valueOf(o.toString()) : HColumnDescriptor.DEFAULT_VERSIONS;
+      Integer.parseInt(o.toString()) : HColumnDescriptor.DEFAULT_VERSIONS;
   }
 
   /**
@@ -413,7 +413,7 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable {
       }
     } break;
     case PageFilter:
-      filter = new PageFilter(Long.valueOf(value));
+      filter = new PageFilter(Long.parseLong(value));
       break;
     case PrefixFilter:
       filter = new PrefixFilter(Base64.decode(value));
@@ -106,7 +106,9 @@ public class StorageClusterStatusModel
   /**
    * Represents a region hosted on a region server.
    */
-  public static class Region {
+  public static class Region implements Serializable {
+    private static final long serialVersionUID = -1326683840086398193L;
+
     private byte[] name;
     private int stores;
     private int storefiles;
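Region is serialized as part of a Serializable model class, and findbugs warns when a Serializable class lacks an explicit serialVersionUID: the compiler-derived default changes whenever the class shape changes, silently breaking deserialization of previously written data. A minimal sketch of the fix, with illustrative field names:

```java
// An explicit serialVersionUID pins the stream version, so recompiling the
// class after adding a member does not invalidate old serialized instances.
import java.io.Serializable;

public class RegionLike implements Serializable {
  // Without this, the JVM derives an ID from the class's exact shape;
  // adding a field or method would change it and break deserialization.
  private static final long serialVersionUID = 1L;

  private byte[] name;
  private int stores;

  public static void main(String[] args) {
    System.out.println("serialVersionUID = " + serialVersionUID);
  }
}
```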
@@ -142,7 +142,7 @@ public class TableRegionModel implements Serializable {
     this.startKey = Bytes.toBytes(split[1]);
     String tail = split[2];
     split = tail.split("\\.");
-    id = Long.valueOf(split[0]);
+    id = Long.parseLong(split[0]);
   }
 
   /**
@@ -221,7 +221,7 @@ public class TableSchemaModel implements Serializable, ProtobufMessageHandler {
    */
   public boolean __getIsMeta() {
     Object o = attrs.get(IS_META);
-    return o != null ? Boolean.valueOf(o.toString()) : false;
+    return o != null ? Boolean.parseBoolean(o.toString()) : false;
   }
 
   /**
@@ -229,7 +229,7 @@ public class TableSchemaModel implements Serializable, ProtobufMessageHandler {
    */
   public boolean __getIsRoot() {
     Object o = attrs.get(IS_ROOT);
-    return o != null ? Boolean.valueOf(o.toString()) : false;
+    return o != null ? Boolean.parseBoolean(o.toString()) : false;
   }
 
   /**
@@ -237,8 +237,7 @@ public class TableSchemaModel implements Serializable, ProtobufMessageHandler {
    */
   public boolean __getReadOnly() {
     Object o = attrs.get(READONLY);
-    return o != null ?
-      Boolean.valueOf(o.toString()) : HTableDescriptor.DEFAULT_READONLY;
+    return o != null ? Boolean.parseBoolean(o.toString()) : HTableDescriptor.DEFAULT_READONLY;
   }
 
   /**
@@ -285,12 +284,10 @@ public class TableSchemaModel implements Serializable, ProtobufMessageHandler {
         familyBuilder.addAttrs(attrBuilder);
       }
       if (familyAttrs.containsKey(TTL)) {
-        familyBuilder.setTtl(
-          Integer.valueOf(familyAttrs.get(TTL).toString()));
+        familyBuilder.setTtl(Integer.parseInt(familyAttrs.get(TTL).toString()));
       }
       if (familyAttrs.containsKey(VERSIONS)) {
-        familyBuilder.setMaxVersions(
-          Integer.valueOf(familyAttrs.get(VERSIONS).toString()));
+        familyBuilder.setMaxVersions(Integer.parseInt(familyAttrs.get(VERSIONS).toString()));
       }
       if (familyAttrs.containsKey(COMPRESSION)) {
         familyBuilder.setCompression(familyAttrs.get(COMPRESSION).toString());
@@ -298,8 +295,7 @@ public class TableSchemaModel implements Serializable, ProtobufMessageHandler {
       builder.addColumns(familyBuilder);
     }
     if (attrs.containsKey(READONLY)) {
-      builder.setReadOnly(
-        Boolean.valueOf(attrs.get(READONLY).toString()));
+      builder.setReadOnly(Boolean.parseBoolean(attrs.get(READONLY).toString()));
     }
     return builder.build().toByteArray();
   }
@@ -1748,6 +1748,19 @@ public class HFileBlock implements Cacheable {
     return HFileBlock.blockDeserializer;
   }
 
+  @Override
+  public int hashCode() {
+    int result = 1;
+    result = result * 31 + blockType.hashCode();
+    result = result * 31 + nextBlockOnDiskSizeWithHeader;
+    result = result * 31 + (int) (offset ^ (offset >>> 32));
+    result = result * 31 + onDiskSizeWithoutHeader;
+    result = result * 31 + (int) (prevBlockOffset ^ (prevBlockOffset >>> 32));
+    result = result * 31 + uncompressedSizeWithoutHeader;
+    result = result * 31 + buf.hashCode();
+    return result;
+  }
+
   @Override
   public boolean equals(Object comparison) {
     if (this == comparison) {
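HFileBlock already overrides equals, and findbugs flags equals-without-hashCode because equal objects must report equal hash codes. The added method uses the conventional 31-multiplier recipe and folds each long down to an int with an xor-shift, the same folding Long.hashCode performs. A self-contained sketch of the recipe on a hypothetical two-field class:

```java
// The 31-multiplier hashCode recipe paired with a matching equals, on a
// made-up class; longs are folded to ints as in the HFileBlock change.
public class HashRecipeDemo {
  private final int size;
  private final long offset;

  HashRecipeDemo(int size, long offset) {
    this.size = size;
    this.offset = offset;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (!(o instanceof HashRecipeDemo)) return false;
    HashRecipeDemo other = (HashRecipeDemo) o;
    return size == other.size && offset == other.offset;
  }

  @Override
  public int hashCode() {
    int result = 1;
    result = result * 31 + size;
    // Fold the long's high bits into the low ones, as Long.hashCode does.
    result = result * 31 + (int) (offset ^ (offset >>> 32));
    return result;
  }

  public static void main(String[] args) {
    HashRecipeDemo a = new HashRecipeDemo(7, 1L << 40);
    HashRecipeDemo b = new HashRecipeDemo(7, 1L << 40);
    System.out.println(a.equals(b) && a.hashCode() == b.hashCode()); // true
  }
}
```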
@@ -95,7 +95,7 @@ public class HMasterCommandLine extends ServerCommandLine {
     if (cmd.hasOption("minRegionServers")) {
       String val = cmd.getOptionValue("minRegionServers");
       getConf().setInt("hbase.regions.server.count.min",
-        Integer.valueOf(val));
+        Integer.parseInt(val));
       LOG.debug("minRegionServers set to " + val);
     }
 
@@ -103,7 +103,7 @@ public class HMasterCommandLine extends ServerCommandLine {
     if (cmd.hasOption("minServers")) {
       String val = cmd.getOptionValue("minServers");
       getConf().setInt("hbase.regions.server.count.min",
-        Integer.valueOf(val));
+        Integer.parseInt(val));
       LOG.debug("minServers set to " + val);
     }
 
@@ -116,13 +116,13 @@ public class HMasterCommandLine extends ServerCommandLine {
     // master when we are in local/standalone mode. Useful testing)
     if (cmd.hasOption("localRegionServers")) {
       String val = cmd.getOptionValue("localRegionServers");
-      getConf().setInt("hbase.regionservers", Integer.valueOf(val));
+      getConf().setInt("hbase.regionservers", Integer.parseInt(val));
       LOG.debug("localRegionServers set to " + val);
     }
     // How many masters to startup inside this process; useful testing
     if (cmd.hasOption("masters")) {
       String val = cmd.getOptionValue("masters");
-      getConf().setInt("hbase.masters", Integer.valueOf(val));
+      getConf().setInt("hbase.masters", Integer.parseInt(val));
       LOG.debug("masters set to " + val);
     }
 
@@ -258,8 +258,9 @@ public class RegionCoprocessorHost
           key + ", spec: " + spec);
         continue;
       }
-      int priority = matcher.group(3).trim().isEmpty() ?
-          Coprocessor.PRIORITY_USER : Integer.valueOf(matcher.group(3));
+      String priorityStr = matcher.group(3).trim();
+      int priority = priorityStr.isEmpty() ?
+          Coprocessor.PRIORITY_USER : Integer.parseInt(priorityStr);
       String cfgSpec = null;
       try {
         cfgSpec = matcher.group(4);
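Beyond swapping valueOf for parseInt, this hunk removes a latent inconsistency: the old expression trimmed group(3) for the emptiness test but parsed the untrimmed group, so a priority captured with surrounding whitespace (if the spec pattern permits one) would reach the parser unparseable; caching the trimmed string also avoids calling matcher.group(3) twice. A small sketch of why trimming before parsing matters, with a hypothetical padded input:

```java
// Integer.parseInt does not tolerate surrounding whitespace, so parse the
// same trimmed string that was tested for emptiness.
public class TrimBeforeParseDemo {
  public static void main(String[] args) {
    String group = "  1001  "; // as a regex group might capture it

    String trimmed = group.trim();
    int priority = trimmed.isEmpty() ? 1000 /* default */ : Integer.parseInt(trimmed);
    System.out.println(priority); // 1001

    try {
      Integer.parseInt(group); // the old code's shape: untrimmed parse
    } catch (NumberFormatException e) {
      System.out.println("untrimmed parse fails: " + e.getMessage());
    }
  }
}
```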
@@ -23,6 +23,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.ConcurrentSkipListMap;
+import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -105,7 +106,7 @@ public class TableAuthManager {
 
   private Configuration conf;
   private ZKPermissionWatcher zkperms;
-  private volatile long mtime;
+  private final AtomicLong mtime = new AtomicLong(0L);
 
   private TableAuthManager(ZooKeeperWatcher watcher, Configuration conf)
       throws IOException {
@@ -212,7 +213,7 @@ public class TableAuthManager {
       }
     }
     globalCache = newCache;
-    mtime++;
+    mtime.incrementAndGet();
   } catch (IOException e) {
     // Never happens
     LOG.error("Error occurred while updating the global cache", e);
@@ -240,7 +241,7 @@ public class TableAuthManager {
     }
 
     tableCache.put(table, newTablePerms);
-    mtime++;
+    mtime.incrementAndGet();
   }
 
   /**
@@ -264,7 +265,7 @@ public class TableAuthManager {
     }
 
     nsCache.put(namespace, newTablePerms);
-    mtime++;
+    mtime.incrementAndGet();
   }
 
   private PermissionCache<TablePermission> getTablePermissions(TableName table) {
@@ -741,7 +742,7 @@ public class TableAuthManager {
   }
 
   public long getMTime() {
-    return mtime;
+    return mtime.get();
   }
 
   static Map<ZooKeeperWatcher,TableAuthManager> managerMap =
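`mtime++` on a volatile long is three separate steps (read, add, write), so two concurrent updaters can lose an increment even though each read and write is individually visible; findbugs flags this as a volatile increment. `AtomicLong.incrementAndGet()` performs the whole update atomically. A runnable sketch of the lost-update problem:

```java
// Two threads each increment both counters 100_000 times. The racy total
// often lands below 200_000; the atomic one never does.
import java.util.concurrent.atomic.AtomicLong;

public class VolatileIncrementDemo {
  static volatile long racy = 0;
  static final AtomicLong atomic = new AtomicLong(0L);

  public static void main(String[] args) throws InterruptedException {
    Runnable work = () -> {
      for (int i = 0; i < 100_000; i++) {
        racy++;                   // read, add, write: not atomic
        atomic.incrementAndGet(); // single atomic read-modify-write
      }
    };
    Thread t1 = new Thread(work), t2 = new Thread(work);
    t1.start(); t2.start();
    t1.join(); t2.join();
    System.out.println("racy=" + racy + " atomic=" + atomic.get());
  }
}
```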
@@ -3985,7 +3985,7 @@ public class HBaseFsck extends Configured implements Closeable {
    * Display the full report from fsck. This displays all live and dead region
    * servers, and all known regions.
    */
-  public void setDisplayFullReport() {
+  public static void setDisplayFullReport() {
     details = true;
   }
 
@@ -3993,7 +3993,7 @@ public class HBaseFsck extends Configured implements Closeable {
    * Set summary mode.
    * Print only summary of the tables and status (OK or INCONSISTENT)
    */
-  void setSummary() {
+  static void setSummary() {
     summary = true;
   }
 
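`details` and `summary` are static fields, and findbugs flags an instance method that writes static state: the call looks like it configures one object but affects every instance. Making the setters static moves the class-wide effect into the call site, which is why the call sites below now read `HBaseFsck.setDisplayFullReport()`. A compact sketch of the hazard, with illustrative names:

```java
// An instance method mutating static state reads as per-object but is not.
public class StaticWriteDemo {
  private static boolean details = false;

  // Before: public void setDisplayFullReport() { details = true; }
  // looked like it configured a single instance.

  // After: the class-wide effect is visible at the call site.
  public static void setDisplayFullReport() {
    details = true;
  }

  public static void main(String[] args) {
    StaticWriteDemo a = new StaticWriteDemo();
    StaticWriteDemo b = new StaticWriteDemo();
    StaticWriteDemo.setDisplayFullReport(); // "a" and "b" both see it
    System.out.println(details); // true
  }
}
```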
@@ -81,7 +81,7 @@ public class OfflineMetaRepair {
     for (int i = 0; i < args.length; i++) {
       String cmd = args[i];
       if (cmd.equals("-details")) {
-        fsck.setDisplayFullReport();
+        HBaseFsck.setDisplayFullReport();
       } else if (cmd.equals("-base")) {
         if (i == args.length - 1) {
           System.err.println("OfflineMetaRepair: -base needs an HDFS path.");
@@ -1001,7 +1001,7 @@ public class TestHBaseFsck {
     // fix the problem.
     HBaseFsck fsck = new HBaseFsck(conf, hbfsckExecutorService);
     fsck.connect();
-    fsck.setDisplayFullReport(); // i.e. -details
+    HBaseFsck.setDisplayFullReport(); // i.e. -details
     fsck.setTimeLag(0);
     fsck.setFixAssignments(true);
     fsck.setFixMeta(true);
@@ -1673,7 +1673,7 @@ public class TestHBaseFsck {
     // fix lingering split parent
     hbck = new HBaseFsck(conf, hbfsckExecutorService);
     hbck.connect();
-    hbck.setDisplayFullReport(); // i.e. -details
+    HBaseFsck.setDisplayFullReport(); // i.e. -details
     hbck.setTimeLag(0);
     hbck.setFixSplitParents(true);
     hbck.onlineHbck();
@@ -1926,7 +1926,7 @@ public class TestHBaseFsck {
     // verify that noHdfsChecking report the same errors
     HBaseFsck fsck = new HBaseFsck(conf, hbfsckExecutorService);
     fsck.connect();
-    fsck.setDisplayFullReport(); // i.e. -details
+    HBaseFsck.setDisplayFullReport(); // i.e. -details
     fsck.setTimeLag(0);
     fsck.setCheckHdfs(false);
     fsck.onlineHbck();
@@ -1937,7 +1937,7 @@ public class TestHBaseFsck {
     // verify that fixAssignments works fine with noHdfsChecking
     fsck = new HBaseFsck(conf, hbfsckExecutorService);
     fsck.connect();
-    fsck.setDisplayFullReport(); // i.e. -details
+    HBaseFsck.setDisplayFullReport(); // i.e. -details
     fsck.setTimeLag(0);
     fsck.setCheckHdfs(false);
     fsck.setFixAssignments(true);
@@ -1979,7 +1979,7 @@ public class TestHBaseFsck {
     // verify that noHdfsChecking report the same errors
     HBaseFsck fsck = new HBaseFsck(conf, hbfsckExecutorService);
     fsck.connect();
-    fsck.setDisplayFullReport(); // i.e. -details
+    HBaseFsck.setDisplayFullReport(); // i.e. -details
     fsck.setTimeLag(0);
     fsck.setCheckHdfs(false);
     fsck.onlineHbck();
@@ -1990,7 +1990,7 @@ public class TestHBaseFsck {
     // verify that fixMeta doesn't work with noHdfsChecking
     fsck = new HBaseFsck(conf, hbfsckExecutorService);
     fsck.connect();
-    fsck.setDisplayFullReport(); // i.e. -details
+    HBaseFsck.setDisplayFullReport(); // i.e. -details
     fsck.setTimeLag(0);
     fsck.setCheckHdfs(false);
     fsck.setFixAssignments(true);
@@ -2045,7 +2045,7 @@ public class TestHBaseFsck {
     // verify that noHdfsChecking can't detect ORPHAN_HDFS_REGION
     HBaseFsck fsck = new HBaseFsck(conf, hbfsckExecutorService);
     fsck.connect();
-    fsck.setDisplayFullReport(); // i.e. -details
+    HBaseFsck.setDisplayFullReport(); // i.e. -details
     fsck.setTimeLag(0);
     fsck.setCheckHdfs(false);
     fsck.onlineHbck();
@@ -2056,7 +2056,7 @@ public class TestHBaseFsck {
     // verify that fixHdfsHoles doesn't work with noHdfsChecking
     fsck = new HBaseFsck(conf, hbfsckExecutorService);
     fsck.connect();
-    fsck.setDisplayFullReport(); // i.e. -details
+    HBaseFsck.setDisplayFullReport(); // i.e. -details
     fsck.setTimeLag(0);
     fsck.setCheckHdfs(false);
     fsck.setFixHdfsHoles(true);
@@ -51,7 +51,7 @@ public class HbckTestingUtil {
     HBaseFsck fsck = new HBaseFsck(conf, exec);
     try {
       fsck.connect();
-      fsck.setDisplayFullReport(); // i.e. -details
+      HBaseFsck.setDisplayFullReport(); // i.e. -details
       fsck.setTimeLag(0);
       fsck.setFixAssignments(fixAssignments);
      fsck.setFixMeta(fixMeta);