HBASE-12283 Clean up some checkstyle errors

Elliott Clark 2014-10-17 11:34:20 -07:00
parent ebe8db5869
commit c623b04122
16 changed files with 80 additions and 33 deletions


@@ -517,14 +517,18 @@ checkCheckstyleErrors() {
     JIRA_COMMENT="$JIRA_COMMENT
-    {color:red}-1 javac{color}. The applied patch generated $patchCheckstyleErrors checkstyle errors (more than the trunk's current $trunkCheckstyleErrors errors)."
+    {color:red}-1 checkstyle{color}. The applied patch generated $patchCheckstyleErrors checkstyle errors (more than the trunk's current $trunkCheckstyleErrors errors)."
     return 1
   fi
   echo "There were $patchCheckstyleErrors checkstyle errors in this patch compared to $trunkCheckstyleErrors on master."
 fi
+JIRA_COMMENT_FOOTER="Checkstyle Errors: $BUILD_URL/artifact/patchprocess/checkstyle-aggregate.html
+$JIRA_COMMENT_FOOTER"
 JIRA_COMMENT="$JIRA_COMMENT
-  {color:green}+1 javac{color}. The applied patch does not increase the total number of checkstyle errors"
+  {color:green}+1 checkstyle{color}. The applied patch does not increase the total number of checkstyle errors"
 return 0
 }


@@ -38,7 +38,7 @@ import org.apache.hadoop.hbase.util.Sleeper;
 public abstract class Chore extends HasThread {
   private final Log LOG = LogFactory.getLog(this.getClass());
   private final Sleeper sleeper;
-  protected final Stoppable stopper;
+  private final Stoppable stopper;
   /**
    * @param p Period at which we should run. Will be adjusted appropriately
@@ -146,4 +146,12 @@ public abstract class Chore extends HasThread {
    */
   protected void cleanup() {
   }
+  protected Stoppable getStopper() {
+    return stopper;
+  }
+  protected Sleeper getSleeper() {
+    return sleeper;
+  }
 }
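
The Chore hunk above makes the stopper field private and adds getStopper()/getSleeper() accessors; the HealthCheckChore, CleanerChore and TestEndToEndSplitTransaction hunks later in this commit switch callers to getStopper(). A minimal sketch of a subclass written against the new accessors (ExampleChore is hypothetical; the Chore(String, int, Stoppable) constructor and the abstract chore() method are assumed from the HBase code of this era):

import org.apache.hadoop.hbase.Chore;
import org.apache.hadoop.hbase.Stoppable;

// Hypothetical subclass, not part of this commit.
public class ExampleChore extends Chore {

  public ExampleChore(int period, Stoppable stopper) {
    super("ExampleChore", period, stopper); // assumed Chore(String, int, Stoppable) constructor
  }

  @Override
  protected void chore() {
    // The field is no longer reachable as this.stopper; go through the new accessor.
    if (getStopper().isStopped()) {
      return;
    }
    // ... periodic work ...
  }
}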


@@ -216,6 +216,7 @@ public class ClusterStatus extends VersionedWritable {
    * @return region server information
    * @deprecated Use {@link #getServers()}
    */
+  @Deprecated
   public Collection<ServerName> getServerInfo() {
     return getServers();
   }
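
This hunk, together with the javadoc edits in HColumnDescriptor and HBaseConfiguration below, pairs the @Deprecated annotation with a @deprecated javadoc tag so both forms are present, the shape that checkstyle deprecation checks such as MissingDeprecated expect. A hypothetical, self-contained illustration of the pattern (LegacyApi is not HBase code):

import java.util.Collection;
import java.util.Collections;

// Hypothetical class showing the annotation and the javadoc tag kept together.
public class LegacyApi {

  /**
   * @return region server names
   * @deprecated Use {@link #getServers()} instead.
   */
  @Deprecated
  public Collection<String> getServerInfo() {
    return getServers();
  }

  public Collection<String> getServers() {
    return Collections.emptyList();
  }
}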


@@ -632,6 +632,7 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
           Integer.decode(value): Integer.valueOf(DEFAULT_BLOCKSIZE);
     }
     return this.blocksize.intValue();
   }
   /**
@@ -664,7 +665,10 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
     return setValue(COMPRESSION, type.getName().toUpperCase());
   }
-  /** @return data block encoding algorithm used on disk */
+  /**
+   * @return data block encoding algorithm used on disk
+   * @deprecated See getDataBlockEncoding()
+   */
   @Deprecated
   public DataBlockEncoding getDataBlockEncodingOnDisk() {
     return getDataBlockEncoding();
@@ -674,6 +678,7 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
    * This method does nothing now. Flag ENCODE_ON_DISK is not used
    * any more. Data blocks have the same encoding in cache as on disk.
    * @return this (for chained invocation)
+   * @deprecated This does nothing now.
    */
   @Deprecated
   public HColumnDescriptor setEncodeOnDisk(boolean encodeOnDisk) {


@@ -104,7 +104,8 @@ public class HRegionLocation implements Comparable<HRegionLocation> {
   }
   /**
-   * @return String made of hostname and port formatted as per {@link Addressing#createHostAndPortStr(String, int)}
+   * @return String made of hostname and port formatted as
+   * per {@link Addressing#createHostAndPortStr(String, int)}
    */
   public String getHostnamePort() {
     return Addressing.createHostAndPortStr(this.getHostname(), this.getPort());


@@ -62,8 +62,7 @@ public class CellKey {
         familyLength);
     String qualifier = (qualifierLength == 0) ? "" : Bytes.toStringBinary(qualifierArray,
         qualifierOffset, qualifierLength);
-    return row + "/" + family +
-        (family != null && family.length() > 0 ? ":" : "") + qualifier
+    return row + "/" + family + (family != null && family.length() > 0 ? ":" : "") + qualifier
         + "/" + KeyValue.humanReadableTimestamp(ts) + "/" + Type.codeToType(type);
   }
 }


@@ -41,6 +41,11 @@ import org.apache.hadoop.hbase.util.Bytes;
 @InterfaceStability.Evolving
 public final class CellUtil {
+  /**
+   * Private constructor to keep this class from being instantiated.
+   */
+  private CellUtil(){}
   /******************* ByteRange *******************************/
   public static ByteRange fillRowRange(Cell cell, ByteRange range) {
@@ -175,7 +180,8 @@ public final class CellUtil {
   }
   public static Cell createCell(final byte[] row, final byte[] family, final byte[] qualifier,
-      final long timestamp, final byte type, final byte[] value, byte[] tags, final long memstoreTS) {
+      final long timestamp, final byte type, final byte[] value, byte[] tags,
+      final long memstoreTS) {
     KeyValue keyValue = new KeyValue(row, family, qualifier, timestamp,
         KeyValue.Type.codeToType(type), value, tags);
     keyValue.setSequenceId(memstoreTS);
@@ -212,7 +218,8 @@ public final class CellUtil {
    * @param cellScannerables
    * @return CellScanner interface over <code>cellIterables</code>
    */
-  public static CellScanner createCellScanner(final List<? extends CellScannable> cellScannerables) {
+  public static CellScanner createCellScanner(
+      final List<? extends CellScannable> cellScannerables) {
     return new CellScanner() {
       private final Iterator<? extends CellScannable> iterator = cellScannerables.iterator();
       private CellScanner cellScanner = null;


@@ -72,11 +72,11 @@ public class CompoundConfiguration extends Configuration {
     int size();
   }
-  protected List<ImmutableConfigMap> configs
+  private final List<ImmutableConfigMap> configs
     = new ArrayList<ImmutableConfigMap>();
   static class ImmutableConfWrapper implements ImmutableConfigMap {
-    Configuration c;
+    private final Configuration c;
     ImmutableConfWrapper(Configuration conf) {
       c = conf;
@@ -163,7 +163,7 @@ public class CompoundConfiguration extends Configuration {
     // put new map at the front of the list (top priority)
     this.configs.add(0, new ImmutableConfigMap() {
-      Map<Bytes, Bytes> m = map;
+      private final Map<Bytes, Bytes> m = map;
       @Override
       public Iterator<Map.Entry<String,String>> iterator() {
@@ -224,7 +224,7 @@ public class CompoundConfiguration extends Configuration {
     // put new map at the front of the list (top priority)
     this.configs.add(0, new ImmutableConfigMap() {
-      Map<String, String> m = map;
+      private final Map<String, String> m = map;
       @Override
       public Iterator<Map.Entry<String,String>> iterator() {


@@ -42,6 +42,7 @@ public class HBaseConfiguration extends Configuration {
   /**
    * Instantinating HBaseConfiguration() is deprecated. Please use
    * HBaseConfiguration#create() to construct a plain Configuration
+   * @deprecated Please use create() instead.
    */
   @Deprecated
   public HBaseConfiguration() {
@@ -55,6 +56,7 @@ public class HBaseConfiguration extends Configuration {
   /**
    * Instantiating HBaseConfiguration() is deprecated. Please use
    * HBaseConfiguration#create(conf) to construct a plain Configuration
+   * @deprecated Please user create(conf) instead.
    */
   @Deprecated
   public HBaseConfiguration(final Configuration c) {
@@ -167,8 +169,9 @@ public class HBaseConfiguration extends Configuration {
    * Get the password from the Configuration instance using the
    * getPassword method if it exists. If not, then fall back to the
    * general get method for configuration elements.
-   * @param conf configuration instance for accessing the passwords
-   * @param alias the name of the password element
+   *
+   * @param conf configuration instance for accessing the passwords
+   * @param alias the name of the password element
    * @param defPass the default password
    * @return String password or default password
    * @throws IOException
@@ -181,10 +184,9 @@ public class HBaseConfiguration extends Configuration {
       char[] p = (char[]) m.invoke(conf, alias);
       if (p != null) {
         LOG.debug(String.format("Config option \"%s\" was found through" +
             " the Configuration getPassword method.", alias));
         passwd = new String(p);
-      }
-      else {
+      } else {
         LOG.debug(String.format(
             "Config option \"%s\" was not found. Using provided default value",
             alias));
@@ -195,7 +197,7 @@ public class HBaseConfiguration extends Configuration {
       //provider API doesn't exist yet
       LOG.debug(String.format(
           "Credential.getPassword method is not available." +
           " Falling back to configuration."));
       passwd = conf.get(alias, defPass);
     } catch (SecurityException e) {
       throw new IOException(e.getMessage(), e);
@@ -209,7 +211,8 @@ public class HBaseConfiguration extends Configuration {
     return passwd;
   }
-  /** For debugging. Dump configurations to system output as xml format.
+  /**
+   * For debugging. Dump configurations to system output as xml format.
    * Master and RS configurations can also be dumped using
    * http services. e.g. "curl http://master:16010/dump"
    */


@@ -25,7 +25,13 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
  */
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
-public class HBaseInterfaceAudience {
+public final class HBaseInterfaceAudience {
+  /**
+   * Can't create this class.
+   */
+  private HBaseInterfaceAudience(){}
   public static final String COPROC = "Coprocesssor";
   public static final String REPLICATION = "Replication";
   public static final String PHOENIX = "Phoenix";
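
Like the CellUtil and CoordinatedStateManagerFactory hunks, this change applies the utility-class pattern that checks such as FinalClass and HideUtilityClassConstructor enforce: a class holding only static members is declared final and given a private constructor so it cannot be instantiated or subclassed. A hypothetical sketch of the pattern (ExampleAudience is not HBase code):

// Hypothetical constants holder illustrating the utility-class pattern.
public final class ExampleAudience {

  /** Can't create this class. */
  private ExampleAudience() {
  }

  public static final String COPROC = "Coprocessor";
  public static final String REPLICATION = "Replication";
}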


@@ -150,7 +150,9 @@ public final class HConstants {
   /** Parameter name for the master type being backup (waits for primary to go inactive). */
   public static final String MASTER_TYPE_BACKUP = "hbase.master.backup";
-  /** by default every master is a possible primary master unless the conf explicitly overrides it */
+  /**
+   * by default every master is a possible primary master unless the conf explicitly overrides it
+   */
   public static final boolean DEFAULT_MASTER_TYPE_BACKUP = false;
   /** Name of ZooKeeper quorum configuration parameter. */
@@ -179,8 +181,11 @@ public final class HConstants {
   /** Default client port that the zookeeper listens on */
   public static final int DEFAULT_ZOOKEPER_CLIENT_PORT = 2181;
-  /** Parameter name for the wait time for the recoverable zookeeper */
-  public static final String ZOOKEEPER_RECOVERABLE_WAITTIME = "hbase.zookeeper.recoverable.waittime";
+  /**
+   * Parameter name for the wait time for the recoverable zookeeper
+   */
+  public static final String ZOOKEEPER_RECOVERABLE_WAITTIME =
+      "hbase.zookeeper.recoverable.waittime";
   /** Default wait time for the recoverable zookeeper */
   public static final long DEFAULT_ZOOKEPER_RECOVERABLE_WAITIME = 10000;
@@ -380,7 +385,10 @@ public final class HConstants {
   // should go down.
-  /** The hbase:meta table's name. */
+  /**
+   * The hbase:meta table's name.
+   *
+   */
   @Deprecated // for compat from 0.94 -> 0.96.
   public static final byte[] META_TABLE_NAME = TableName.META_TABLE_NAME.getName();


@@ -27,7 +27,12 @@ import org.apache.hadoop.util.ReflectionUtils;
  * based on configuration.
  */
 @InterfaceAudience.Private
-public class CoordinatedStateManagerFactory {
+public final class CoordinatedStateManagerFactory {
+  /**
+   * Private to keep this class from being accidentally instantiated.
+   */
+  private CoordinatedStateManagerFactory(){}
   /**
    * Creates consensus provider from the given configuration.


@@ -24,10 +24,10 @@ import java.util.concurrent.atomic.AtomicInteger;
  * Thread factory that creates daemon threads
  */
 public class DaemonThreadFactory implements ThreadFactory {
-  static final AtomicInteger poolNumber = new AtomicInteger(1);
-  final ThreadGroup group;
-  final AtomicInteger threadNumber = new AtomicInteger(1);
-  final String namePrefix;
+  private static final AtomicInteger poolNumber = new AtomicInteger(1);
+  private final ThreadGroup group;
+  private final AtomicInteger threadNumber = new AtomicInteger(1);
+  private final String namePrefix;
   public DaemonThreadFactory(String name) {
     SecurityManager s = System.getSecurityManager();
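
The field edits here and in CompoundConfiguration narrow package-private members to private (and final where possible), the form accepted by checkstyle's VisibilityModifier check. A hypothetical factory written in that style (ExampleDaemonThreadFactory is not the HBase class):

import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicInteger;

// Hypothetical thread factory with all state private and, where possible, final.
public class ExampleDaemonThreadFactory implements ThreadFactory {

  private static final AtomicInteger POOL_NUMBER = new AtomicInteger(1);
  private final AtomicInteger threadNumber = new AtomicInteger(1);
  private final String namePrefix;

  public ExampleDaemonThreadFactory(String name) {
    this.namePrefix = name + "-pool" + POOL_NUMBER.getAndIncrement() + "-thread-";
  }

  @Override
  public Thread newThread(Runnable r) {
    Thread t = new Thread(r, namePrefix + threadNumber.getAndIncrement());
    t.setDaemon(true); // daemon threads, as the name of the mirrored class suggests
    return t;
  }
}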


@@ -58,7 +58,7 @@ import org.apache.hadoop.util.StringUtils;
     if (!isHealthy) {
       boolean needToStop = decideToStop();
       if (needToStop) {
-        this.stopper.stop("The node reported unhealthy " + threshold
+        this.getStopper().stop("The node reported unhealthy " + threshold
             + " number of times consecutively.");
       }
       // Always log health report.


@@ -226,7 +226,7 @@ public abstract class CleanerChore<T extends FileCleanerDelegate> extends Chore
     Iterable<FileStatus> deletableValidFiles = validFiles;
     // check each of the cleaners for the valid files
     for (T cleaner : cleanersChain) {
-      if (cleaner.isStopped() || this.stopper.isStopped()) {
+      if (cleaner.isStopped() || this.getStopper().isStopped()) {
         LOG.warn("A file cleaner" + this.getName() + " is stopped, won't delete any more files in:"
             + this.oldFileDir);
         return false;


@@ -385,7 +385,7 @@ public class TestEndToEndSplitTransaction {
         verify();
       } catch (Throwable ex) {
         this.ex = ex;
-        stopper.stop("caught exception");
+        getStopper().stop("caught exception");
       }
     }
   }