HBASE-12283 Clean up some checkstyle errors

Elliott Clark 2014-10-17 11:34:20 -07:00
parent ebe8db5869
commit c623b04122
16 changed files with 80 additions and 33 deletions

View File

@@ -517,14 +517,18 @@ checkCheckstyleErrors() {
JIRA_COMMENT="$JIRA_COMMENT
{color:red}-1 javac{color}. The applied patch generated $patchCheckstyleErrors checkstyle errors (more than the trunk's current $trunkCheckstyleErrors errors)."
{color:red}-1 checkstyle{color}. The applied patch generated $patchCheckstyleErrors checkstyle errors (more than the trunk's current $trunkCheckstyleErrors errors)."
return 1
fi
echo "There were $patchCheckstyleErrors checkstyle errors in this patch compared to $trunkCheckstyleErrors on master."
fi
JIRA_COMMENT_FOOTER="Checkstyle Errors: $BUILD_URL/artifact/patchprocess/checkstyle-aggregate.html
$JIRA_COMMENT_FOOTER"
JIRA_COMMENT="$JIRA_COMMENT
{color:green}+1 javac{color}. The applied patch does not increase the total number of checkstyle errors"
{color:green}+1 checkstyle{color}. The applied patch does not increase the total number of checkstyle errors"
return 0
}

View File

@@ -38,7 +38,7 @@ import org.apache.hadoop.hbase.util.Sleeper;
public abstract class Chore extends HasThread {
private final Log LOG = LogFactory.getLog(this.getClass());
private final Sleeper sleeper;
protected final Stoppable stopper;
private final Stoppable stopper;
/**
* @param p Period at which we should run. Will be adjusted appropriately
@@ -146,4 +146,12 @@ public abstract class Chore extends HasThread {
*/
protected void cleanup() {
}
protected Stoppable getStopper() {
return stopper;
}
protected Sleeper getSleeper() {
return sleeper;
}
}
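With stopper and sleeper now private, subclasses reach them only through the new protected accessors; the HealthCheckChore, CleanerChore and TestEndToEndSplitTransaction hunks further down make exactly that switch. A minimal hypothetical subclass sketch (the class name, period and constructor shape are assumptions, not part of this patch):

import org.apache.hadoop.hbase.Chore;
import org.apache.hadoop.hbase.Stoppable;

// Hypothetical subclass; assumes Chore's (name, period, stopper) constructor.
public class ExampleChore extends Chore {
  public ExampleChore(Stoppable stopper) {
    super("example-chore", 1000, stopper);
  }

  @Override
  protected void chore() {
    // The stopper field is private now, so go through the protected accessor.
    if (getStopper().isStopped()) {
      return;
    }
    // ... periodic work goes here ...
  }
}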

View File

@@ -216,6 +216,7 @@ public class ClusterStatus extends VersionedWritable {
* @return region server information
* @deprecated Use {@link #getServers()}
*/
@Deprecated
public Collection<ServerName> getServerInfo() {
return getServers();
}
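The ClusterStatus hunk above is the template for several changes in this commit: where the javadoc already carries a @deprecated tag, the matching @Deprecated annotation is added so the two stay in sync, which is what checkstyle (and javac's deprecation handling) complain about. A generic sketch of the pattern with a made-up method, not taken from the patch:

public class LegacyApiExample {
  /**
   * @return the widget count
   * @deprecated Use {@link #getWidgetCount()} instead.
   */
  @Deprecated  // annotation added to match the existing @deprecated javadoc tag
  public int getWidgetCountOld() {
    return getWidgetCount();
  }

  public int getWidgetCount() {
    return 42;
  }
}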

View File

@@ -632,6 +632,7 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
Integer.decode(value): Integer.valueOf(DEFAULT_BLOCKSIZE);
}
return this.blocksize.intValue();
}
/**
@@ -664,7 +665,10 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
return setValue(COMPRESSION, type.getName().toUpperCase());
}
/** @return data block encoding algorithm used on disk */
/**
* @return data block encoding algorithm used on disk
* @deprecated See getDataBlockEncoding()
*/
@Deprecated
public DataBlockEncoding getDataBlockEncodingOnDisk() {
return getDataBlockEncoding();
@@ -674,6 +678,7 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
* This method does nothing now. Flag ENCODE_ON_DISK is not used
* any more. Data blocks have the same encoding in cache as on disk.
* @return this (for chained invocation)
* @deprecated This does nothing now.
*/
@Deprecated
public HColumnDescriptor setEncodeOnDisk(boolean encodeOnDisk) {

View File

@@ -104,7 +104,8 @@ public class HRegionLocation implements Comparable<HRegionLocation> {
}
/**
* @return String made of hostname and port formatted as per {@link Addressing#createHostAndPortStr(String, int)}
* @return String made of hostname and port formatted as
* per {@link Addressing#createHostAndPortStr(String, int)}
*/
public String getHostnamePort() {
return Addressing.createHostAndPortStr(this.getHostname(), this.getPort());

View File

@@ -62,8 +62,7 @@ public class CellKey {
familyLength);
String qualifier = (qualifierLength == 0) ? "" : Bytes.toStringBinary(qualifierArray,
qualifierOffset, qualifierLength);
return row + "/" + family +
(family != null && family.length() > 0 ? ":" : "") + qualifier
return row + "/" + family + (family != null && family.length() > 0 ? ":" : "") + qualifier
+ "/" + KeyValue.humanReadableTimestamp(ts) + "/" + Type.codeToType(type);
}
}

View File

@@ -41,6 +41,11 @@ import org.apache.hadoop.hbase.util.Bytes;
@InterfaceStability.Evolving
public final class CellUtil {
/**
* Private constructor to keep this class from being instantiated.
*/
private CellUtil(){}
/******************* ByteRange *******************************/
public static ByteRange fillRowRange(Cell cell, ByteRange range) {
@@ -175,7 +180,8 @@ public final class CellUtil {
}
public static Cell createCell(final byte[] row, final byte[] family, final byte[] qualifier,
final long timestamp, final byte type, final byte[] value, byte[] tags, final long memstoreTS) {
final long timestamp, final byte type, final byte[] value, byte[] tags,
final long memstoreTS) {
KeyValue keyValue = new KeyValue(row, family, qualifier, timestamp,
KeyValue.Type.codeToType(type), value, tags);
keyValue.setSequenceId(memstoreTS);
@@ -212,7 +218,8 @@ public final class CellUtil {
* @param cellScannerables
* @return CellScanner interface over <code>cellIterables</code>
*/
public static CellScanner createCellScanner(final List<? extends CellScannable> cellScannerables) {
public static CellScanner createCellScanner(
final List<? extends CellScannable> cellScannerables) {
return new CellScanner() {
private final Iterator<? extends CellScannable> iterator = cellScannerables.iterator();
private CellScanner cellScanner = null;

View File

@@ -72,11 +72,11 @@ public class CompoundConfiguration extends Configuration {
int size();
}
protected List<ImmutableConfigMap> configs
private final List<ImmutableConfigMap> configs
= new ArrayList<ImmutableConfigMap>();
static class ImmutableConfWrapper implements ImmutableConfigMap {
Configuration c;
private final Configuration c;
ImmutableConfWrapper(Configuration conf) {
c = conf;
@@ -163,7 +163,7 @@ public class CompoundConfiguration extends Configuration {
// put new map at the front of the list (top priority)
this.configs.add(0, new ImmutableConfigMap() {
Map<Bytes, Bytes> m = map;
private final Map<Bytes, Bytes> m = map;
@Override
public Iterator<Map.Entry<String,String>> iterator() {
@@ -224,7 +224,7 @@ public class CompoundConfiguration extends Configuration {
// put new map at the front of the list (top priority)
this.configs.add(0, new ImmutableConfigMap() {
Map<String, String> m = map;
private final Map<String, String> m = map;
@Override
public Iterator<Map.Entry<String,String>> iterator() {

View File

@@ -42,6 +42,7 @@ public class HBaseConfiguration extends Configuration {
/**
* Instantiating HBaseConfiguration() is deprecated. Please use
* HBaseConfiguration#create() to construct a plain Configuration
* @deprecated Please use create() instead.
*/
@Deprecated
public HBaseConfiguration() {
@@ -55,6 +56,7 @@ public class HBaseConfiguration extends Configuration {
/**
* Instantiating HBaseConfiguration() is deprecated. Please use
* HBaseConfiguration#create(conf) to construct a plain Configuration
* @deprecated Please use create(conf) instead.
*/
@Deprecated
public HBaseConfiguration(final Configuration c) {
@@ -167,8 +169,9 @@ public class HBaseConfiguration extends Configuration {
* Get the password from the Configuration instance using the
* getPassword method if it exists. If not, then fall back to the
* general get method for configuration elements.
* @param conf configuration instance for accessing the passwords
* @param alias the name of the password element
*
* @param conf configuration instance for accessing the passwords
* @param alias the name of the password element
* @param defPass the default password
* @return String password or default password
* @throws IOException
@@ -181,10 +184,9 @@ public class HBaseConfiguration extends Configuration {
char[] p = (char[]) m.invoke(conf, alias);
if (p != null) {
LOG.debug(String.format("Config option \"%s\" was found through" +
" the Configuration getPassword method.", alias));
" the Configuration getPassword method.", alias));
passwd = new String(p);
}
else {
} else {
LOG.debug(String.format(
"Config option \"%s\" was not found. Using provided default value",
alias));
@@ -195,7 +197,7 @@ public class HBaseConfiguration extends Configuration {
//provider API doesn't exist yet
LOG.debug(String.format(
"Credential.getPassword method is not available." +
" Falling back to configuration."));
" Falling back to configuration."));
passwd = conf.get(alias, defPass);
} catch (SecurityException e) {
throw new IOException(e.getMessage(), e);
@@ -209,7 +211,8 @@ public class HBaseConfiguration extends Configuration {
return passwd;
}
/** For debugging. Dump configurations to system output as xml format.
/**
* For debugging. Dump configurations to system output as xml format.
* Master and RS configurations can also be dumped using
* http services. e.g. "curl http://master:16010/dump"
*/
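Going back to the getPassword(...) javadoc reflowed earlier in this file: the method tries the reflective Configuration.getPassword() lookup (credential providers) and falls back to a plain conf.get(alias, defPass) when that API is missing. A usage sketch, assuming the surrounding method is the static HBaseConfiguration.getPassword(conf, alias, defPass) that this javadoc documents; the alias and default value are made up:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public final class PasswordLookupExample {
  private PasswordLookupExample() {}

  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    // Resolved via Configuration.getPassword() when the running Hadoop has it,
    // otherwise falls back to conf.get("hypothetical.keystore.password", "changeit").
    String password =
        HBaseConfiguration.getPassword(conf, "hypothetical.keystore.password", "changeit");
    System.out.println("resolved a password of length " + password.length());
  }
}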

View File

@@ -25,7 +25,13 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class HBaseInterfaceAudience {
public final class HBaseInterfaceAudience {
/**
* Can't create this class.
*/
private HBaseInterfaceAudience(){}
public static final String COPROC = "Coprocesssor";
public static final String REPLICATION = "Replication";
public static final String PHOENIX = "Phoenix";

View File

@@ -150,7 +150,9 @@ public final class HConstants {
/** Parameter name for the master type being backup (waits for primary to go inactive). */
public static final String MASTER_TYPE_BACKUP = "hbase.master.backup";
/** by default every master is a possible primary master unless the conf explicitly overrides it */
/**
* by default every master is a possible primary master unless the conf explicitly overrides it
*/
public static final boolean DEFAULT_MASTER_TYPE_BACKUP = false;
/** Name of ZooKeeper quorum configuration parameter. */
@@ -179,8 +181,11 @@ public final class HConstants {
/** Default client port that the zookeeper listens on */
public static final int DEFAULT_ZOOKEPER_CLIENT_PORT = 2181;
/** Parameter name for the wait time for the recoverable zookeeper */
public static final String ZOOKEEPER_RECOVERABLE_WAITTIME = "hbase.zookeeper.recoverable.waittime";
/**
* Parameter name for the wait time for the recoverable zookeeper
*/
public static final String ZOOKEEPER_RECOVERABLE_WAITTIME =
"hbase.zookeeper.recoverable.waittime";
/** Default wait time for the recoverable zookeeper */
public static final long DEFAULT_ZOOKEPER_RECOVERABLE_WAITIME = 10000;
@@ -380,7 +385,10 @@ public final class HConstants {
// should go down.
/** The hbase:meta table's name. */
/**
* The hbase:meta table's name.
*
*/
@Deprecated // for compat from 0.94 -> 0.96.
public static final byte[] META_TABLE_NAME = TableName.META_TABLE_NAME.getName();

View File

@@ -27,7 +27,12 @@ import org.apache.hadoop.util.ReflectionUtils;
* based on configuration.
*/
@InterfaceAudience.Private
public class CoordinatedStateManagerFactory {
public final class CoordinatedStateManagerFactory {
/**
* Private to keep this class from being accidentally instantiated.
*/
private CoordinatedStateManagerFactory(){}
/**
* Creates consensus provider from the given configuration.

View File

@@ -24,10 +24,10 @@ import java.util.concurrent.atomic.AtomicInteger;
* Thread factory that creates daemon threads
*/
public class DaemonThreadFactory implements ThreadFactory {
static final AtomicInteger poolNumber = new AtomicInteger(1);
final ThreadGroup group;
final AtomicInteger threadNumber = new AtomicInteger(1);
final String namePrefix;
private static final AtomicInteger poolNumber = new AtomicInteger(1);
private final ThreadGroup group;
private final AtomicInteger threadNumber = new AtomicInteger(1);
private final String namePrefix;
public DaemonThreadFactory(String name) {
SecurityManager s = System.getSecurityManager();
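The fields tightened above are implementation details; callers only ever pass the factory to an executor. A usage sketch, assuming the class is org.apache.hadoop.hbase.DaemonThreadFactory as in the HBase tree; the pool size and name prefix are made up:

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

import org.apache.hadoop.hbase.DaemonThreadFactory;

public final class DaemonPoolExample {
  private DaemonPoolExample() {}

  public static void main(String[] args) throws InterruptedException {
    // Threads from this factory are daemons, so they will not keep the JVM
    // alive once the non-daemon threads are done.
    ExecutorService pool =
        Executors.newFixedThreadPool(2, new DaemonThreadFactory("example-pool-"));
    pool.submit(() -> System.out.println("running on " + Thread.currentThread().getName()));
    pool.shutdown();
    pool.awaitTermination(5, TimeUnit.SECONDS);
  }
}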

View File

@@ -58,7 +58,7 @@ import org.apache.hadoop.util.StringUtils;
if (!isHealthy) {
boolean needToStop = decideToStop();
if (needToStop) {
this.stopper.stop("The node reported unhealthy " + threshold
this.getStopper().stop("The node reported unhealthy " + threshold
+ " number of times consecutively.");
}
// Always log health report.

View File

@@ -226,7 +226,7 @@ public abstract class CleanerChore<T extends FileCleanerDelegate> extends Chore
Iterable<FileStatus> deletableValidFiles = validFiles;
// check each of the cleaners for the valid files
for (T cleaner : cleanersChain) {
if (cleaner.isStopped() || this.stopper.isStopped()) {
if (cleaner.isStopped() || this.getStopper().isStopped()) {
LOG.warn("A file cleaner" + this.getName() + " is stopped, won't delete any more files in:"
+ this.oldFileDir);
return false;

View File

@@ -385,7 +385,7 @@ public class TestEndToEndSplitTransaction {
verify();
} catch (Throwable ex) {
this.ex = ex;
stopper.stop("caught exception");
getStopper().stop("caught exception");
}
}
}