HBASE-12519 Remove tabs used as whitespace (Varun Saxena)

stack 2014-11-28 20:50:02 -08:00
parent eb4c194a87
commit 58b6b24c29
64 changed files with 810 additions and 834 deletions


@@ -39,7 +39,7 @@ public class ExcludePrivateAnnotationsStandardDoclet {
  public static boolean start(RootDoc root) {
    System.out.println(
      ExcludePrivateAnnotationsStandardDoclet.class.getSimpleName());
    return Standard.start(RootDocProcessor.process(root));
  }


@@ -65,10 +65,10 @@ class RootDocProcessor {
      return getProxy(obj);
    } else if (obj instanceof Object[]) {
      Class<?> componentType = type.isArray() ? type.getComponentType()
          : cls.getComponentType();
      Object[] array = (Object[]) obj;
      Object[] newArray = (Object[]) Array.newInstance(componentType,
          array.length);
      for (int i = 0; i < array.length; ++i) {
        newArray[i] = process(array[i], componentType);
      }
@@ -98,116 +98,99 @@ class RootDocProcessor {
    }

    @Override
    public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
      String methodName = method.getName();
      if (target instanceof Doc) {
        if (methodName.equals("isIncluded")) {
          Doc doc = (Doc) target;
          return !exclude(doc) && doc.isIncluded();
        }
        if (target instanceof RootDoc) {
          if (methodName.equals("classes")) {
            return filter(((RootDoc) target).classes(), ClassDoc.class);
          } else if (methodName.equals("specifiedClasses")) {
            return filter(((RootDoc) target).specifiedClasses(), ClassDoc.class);
          } else if (methodName.equals("specifiedPackages")) {
            return filter(((RootDoc) target).specifiedPackages(), PackageDoc.class);
          }
        } else if (target instanceof ClassDoc) {
          if (isFiltered(args)) {
            if (methodName.equals("methods")) {
              return filter(((ClassDoc) target).methods(true), MethodDoc.class);
            } else if (methodName.equals("fields")) {
              return filter(((ClassDoc) target).fields(true), FieldDoc.class);
            } else if (methodName.equals("innerClasses")) {
              return filter(((ClassDoc) target).innerClasses(true), ClassDoc.class);
            } else if (methodName.equals("constructors")) {
              return filter(((ClassDoc) target).constructors(true), ConstructorDoc.class);
            }
          }
        } else if (target instanceof PackageDoc) {
          if (methodName.equals("allClasses")) {
            if (isFiltered(args)) {
              return filter(((PackageDoc) target).allClasses(true), ClassDoc.class);
            } else {
              return filter(((PackageDoc) target).allClasses(), ClassDoc.class);
            }
          } else if (methodName.equals("annotationTypes")) {
            return filter(((PackageDoc) target).annotationTypes(), AnnotationTypeDoc.class);
          } else if (methodName.equals("enums")) {
            return filter(((PackageDoc) target).enums(), ClassDoc.class);
          } else if (methodName.equals("errors")) {
            return filter(((PackageDoc) target).errors(), ClassDoc.class);
          } else if (methodName.equals("exceptions")) {
            return filter(((PackageDoc) target).exceptions(), ClassDoc.class);
          } else if (methodName.equals("interfaces")) {
            return filter(((PackageDoc) target).interfaces(), ClassDoc.class);
          } else if (methodName.equals("ordinaryClasses")) {
            return filter(((PackageDoc) target).ordinaryClasses(), ClassDoc.class);
          }
        }
      }
      if (args != null) {
        if (methodName.equals("compareTo") || methodName.equals("equals")
            || methodName.equals("overrides") || methodName.equals("subclassOf")) {
          args[0] = unwrap(args[0]);
        }
      }
      try {
        return process(method.invoke(target, args), method.getReturnType());
      } catch (InvocationTargetException e) {
        throw e.getTargetException();
      }
    }

    private static boolean exclude(Doc doc) {
      AnnotationDesc[] annotations = null;
      if (doc instanceof ProgramElementDoc) {
        annotations = ((ProgramElementDoc) doc).annotations();
      } else if (doc instanceof PackageDoc) {
        annotations = ((PackageDoc) doc).annotations();
      }
      if (annotations != null) {
        for (AnnotationDesc annotation : annotations) {
          String qualifiedTypeName = annotation.annotationType().qualifiedTypeName();
          if (qualifiedTypeName.equals(InterfaceAudience.Private.class.getCanonicalName())
              || qualifiedTypeName
                  .equals(InterfaceAudience.LimitedPrivate.class.getCanonicalName())) {
            return true;
          }
          if (stability.equals(StabilityOptions.EVOLVING_OPTION)) {
            if (qualifiedTypeName.equals(InterfaceStability.Unstable.class.getCanonicalName())) {
              return true;
            }
          }
          if (stability.equals(StabilityOptions.STABLE_OPTION)) {
            if (qualifiedTypeName.equals(InterfaceStability.Unstable.class.getCanonicalName())
                || qualifiedTypeName.equals(InterfaceStability.Evolving.class.getCanonicalName())) {
              return true;
            }
          }
        }
        for (AnnotationDesc annotation : annotations) {
          String qualifiedTypeName = annotation.annotationType().qualifiedTypeName();
          if (qualifiedTypeName.equals(InterfaceAudience.Public.class.getCanonicalName())) {
            return false;
          }
        }
      }
@@ -220,28 +203,24 @@ class RootDocProcessor {
    private static Object[] filter(Doc[] array, Class<?> componentType) {
      if (array == null || array.length == 0) {
        return array;
      }
      List<Object> list = new ArrayList<Object>(array.length);
      for (Doc entry : array) {
        if (!exclude(entry)) {
          list.add(process(entry, componentType));
        }
      }
      return list.toArray((Object[]) Array.newInstance(componentType, list.size()));
    }

    private Object unwrap(Object proxy) {
      if (proxy instanceof Proxy) return ((ExcludeHandler) Proxy.getInvocationHandler(proxy)).target;
      return proxy;
    }

    private boolean isFiltered(Object[] args) {
      return args != null && Boolean.TRUE.equals(args[0]);
    }
  }
}
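The RootDocProcessor hunks above only rewrap and re-indent existing logic; nothing functional changes. For readers skimming the diff, the standalone sketch below (not HBase code: the Registry interface, its names() method, and the startsWith("private") test are invented stand-ins) illustrates the java.lang.reflect.Proxy pattern that ExcludeHandler is built on, where method calls are intercepted and array-valued results are filtered before being returned.

import java.lang.reflect.Array;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.util.ArrayList;
import java.util.List;

public class FilteringProxyDemo {
  // Hypothetical stand-in for RootDoc/ClassDoc in the real doclet code.
  interface Registry {
    String[] names();
  }

  static Registry filtered(Registry target) {
    InvocationHandler handler = (proxy, method, args) -> {
      Object result = method.invoke(target, args);
      // Post-process array results, mirroring what RootDocProcessor.filter() does.
      if (result instanceof String[]) {
        List<String> kept = new ArrayList<>();
        for (String name : (String[]) result) {
          if (!name.startsWith("private")) { // stand-in for the @InterfaceAudience.Private check
            kept.add(name);
          }
        }
        return kept.toArray((String[]) Array.newInstance(String.class, kept.size()));
      }
      return result;
    };
    return (Registry) Proxy.newProxyInstance(
        Registry.class.getClassLoader(), new Class<?>[] { Registry.class }, handler);
  }

  public static void main(String[] args) {
    Registry raw = () -> new String[] { "publicThing", "privateThing" };
    System.out.println(String.join(",", filtered(raw).names())); // prints: publicThing
  }
}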


@@ -35,16 +35,15 @@ class StabilityOptions {
    return null;
  }

  public static void validOptions(String[][] options, DocErrorReporter reporter) {
    for (int i = 0; i < options.length; i++) {
      String opt = options[i][0].toLowerCase();
      if (opt.equals(UNSTABLE_OPTION)) {
        RootDocProcessor.stability = UNSTABLE_OPTION;
      } else if (opt.equals(EVOLVING_OPTION)) {
        RootDocProcessor.stability = EVOLVING_OPTION;
      } else if (opt.equals(STABLE_OPTION)) {
        RootDocProcessor.stability = STABLE_OPTION;
      }
    }
  }
@@ -53,9 +52,9 @@ class StabilityOptions {
    List<String[]> optionsList = new ArrayList<String[]>();
    for (int i = 0; i < options.length; i++) {
      if (!options[i][0].equalsIgnoreCase(UNSTABLE_OPTION)
          && !options[i][0].equalsIgnoreCase(EVOLVING_OPTION)
          && !options[i][0].equalsIgnoreCase(STABLE_OPTION)) {
        optionsList.add(options[i]);
      }
    }
    String[][] filteredOptions = new String[optionsList.size()][];
@@ -65,5 +64,4 @@ class StabilityOptions {
    }
    return filteredOptions;
  }
}


@@ -973,7 +973,7 @@ public class HTableDescriptor implements Comparable<HTableDescriptor> {
   * This compares the content of the two descriptors and not the reference.
   *
   * @return 0 if the contents of the descriptors are exactly matching,
   *         1 if there is a mismatch in the contents
   */
  @Override
  public int compareTo(@Nonnull final HTableDescriptor other) {


@@ -428,7 +428,7 @@ public class HTable implements HTableInterface, RegionLocator {
   * @param tableName Name of table to check.
   * @return {@code true} if table is online.
   * @throws IOException if a remote or network exception occurs
   * @deprecated use {@link HBaseAdmin#isTableEnabled(byte[])}
   */
  @Deprecated
  public static boolean isTableEnabled(Configuration conf, String tableName)
@@ -442,7 +442,7 @@ public class HTable implements HTableInterface, RegionLocator {
   * @param tableName Name of table to check.
   * @return {@code true} if table is online.
   * @throws IOException if a remote or network exception occurs
   * @deprecated use {@link HBaseAdmin#isTableEnabled(byte[])}
   */
  @Deprecated
  public static boolean isTableEnabled(Configuration conf, byte[] tableName)


@@ -32,7 +32,7 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
public class UnmodifyableHTableDescriptor extends HTableDescriptor {

  /** Default constructor */
  public UnmodifyableHTableDescriptor() {
    super();
  }

  /*


@@ -180,12 +180,11 @@ public class AggregationClient implements Closeable {
   */
  private void validateParameters(Scan scan, boolean canFamilyBeAbsent) throws IOException {
    if (scan == null
        || (Bytes.equals(scan.getStartRow(), scan.getStopRow()) && !Bytes.equals(
          scan.getStartRow(), HConstants.EMPTY_START_ROW))
        || ((Bytes.compareTo(scan.getStartRow(), scan.getStopRow()) > 0) && !Bytes.equals(
          scan.getStopRow(), HConstants.EMPTY_END_ROW))) {
      throw new IOException("Agg client Exception: Startrow should be smaller than Stoprow");
    } else if (!canFamilyBeAbsent) {
      if (scan.getFamilyMap().size() != 1) {
        throw new IOException("There must be only one family.");


@@ -68,8 +68,8 @@ public class DependentColumnFilter extends CompareFilter {
   * @param valueComparator comparator
   */
  public DependentColumnFilter(final byte [] family, final byte[] qualifier,
      final boolean dropDependentColumn, final CompareOp valueCompareOp,
      final ByteArrayComparable valueComparator) {
    // set up the comparator
    super(valueCompareOp, valueComparator);
    this.columnFamily = family;
@@ -136,19 +136,19 @@
  @Override
  public ReturnCode filterKeyValue(Cell c) {
    // Check if the column and qualifier match
    if (!CellUtil.matchingColumn(c, this.columnFamily, this.columnQualifier)) {
      // include non-matches for the time being, they'll be discarded afterwards
      return ReturnCode.INCLUDE;
    }
    // If it doesn't pass the op, skip it
    if (comparator != null
        && doCompare(compareOp, comparator, c.getValueArray(), c.getValueOffset(),
            c.getValueLength()))
      return ReturnCode.SKIP;
    stampSet.add(c.getTimestamp());
    if(dropDependentColumn) {
      return ReturnCode.SKIP;
    }
    return ReturnCode.INCLUDE;
  }


@@ -1049,7 +1049,7 @@ public final class RequestConverter {
  public static MoveRegionRequest buildMoveRegionRequest(
      final byte [] encodedRegionName, final byte [] destServerName) throws
      DeserializationException {
    MoveRegionRequest.Builder builder = MoveRegionRequest.newBuilder();
    builder.setRegion(
      buildRegionSpecifier(RegionSpecifierType.ENCODED_REGION_NAME,encodedRegionName));
    if (destServerName != null) {


@@ -61,7 +61,7 @@ public class TimeRange {
   * @param minStamp the minimum timestamp value, inclusive
   */
  public TimeRange(byte [] minStamp) {
    this.minStamp = Bytes.toLong(minStamp);
  }

  /**
@@ -126,8 +126,8 @@
   * @return true if within TimeRange, false if not
   */
  public boolean withinTimeRange(byte [] bytes, int offset) {
    if(allTime) return true;
    return withinTimeRange(Bytes.toLong(bytes, offset));
  }

  /**
@@ -139,9 +139,9 @@
   * @return true if within TimeRange, false if not
   */
  public boolean withinTimeRange(long timestamp) {
    if(allTime) return true;
    // check if >= minStamp
    return (minStamp <= timestamp && timestamp < maxStamp);
  }

  /**
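The TimeRange hunks above are whitespace-only, but the check they touch is easy to misread: minStamp is inclusive while maxStamp is exclusive, and allTime short-circuits everything. A minimal, hypothetical restatement of that logic in plain Java (not the HBase class itself):

public class TimeRangeCheckDemo {
  // Half-open interval check: [minStamp, maxStamp)
  static boolean within(boolean allTime, long minStamp, long maxStamp, long timestamp) {
    if (allTime) return true;
    return minStamp <= timestamp && timestamp < maxStamp;
  }

  public static void main(String[] args) {
    System.out.println(within(false, 10L, 20L, 10L)); // true: minStamp is inclusive
    System.out.println(within(false, 10L, 20L, 20L)); // false: maxStamp is exclusive
  }
}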


@@ -43,7 +43,7 @@ public class CollectionUtils {
    return in;
  }

  /************************ size ************************************/

  public static <T> int nullSafeSize(Collection<T> collection) {
    if (collection == null) {
@@ -56,7 +56,7 @@
    return nullSafeSize(a) == nullSafeSize(b);
  }

  /*************************** empty ****************************************/

  public static <T> boolean isEmpty(Collection<T> collection) {
    return collection == null || collection.isEmpty();
@@ -66,7 +66,7 @@
    return !isEmpty(collection);
  }

  /************************ first/last **************************/

  public static <T> T getFirst(Collection<T> collection) {
    if (CollectionUtils.isEmpty(collection)) {


@@ -164,7 +164,7 @@ public class JenkinsHash extends Hash {
    //-------------------------------- last block: affect all 32 bits of (c)
    switch (length) { // all the case statements fall through
    case 12:
      c += ((key[offset + 11] & BYTE_MASK) << 24);
    case 11:
      c += ((key[offset + 10] & BYTE_MASK) << 16);
    case 10:


@@ -201,7 +201,7 @@ public class TestHBaseConfiguration {
      hadoopClassesAvailable = true;
      LOG.info("Credential provider classes have been" +
          " loaded and initialized successfully through reflection.");
      return true;
    }
@@ -280,7 +280,7 @@
    List<Object> providers = getCredentialProviders(conf);
    if (null == providers) {
      throw new IOException("Could not fetch any CredentialProviders, " +
          "is the implementation available?");
    }
    Object provider = providers.get(0);


@@ -583,8 +583,8 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB
      if (args.length < 5) {
        System.err
            .println("Usage: Loop <num iterations> " +
                "<num mappers> <num nodes per mapper> <output dir> " +
                "<num reducers> [<width> <wrap multiplier>]");
        return 1;
      }
      LOG.info("Running Loop with args:" + Arrays.deepToString(args));


@@ -45,7 +45,7 @@ public class PrefixTreeBlockMeta {
  public static final int MAX_FAMILY_LENGTH = Byte.MAX_VALUE;// hard-coded in KeyValue

  public static final int
    NUM_LONGS = 2,
    NUM_INTS = 28,
    NUM_SHORTS = 0,//keyValueTypeWidth not persisted
    NUM_SINGLE_BYTES = 2,
@@ -135,7 +135,7 @@
  }

  /**************** operate on each field **********************/

  public int calculateNumMetaBytes(){
    int numBytes = 0;
@@ -339,7 +339,7 @@
    position += UVIntTool.numBytes(numUniqueTags);
  }

  //TODO method that can read directly from ByteBuffer instead of InputStream

  /*************** methods *************************/


@@ -306,7 +306,7 @@ public class PrefixTreeArraySearcher extends PrefixTreeArrayReversibleScanner im
  }

  /****************** complete seek when token mismatch ******************/

  /**
   * @param searcherIsAfterInputKey <0: input key is before the searcher's position<br/>


@@ -71,7 +71,7 @@ public class ColumnSectionWriter {
  private List<Integer> outputArrayOffsets;

  /*********************** construct *********************/

  public ColumnSectionWriter() {
    this.nonLeaves = Lists.newArrayList();
@@ -100,7 +100,7 @@
  }

  /****************** methods *******************************/

  public ColumnSectionWriter compile() {
    if (this.nodeType == ColumnNodeType.FAMILY) {


@@ -75,7 +75,7 @@ public class LongEncoder {
  }

  /************* methods ***************************/

  public void add(long timestamp) {
    uniqueValues.add(timestamp);
@@ -158,7 +158,7 @@
  }

  /******************** get/set **************************/

  public long getMin() {
    return min;


@@ -179,7 +179,7 @@ public class Tokenizer{
  }

  /********************** write ***************************/

  public Tokenizer setNodeFirstInsertionIndexes() {
    root.setInsertionIndexes(0);


@@ -289,7 +289,7 @@ public class TokenizerNode{
  }

  /************************ byte[] utils *************************/

  protected boolean partiallyMatchesToken(ByteRange bytes) {
    return numIdenticalBytes(bytes) > 0;
@@ -304,7 +304,7 @@
  }

  /***************** moving nodes around ************************/

  public void appendNodesToExternalList(List<TokenizerNode> appendTo, boolean includeNonLeaves,
      boolean includeLeaves) {
@@ -462,7 +462,7 @@
  }

  /********************** count different node types ********************/

  public int getNumBranchNodesIncludingThisNode() {
    if (isLeaf()) {


@@ -30,9 +30,9 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
@InterfaceAudience.Private
public enum TokenizerRowSearchPosition {

  AFTER,//the key is after this tree node, so keep searching
  BEFORE,//in a binary search, this tells us to back up
  MATCH,//the current node is a full match
  NO_MATCH,//might as well return a value more informative than null

}


@@ -78,13 +78,13 @@ public class UFIntTool {
  private static final long[] MASKS = new long[] {
    (long) 255,
    (long) 255 << 8,
    (long) 255 << 16,
    (long) 255 << 24,
    (long) 255 << 32,
    (long) 255 << 40,
    (long) 255 << 48,
    (long) 255 << 56
  };

  public static void writeBytes(int outputWidth, final long value, OutputStream os) throws IOException {
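The MASKS table in the hunk above is just 0xFF shifted into each of the eight byte positions of a long. As a rough, hypothetical illustration of how such masks support writing a value at a fixed byte width (a sketch only, not the actual UFIntTool implementation):

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;

public class FixedWidthWriteDemo {
  // Writes the low `width` bytes of `value`, most significant byte first.
  static void writeBytes(int width, long value, OutputStream os) throws IOException {
    for (int i = width - 1; i >= 0; --i) {
      long mask = 255L << (8 * i);                  // same shape as the MASKS entries
      os.write((int) ((value & mask) >>> (8 * i)));
    }
  }

  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    writeBytes(3, 0x0A0B0C, os);
    for (byte b : os.toByteArray()) {
      System.out.printf("%02x ", b);                // prints: 0a 0b 0c
    }
  }
}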


@@ -48,7 +48,7 @@ import org.junit.runners.Parameterized.Parameters;
@RunWith(Parameterized.class)
public class TestPrefixTreeSearcher {

  protected static int BLOCK_START = 7;

  @Parameters
  public static Collection<Object[]> parameters() {


@@ -77,7 +77,7 @@ public class TestRowEncoder {
    this.rows = testRows;
  }

  @Before
  public void compile() throws IOException {
    // Always run with tags. But should also ensure that KVs without tags work fine
    os = new ByteArrayOutputStream(1 << 20);
@@ -175,7 +175,7 @@
  }

  /**************** helper **************************/

  protected void assertKeyAndValueEqual(Cell expected, Cell actual) {
    // assert keys are equal (doesn't compare values)


@@ -29,7 +29,7 @@ import com.google.common.collect.Lists;
public class TestRowDataComplexQualifiers extends BaseTestRowData{

  static byte[]
    Arow = Bytes.toBytes("Arow"),
    cf = PrefixTreeTestConstants.TEST_CF,
    v0 = Bytes.toBytes("v0");


@@ -36,36 +36,36 @@ import com.google.common.collect.Lists;
 */
public class TestRowDataDeeper extends BaseTestRowData{

  static byte[]
    cdc = Bytes.toBytes("cdc"),
    cf6 = Bytes.toBytes("cf6"),
    cfc = Bytes.toBytes("cfc"),
    f = Bytes.toBytes("f"),
    q = Bytes.toBytes("q"),
    v = Bytes.toBytes("v");

  static long
    ts = 55L;

  static List<KeyValue> d = Lists.newArrayList();
  static{
    d.add(new KeyValue(cdc, f, q, ts, v));
    d.add(new KeyValue(cf6, f, q, ts, v));
    d.add(new KeyValue(cfc, f, q, ts, v));
  }

  @Override
  public List<KeyValue> getInputs() {
    return d;
  }

  @Override
  public void individualBlockMetaAssertions(PrefixTreeBlockMeta blockMeta) {
    //0: token:c; fan:d,f
    //1: token:f; fan:6,c
    //2: leaves
    Assert.assertEquals(3, blockMeta.getRowTreeDepth());
  }

  @Override
  public void individualSearcherAssertions(CellSearcher searcher) {


@@ -33,62 +33,62 @@ import com.google.common.collect.Lists;
 */
public class TestRowDataDifferentTimestamps extends BaseTestRowData{

  static byte[]
    Arow = Bytes.toBytes("Arow"),
    Brow = Bytes.toBytes("Brow"),
    cf = Bytes.toBytes("fammy"),
    cq0 = Bytes.toBytes("cq0"),
    cq1 = Bytes.toBytes("cq1"),
    v0 = Bytes.toBytes("v0");

  static List<KeyValue> d = Lists.newArrayList();
  static{
    KeyValue kv0 = new KeyValue(Arow, cf, cq0, 0L, v0);
    kv0.setSequenceId(123456789L);
    d.add(kv0);

    KeyValue kv1 = new KeyValue(Arow, cf, cq1, 1L, v0);
    kv1.setSequenceId(3L);
    d.add(kv1);

    KeyValue kv2 = new KeyValue(Brow, cf, cq0, 12345678L, v0);
    kv2.setSequenceId(65537L);
    d.add(kv2);

    //watch out... Long.MAX_VALUE comes back as 1332221664203, even with other encoders
    //d.add(new KeyValue(Brow, cf, cq1, Long.MAX_VALUE, v0));
    KeyValue kv3 = new KeyValue(Brow, cf, cq1, Long.MAX_VALUE-1, v0);
    kv3.setSequenceId(1L);
    d.add(kv3);

    KeyValue kv4 = new KeyValue(Brow, cf, cq1, 999999999, v0);
    //don't set memstoreTS
    d.add(kv4);

    KeyValue kv5 = new KeyValue(Brow, cf, cq1, 12345, v0);
    kv5.setSequenceId(0L);
    d.add(kv5);
  }

  @Override
  public List<KeyValue> getInputs() {
    return d;
  }

  @Override
  public void individualBlockMetaAssertions(PrefixTreeBlockMeta blockMeta) {
    Assert.assertTrue(blockMeta.getNumMvccVersionBytes() > 0);
    Assert.assertEquals(12, blockMeta.getNumValueBytes());

    Assert.assertFalse(blockMeta.isAllSameTimestamp());
    Assert.assertNotNull(blockMeta.getMinTimestamp());
    Assert.assertTrue(blockMeta.getTimestampIndexWidth() > 0);
    Assert.assertTrue(blockMeta.getTimestampDeltaWidth() > 0);

    Assert.assertFalse(blockMeta.isAllSameMvccVersion());
    Assert.assertNotNull(blockMeta.getMinMvccVersion());
    Assert.assertTrue(blockMeta.getMvccVersionIndexWidth() > 0);
    Assert.assertTrue(blockMeta.getMvccVersionDeltaWidth() > 0);
  }
}


@@ -42,49 +42,49 @@
public class TestRowDataExerciseFInts extends BaseTestRowData{

  static List<ByteRange> rows;
  static{
    List<String> rowStrings = new ArrayList<String>();
    rowStrings.add("com.edsBlog/directoryAa/pageAaa");
    rowStrings.add("com.edsBlog/directoryAa/pageBbb");
    rowStrings.add("com.edsBlog/directoryAa/pageCcc");
    rowStrings.add("com.edsBlog/directoryAa/pageDdd");
    rowStrings.add("com.edsBlog/directoryBb/pageEee");
    rowStrings.add("com.edsBlog/directoryBb/pageFff");
    rowStrings.add("com.edsBlog/directoryBb/pageGgg");
    rowStrings.add("com.edsBlog/directoryBb/pageHhh");
    rowStrings.add("com.isabellasBlog/directoryAa/pageAaa");
    rowStrings.add("com.isabellasBlog/directoryAa/pageBbb");
    rowStrings.add("com.isabellasBlog/directoryAa/pageCcc");
    rowStrings.add("com.isabellasBlog/directoryAa/pageDdd");
    rowStrings.add("com.isabellasBlog/directoryBb/pageEee");
    rowStrings.add("com.isabellasBlog/directoryBb/pageFff");
    rowStrings.add("com.isabellasBlog/directoryBb/pageGgg");
    rowStrings.add("com.isabellasBlog/directoryBb/pageHhh");
    ByteRangeTreeSet ba = new ByteRangeTreeSet();
    for(String row : rowStrings){
      ba.add(new SimpleMutableByteRange(Bytes.toBytes(row)));
    }
    rows = ba.compile().getSortedRanges();
  }

  static List<String> cols = Lists.newArrayList();
  static{
    cols.add("Chrome");
    cols.add("Chromeb");
    cols.add("Firefox");
    cols.add("InternetExplorer");
    cols.add("Opera");
    cols.add("Safari");
    cols.add("Z1stBrowserWithHuuuuuuuuuuuugeQualifier");
    cols.add("Z2ndBrowserWithEvenBiggerQualifierMoreMoreMoreMoreMore");
    cols.add("Z3rdBrowserWithEvenBiggerQualifierMoreMoreMoreMoreMore");
    cols.add("Z4thBrowserWithEvenBiggerQualifierMoreMoreMoreMoreMore");
    cols.add("Z5thBrowserWithEvenBiggerQualifierMoreMoreMoreMoreMore");
    cols.add("Z6thBrowserWithEvenBiggerQualifierMoreMoreMoreMoreMore");
    cols.add("Z7thBrowserWithEvenBiggerQualifierMoreMoreMoreMoreMore");
    cols.add("Z8thBrowserWithEvenBiggerQualifierMoreMoreMoreMoreMore");
    cols.add("Z9thBrowserWithEvenBiggerQualifierMoreMoreMoreMoreMore");
  }

  static long ts = 1234567890;


@@ -29,31 +29,31 @@ import com.google.common.collect.Lists;
public class TestRowDataNub extends BaseTestRowData{

  static byte[]
    rowA = Bytes.toBytes("rowA"),
    rowB = Bytes.toBytes("rowB"),//nub
    rowBB = Bytes.toBytes("rowBB"),
    cf = PrefixTreeTestConstants.TEST_CF,
    cq0 = Bytes.toBytes("cq0"),
    cq1 = Bytes.toBytes("cq1"),
    v0 = Bytes.toBytes("v0");

  static long
    ts = 55L;

  static List<KeyValue> d = Lists.newArrayList();
  static{
    d.add(new KeyValue(rowA, cf, cq0, ts, v0));
    d.add(new KeyValue(rowA, cf, cq1, ts, v0));
    d.add(new KeyValue(rowB, cf, cq0, ts, v0));
    d.add(new KeyValue(rowB, cf, cq1, ts, v0));
    d.add(new KeyValue(rowBB, cf, cq0, ts, v0));
    d.add(new KeyValue(rowBB, cf, cq1, ts, v0));
  }

  @Override
  public List<KeyValue> getInputs() {
    return d;
  }
}


@@ -28,15 +28,15 @@ import com.google.common.collect.Lists;
public class TestRowDataQualifierByteOrdering extends BaseTestRowData{

  static byte[]
    Arow = Bytes.toBytes("Arow"),
    Brow = Bytes.toBytes("Brow"),
    Brow2 = Bytes.toBytes("Brow2"),
    fam = Bytes.toBytes("HappyFam"),
    cq0 = Bytes.toBytes("cq0"),
    cq1 = Bytes.toBytes("cq1tail"),//make sure tail does not come back as liat
    cq2 = Bytes.toBytes("cq2"),
    v0 = Bytes.toBytes("v0");

  static long ts = 55L;


@@ -55,13 +55,13 @@ public class TestRowDataSearcherRowMiss extends BaseTestRowData{
    d.add(new KeyValue(B, cf, cq, ts, v));
  }

  @Override
  public List<KeyValue> getInputs() {
    return d;
  }

  @Override
  public void individualSearcherAssertions(CellSearcher searcher) {
    assertRowOffsetsCorrect();

    searcher.resetToBeforeFirstEntry();
@@ -83,13 +83,13 @@ public class TestRowDataSearcherRowMiss extends BaseTestRowData{
    testBetween2and3(searcher);
  }

  /************ private methods, call from above *******************/

  private void assertRowOffsetsCorrect(){
    Assert.assertEquals(4, getRowStartIndexes().size());
  }

  private void testBetween1and2(CellSearcher searcher){
    CellScannerPosition p;//reuse
    Cell betweenAAndAAA = new KeyValue(AA, cf, cq, ts-2, v);
@@ -105,7 +105,7 @@ public class TestRowDataSearcherRowMiss extends BaseTestRowData{
    p = searcher.positionAtOrAfter(betweenAAndAAA);
    Assert.assertEquals(CellScannerPosition.AFTER, p);
    Assert.assertTrue(CellComparator.equals(searcher.current(), d.get(2)));
  }

  private void testBetween2and3(CellSearcher searcher){
    CellScannerPosition p;//reuse


@@ -29,12 +29,12 @@ import com.google.common.collect.Lists;
public class TestRowDataSingleQualifier extends BaseTestRowData{

  static byte[]
    rowA = Bytes.toBytes("rowA"),
    rowB = Bytes.toBytes("rowB"),
    cf = PrefixTreeTestConstants.TEST_CF,
    cq0 = Bytes.toBytes("cq0"),
    v0 = Bytes.toBytes("v0");

  static long ts = 55L;


@@ -33,12 +33,12 @@ import com.google.common.collect.Lists;
public class TestRowDataTrivial extends BaseTestRowData{

  static byte[]
    rA = Bytes.toBytes("rA"),
    rB = Bytes.toBytes("rB"),//turn "r" into a branch for the Searcher tests
    cf = Bytes.toBytes("fam"),
    cq0 = Bytes.toBytes("q0"),
    v0 = Bytes.toBytes("v0");

  static long ts = 55L;


@@ -40,7 +40,7 @@ import com.google.common.collect.Lists;
public class TestRowDataUrls extends BaseTestRowData{

  static List<ByteRange> rows;
  static{
    List<String> rowStrings = new ArrayList<String>();
    rowStrings.add("com.edsBlog/directoryAa/pageAaa");
    rowStrings.add("com.edsBlog/directoryAa/pageBbb");


@@ -51,33 +51,33 @@ public class TestRowDataUrlsExample extends BaseTestRowData{
  static String FAMILY = "hits";

  static List<String> BROWSERS = Lists.newArrayList(
      "Chrome", "IE8", "IE9beta");//, "Opera", "Safari");
  static long TIMESTAMP = 1234567890;

  static int MAX_VALUE = 50;

  static List<KeyValue> kvs = Lists.newArrayList();
  static{
    for(String rowKey : URLS){
      for(String qualifier : BROWSERS){
        KeyValue kv = new KeyValue(
            Bytes.toBytes(rowKey),
            Bytes.toBytes(FAMILY),
            Bytes.toBytes(qualifier),
            TIMESTAMP,
            KeyValue.Type.Put,
            Bytes.toBytes("VvvV"));
        kvs.add(kv);
      }
    }
  }

  /**
   * Used for generating docs.
   */
  public static void main(String... args) throws IOException{
    System.out.println("-- inputs --");
    System.out.println(KeyValueTestUtil.toStringWithPadding(kvs, true));

    ByteArrayOutputStream os = new ByteArrayOutputStream(1<<20);
    PrefixTreeEncoder encoder = new PrefixTreeEncoder(os, false);
    for(KeyValue kv : kvs){
@@ -116,11 +116,11 @@ public class TestRowDataUrlsExample extends BaseTestRowData{
    System.out.println("-- concatenated values --");
    System.out.println(Bytes.toStringBinary(encoder.getValueByteRange().deepCopyToNewArray()));
  }

  @Override
  public List<KeyValue> getInputs() {
    return kvs;
  }
}


@@ -40,39 +40,39 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
@XmlRootElement(name="ClusterVersion")
@InterfaceAudience.Private
public class StorageClusterVersionModel implements Serializable {
  private static final long serialVersionUID = 1L;

  private String version;

  /**
   * @return the storage cluster version
   */
  @XmlValue
  public String getVersion() {
    return version;
  }

  /**
   * @param version the storage cluster version
   */
  public void setVersion(String version) {
    this.version = version;
  }

  /* (non-Javadoc)
   * @see java.lang.Object#toString()
   */
  @JsonValue
  @Override
  public String toString() {
    return version;
  }

  //needed for jackson deserialization
  private static StorageClusterVersionModel valueOf(String value) {
    StorageClusterVersionModel versionModel
        = new StorageClusterVersionModel();
    versionModel.setVersion(value);
    return versionModel;
  }
}


@@ -38,67 +38,67 @@ import org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableLis
@InterfaceAudience.Private
public class TableListModel implements Serializable, ProtobufMessageHandler {

  private static final long serialVersionUID = 1L;

  private List<TableModel> tables = new ArrayList<TableModel>();

  /**
   * Default constructor
   */
  public TableListModel() {}

  /**
   * Add the table name model to the list
   * @param table the table model
   */
  public void add(TableModel table) {
    tables.add(table);
  }

  /**
   * @param index the index
   * @return the table model
   */
  public TableModel get(int index) {
    return tables.get(index);
  }

  /**
   * @return the tables
   */
  @XmlElementRef(name="table")
  public List<TableModel> getTables() {
    return tables;
  }

  /**
   * @param tables the tables to set
   */
  public void setTables(List<TableModel> tables) {
    this.tables = tables;
  }

  /* (non-Javadoc)
   * @see java.lang.Object#toString()
   */
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    for(TableModel aTable : tables) {
      sb.append(aTable.toString());
      sb.append('\n');
    }
    return sb.toString();
  }

  @Override
  public byte[] createProtobufOutput() {
    TableList.Builder builder = TableList.newBuilder();
    for (TableModel aTable : tables) {
      builder.addName(aTable.getName());
    }
    return builder.build().toByteArray();
  }

  @Override
  public ProtobufMessageHandler getObjectFromMessage(byte[] message)


@@ -41,44 +41,44 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
@InterfaceAudience.Private
public class TableModel implements Serializable {

  private static final long serialVersionUID = 1L;

  private String name;

  /**
   * Default constructor
   */
  public TableModel() {}

  /**
   * Constructor
   * @param name
   */
  public TableModel(String name) {
    super();
    this.name = name;
  }

  /**
   * @return the name
   */
  @XmlAttribute
  public String getName() {
    return name;
  }

  /**
   * @param name the name to set
   */
  public void setName(String name) {
    this.name = name;
  }

  /* (non-Javadoc)
   * @see java.lang.Object#toString()
   */
  @Override
  public String toString() {
    return this.name;
  }
}


@@ -48,9 +48,9 @@ import com.sun.jersey.spi.container.servlet.ServletContainer;
@InterfaceAudience.Private
public class VersionModel implements Serializable, ProtobufMessageHandler {

  private static final long serialVersionUID = 1L;

  private String restVersion;
  private String jvmVersion;
  private String osVersion;
  private String serverVersion;
@@ -65,30 +65,30 @@
   * Constructor
   * @param context the servlet context
   */
  public VersionModel(ServletContext context) {
    restVersion = RESTServlet.VERSION_STRING;
    jvmVersion = System.getProperty("java.vm.vendor") + ' ' +
      System.getProperty("java.version") + '-' +
      System.getProperty("java.vm.version");
    osVersion = System.getProperty("os.name") + ' ' +
      System.getProperty("os.version") + ' ' +
      System.getProperty("os.arch");
    serverVersion = context.getServerInfo();
    jerseyVersion = ServletContainer.class.getPackage()
      .getImplementationVersion();
  }

  /**
   * @return the REST gateway version
   */
  @XmlAttribute(name="REST")
  public String getRESTVersion() {
    return restVersion;
  }

  /**
   * @return the JVM vendor and version
   */
  @XmlAttribute(name="JVM")
  public String getJVMVersion() {
    return jvmVersion;
@@ -154,34 +154,34 @@
  }

  /* (non-Javadoc)
   * @see java.lang.Object#toString()
   */
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("rest ");
    sb.append(restVersion);
    sb.append(" [JVM: ");
    sb.append(jvmVersion);
    sb.append("] [OS: ");
    sb.append(osVersion);
    sb.append("] [Server: ");
    sb.append(serverVersion);
    sb.append("] [Jersey: ");
    sb.append(jerseyVersion);
    sb.append("]\n");
    return sb.toString();
  }

  @Override
  public byte[] createProtobufOutput() {
    Version.Builder builder = Version.newBuilder();
    builder.setRestVersion(restVersion);
    builder.setJvmVersion(jvmVersion);
    builder.setOsVersion(osVersion);
    builder.setServerVersion(serverVersion);
    builder.setJerseyVersion(jerseyVersion);
    return builder.build().toByteArray();
  }

  @Override

View File

@ -55,12 +55,12 @@ import com.sun.jersey.api.json.JSONJAXBContext;
@InterfaceAudience.Private
public class JAXBContextResolver implements ContextResolver<JAXBContext> {
private final JAXBContext context;
private final Set<Class<?>> types;
private final Class<?>[] cTypes = {
CellModel.class,
CellSetModel.class,
ColumnSchemaModel.class,
RowModel.class,
@ -68,22 +68,22 @@ public class JAXBContextResolver implements ContextResolver<JAXBContext> {
StorageClusterStatusModel.class,
StorageClusterVersionModel.class,
TableInfoModel.class,
TableListModel.class,
TableModel.class,
TableRegionModel.class,
TableSchemaModel.class,
VersionModel.class
};
@SuppressWarnings("unchecked")
public JAXBContextResolver() throws Exception {
this.types = new HashSet(Arrays.asList(cTypes));
this.context = new JSONJAXBContext(JSONConfiguration.natural().build(),
cTypes);
}
@Override
public JAXBContext getContext(Class<?> objectType) {
return (types.contains(objectType)) ? context : null;
}
}

View File

@ -54,21 +54,21 @@ public class PlainTextMessageBodyProducer
return true;
}
@Override
public long getSize(Object object, Class<?> type, Type genericType,
Annotation[] annotations, MediaType mediaType) {
byte[] bytes = object.toString().getBytes();
buffer.set(bytes);
return bytes.length;
}
@Override
public void writeTo(Object object, Class<?> type, Type genericType,
Annotation[] annotations, MediaType mediaType,
MultivaluedMap<String, Object> httpHeaders, OutputStream outStream)
throws IOException, WebApplicationException {
byte[] bytes = buffer.get();
outStream.write(bytes);
buffer.remove();
}
}

View File

@ -50,32 +50,32 @@ public class ProtobufMessageBodyProducer
private ThreadLocal<byte[]> buffer = new ThreadLocal<byte[]>();
@Override
public boolean isWriteable(Class<?> type, Type genericType,
Annotation[] annotations, MediaType mediaType) {
return ProtobufMessageHandler.class.isAssignableFrom(type);
}
@Override
public long getSize(ProtobufMessageHandler m, Class<?> type, Type genericType,
Annotation[] annotations, MediaType mediaType) {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
try {
baos.write(m.createProtobufOutput());
} catch (IOException e) {
return -1;
}
byte[] bytes = baos.toByteArray();
buffer.set(bytes);
return bytes.length;
}
public void writeTo(ProtobufMessageHandler m, Class<?> type, Type genericType,
Annotation[] annotations, MediaType mediaType,
MultivaluedMap<String, Object> httpHeaders, OutputStream entityStream)
throws IOException, WebApplicationException {
byte[] bytes = buffer.get();
entityStream.write(bytes);
buffer.remove();
}
}
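Both body producers above follow the same JAX-RS MessageBodyWriter pattern: getSize() serializes the entity once, parks the bytes in a ThreadLocal, and writeTo() drains that buffer later on the same request thread. A minimal standalone sketch of the pattern follows; it is illustrative only, not part of this commit, and the explicit UTF-8 charset is an assumption (the HBase code above uses the platform default).

import java.io.IOException;
import java.io.OutputStream;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
import java.nio.charset.StandardCharsets;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.ext.MessageBodyWriter;
import javax.ws.rs.ext.Provider;

@Provider
@Produces(MediaType.TEXT_PLAIN)
public class BufferedPlainTextWriter implements MessageBodyWriter<Object> {
  // getSize() runs before writeTo() on the same request thread, so the serialized
  // bytes are parked here to avoid serializing the entity twice.
  private final ThreadLocal<byte[]> buffer = new ThreadLocal<byte[]>();

  @Override
  public boolean isWriteable(Class<?> type, Type genericType,
      Annotation[] annotations, MediaType mediaType) {
    return true;
  }

  @Override
  public long getSize(Object object, Class<?> type, Type genericType,
      Annotation[] annotations, MediaType mediaType) {
    // Explicit charset is an assumption for this sketch, not HBase's behaviour.
    byte[] bytes = object.toString().getBytes(StandardCharsets.UTF_8);
    buffer.set(bytes);
    return bytes.length;
  }

  @Override
  public void writeTo(Object object, Class<?> type, Type genericType,
      Annotation[] annotations, MediaType mediaType,
      MultivaluedMap<String, Object> httpHeaders, OutputStream outStream)
      throws IOException, WebApplicationException {
    byte[] bytes = buffer.get();
    outStream.write(bytes);
    buffer.remove(); // clear the thread-local so pooled request threads do not hold stale bytes
  }
}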

View File

@ -26,10 +26,10 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
*/
@InterfaceAudience.Private
public interface HBaseRPCErrorHandler {
/**
* Take actions on the event of an OutOfMemoryError.
* @param e the throwable
* @return if the server should be shut down
*/
boolean checkOOME(final Throwable e) ;
}
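The interface only fixes the contract: return true when the surrounding server should shut itself down after an OutOfMemoryError. A hypothetical implementation (a sketch, not taken from HBase; the org.apache.hadoop.hbase.ipc package location is assumed) could look like this:

import org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler;

/** Sketch only: request shutdown when a genuine OutOfMemoryError is found in the cause chain. */
public class LoggingOOMErrorHandler implements HBaseRPCErrorHandler {
  @Override
  public boolean checkOOME(final Throwable e) {
    for (Throwable t = e; t != null; t = t.getCause()) {
      if (t instanceof OutOfMemoryError) {
        // A real handler would typically stop accepting RPCs before asking for shutdown.
        System.err.println("OutOfMemoryError detected, requesting shutdown: " + t);
        return true;
      }
    }
    return false;
  }
}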

View File

@ -976,7 +976,7 @@ public class RegionPlacementMaintainer {
opt.addOption("munkres", false,
"use munkres to place secondaries and tertiaries");
opt.addOption("ld", "locality-dispersion", false, "print locality and dispersion " +
"information for current plan");
try {
// Set the log4j
Logger.getLogger("org.apache.zookeeper").setLevel(Level.ERROR);

View File

@ -94,7 +94,7 @@ public class SnapshotOfRegionAssignmentFromMeta {
*/
public void initialize() throws IOException {
LOG.info("Start to scan the hbase:meta for the current region assignment " +
"snappshot");
// TODO: at some point this code could live in the MetaTableAccessor
Visitor v = new Visitor() {
@Override

View File

@ -183,9 +183,9 @@ public abstract class CleanerChore<T extends FileCleanerDelegate> extends Chore
// if the directory still has children, we can't delete it, so we are done
if (!allChildrenDeleted) return false;
} catch (IOException e) {
e = e instanceof RemoteException ?
((RemoteException)e).unwrapRemoteException() : e;
LOG.warn("Error while listing directory: " + dir, e);
// couldn't list directory, so don't try to delete, and don't return success
return false;
}
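The unwrap-then-log idiom above (replace a RemoteException with its server-side cause before logging) appears again in MemStoreFlusher in the next hunk. A small helper, purely hypothetical and not part of this commit, captures the pattern:

import java.io.IOException;
import org.apache.hadoop.ipc.RemoteException;

final class IOExceptions {
  private IOExceptions() {
  }

  /** Return the server-side exception carried by a RemoteException, or the exception itself. */
  static IOException unwrap(IOException e) {
    return e instanceof RemoteException ? ((RemoteException) e).unwrapRemoteException() : e;
  }
}

Call sites would then read LOG.warn("Error while listing directory: " + dir, IOExceptions.unwrap(e)).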

View File

@ -390,9 +390,9 @@ class MemStoreFlusher implements FlushRequester {
this.server.compactSplitThread.requestSystemCompaction(
region, Thread.currentThread().getName());
} catch (IOException e) {
e = e instanceof RemoteException ?
((RemoteException)e).unwrapRemoteException() : e;
LOG.error(
"Cache flush failed for region " + Bytes.toStringBinary(region.getRegionName()),
e);
}

View File

@ -115,7 +115,7 @@ public class SplitLogWorker implements Runnable {
|| cause instanceof ConnectException
|| cause instanceof SocketTimeoutException)) {
LOG.warn("log replaying of " + filename + " can't connect to the target regionserver, "
+ "resigning", e);
return Status.RESIGNED;
} else if (cause instanceof InterruptedException) {
LOG.warn("log splitting of " + filename + " interrupted, resigning", e);

View File

@ -54,7 +54,7 @@ public class WALSplitterHandler extends EventHandler {
public WALSplitterHandler(final Server server, SplitLogWorkerCoordination coordination,
SplitLogWorkerCoordination.SplitTaskDetails splitDetails, CancelableProgressable reporter,
AtomicInteger inProgressTasks, TaskExecutor splitTaskExecutor, RecoveryMode mode) {
super(server, EventType.RS_LOG_REPLAY);
this.splitTaskDetails = splitDetails;
this.coordination = coordination;
this.reporter = reporter;

View File

@ -367,8 +367,8 @@ public class HBaseFsck extends Configured {
if (hbckOutFd == null) {
setRetCode(-1);
LOG.error("Another instance of hbck is running, exiting this instance.[If you are sure" +
" no other instance is running, delete the lock file " +
HBCK_LOCK_PATH + " and rerun the tool]");
throw new IOException("Duplicate hbck - Abort");
}
@ -1640,8 +1640,8 @@ public class HBaseFsck extends Configured {
*/
private void checkAndFixConsistency()
throws IOException, KeeperException, InterruptedException {
// Divide the checks in two phases. One for default/primary replicas and another
// for the non-primary ones. Keeps code cleaner this way.
for (java.util.Map.Entry<String, HbckInfo> e: regionInfoMap.entrySet()) {
if (e.getValue().getReplicaId() == HRegionInfo.DEFAULT_REPLICA_ID) {
checkRegionConsistency(e.getKey(), e.getValue());
@ -1890,8 +1890,8 @@ public class HBaseFsck extends Configured {
private void checkRegionConsistency(final String key, final HbckInfo hbi)
throws IOException, KeeperException, InterruptedException {
if (hbi.isSkipChecks()) return;
String descriptiveName = hbi.toString();
boolean inMeta = hbi.metaEntry != null;
// In case not checking HDFS, assume the region is on HDFS
boolean inHdfs = !shouldCheckHdfs() || hbi.getHdfsRegionDir() != null;

View File

@ -50,7 +50,7 @@ import org.apache.zookeeper.KeeperException;
public class RegionServerTracker extends ZooKeeperListener {
private static final Log LOG = LogFactory.getLog(RegionServerTracker.class);
private NavigableMap<ServerName, RegionServerInfo> regionServers =
new TreeMap<ServerName, RegionServerInfo>();
private ServerManager serverManager;
private Server server;

View File

@ -1032,8 +1032,8 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
* @throws IOException
*/
public Path getDefaultRootDirPath() throws IOException {
FileSystem fs = FileSystem.get(this.conf);
return new Path(fs.makeQualified(fs.getHomeDirectory()),"hbase");
}
/**

View File

@ -135,7 +135,7 @@ public class TestBigDecimalColumnInterpreter {
Scan scan = new Scan();
scan.addColumn(TEST_FAMILY, TEST_QUALIFIER);
final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci =
new BigDecimalColumnInterpreter();
BigDecimal median = aClient.median(TEST_TABLE, ci, scan);
assertEquals(new BigDecimal("8.00"), median);
}
@ -154,7 +154,7 @@ public class TestBigDecimalColumnInterpreter {
Scan scan = new Scan();
scan.addColumn(TEST_FAMILY, TEST_QUALIFIER);
final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci =
new BigDecimalColumnInterpreter();
BigDecimal maximum = aClient.max(TEST_TABLE, ci, scan);
assertEquals(new BigDecimal("19.00"), maximum);
}
@ -203,7 +203,7 @@ public class TestBigDecimalColumnInterpreter {
public void testMaxWithValidRangeWithNullCF() {
AggregationClient aClient = new AggregationClient(conf);
final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci =
new BigDecimalColumnInterpreter();
Scan scan = new Scan();
BigDecimal max = null;
try {
@ -219,7 +219,7 @@ public class TestBigDecimalColumnInterpreter {
public void testMaxWithInvalidRange() {
AggregationClient aClient = new AggregationClient(conf);
final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci =
new BigDecimalColumnInterpreter();
Scan scan = new Scan();
scan.setStartRow(ROWS[4]);
scan.setStopRow(ROWS[2]);
@ -244,7 +244,7 @@ public class TestBigDecimalColumnInterpreter {
try {
AggregationClient aClient = new AggregationClient(conf);
final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci =
new BigDecimalColumnInterpreter();
max = aClient.max(TEST_TABLE, ci, scan);
} catch (Exception e) {
max = BigDecimal.ZERO;
@ -261,7 +261,7 @@ public class TestBigDecimalColumnInterpreter {
Filter f = new PrefixFilter(Bytes.toBytes("foo:bar"));
scan.setFilter(f);
final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci =
new BigDecimalColumnInterpreter();
max = aClient.max(TEST_TABLE, ci, scan);
assertEquals(null, max);
}
@ -281,7 +281,7 @@ public class TestBigDecimalColumnInterpreter {
scan.setStartRow(HConstants.EMPTY_START_ROW);
scan.setStopRow(HConstants.EMPTY_END_ROW);
final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci =
new BigDecimalColumnInterpreter();
BigDecimal min = aClient.min(TEST_TABLE, ci, scan);
assertEquals(new BigDecimal("0.00"), min);
}
@ -297,7 +297,7 @@ public class TestBigDecimalColumnInterpreter {
scan.setStartRow(ROWS[5]);
scan.setStopRow(ROWS[15]);
final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci =
new BigDecimalColumnInterpreter();
BigDecimal min = aClient.min(TEST_TABLE, ci, scan);
assertEquals(new BigDecimal("5.00"), min);
}
@ -310,7 +310,7 @@ public class TestBigDecimalColumnInterpreter {
scan.setStartRow(HConstants.EMPTY_START_ROW);
scan.setStopRow(HConstants.EMPTY_END_ROW);
final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci =
new BigDecimalColumnInterpreter();
BigDecimal min = aClient.min(TEST_TABLE, ci, scan);
assertEquals(new BigDecimal("0.00"), min);
}
@ -323,7 +323,7 @@ public class TestBigDecimalColumnInterpreter {
scan.setStartRow(ROWS[6]);
scan.setStopRow(ROWS[7]);
final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci =
new BigDecimalColumnInterpreter();
BigDecimal min = aClient.min(TEST_TABLE, ci, scan);
assertEquals(new BigDecimal("0.60"), min);
}
@ -335,7 +335,7 @@ public class TestBigDecimalColumnInterpreter {
scan.setStartRow(ROWS[5]);
scan.setStopRow(ROWS[15]);
final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci =
new BigDecimalColumnInterpreter();
BigDecimal min = null;
try {
min = aClient.min(TEST_TABLE, ci, scan);
@ -354,7 +354,7 @@ public class TestBigDecimalColumnInterpreter {
scan.setStartRow(ROWS[4]);
scan.setStopRow(ROWS[2]);
final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci =
new BigDecimalColumnInterpreter();
try {
min = aClient.min(TEST_TABLE, ci, scan);
} catch (Throwable e) {
@ -370,7 +370,7 @@ public class TestBigDecimalColumnInterpreter {
scan.setStartRow(ROWS[6]);
scan.setStopRow(ROWS[6]);
final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci =
new BigDecimalColumnInterpreter();
BigDecimal min = null;
try {
min = aClient.min(TEST_TABLE, ci, scan);
@ -387,7 +387,7 @@ public class TestBigDecimalColumnInterpreter {
Filter f = new PrefixFilter(Bytes.toBytes("foo:bar"));
scan.setFilter(f);
final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci =
new BigDecimalColumnInterpreter();
BigDecimal min = null;
min = aClient.min(TEST_TABLE, ci, scan);
assertEquals(null, min);
@ -405,7 +405,7 @@ public class TestBigDecimalColumnInterpreter {
Scan scan = new Scan();
scan.addColumn(TEST_FAMILY, TEST_QUALIFIER);
final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci =
new BigDecimalColumnInterpreter();
BigDecimal sum = aClient.sum(TEST_TABLE, ci, scan);
assertEquals(new BigDecimal("190.00"), sum);
}
@ -421,7 +421,7 @@ public class TestBigDecimalColumnInterpreter {
scan.setStartRow(ROWS[5]);
scan.setStopRow(ROWS[15]);
final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci =
new BigDecimalColumnInterpreter();
BigDecimal sum = aClient.sum(TEST_TABLE, ci, scan);
assertEquals(new BigDecimal("95.00"), sum);
}
@ -432,7 +432,7 @@ public class TestBigDecimalColumnInterpreter {
Scan scan = new Scan();
scan.addFamily(TEST_FAMILY);
final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci =
new BigDecimalColumnInterpreter();
BigDecimal sum = aClient.sum(TEST_TABLE, ci, scan);
assertEquals(new BigDecimal("209.00"), sum); // 190 + 19
}
@ -445,7 +445,7 @@ public class TestBigDecimalColumnInterpreter {
scan.setStartRow(ROWS[6]);
scan.setStopRow(ROWS[7]);
final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci =
new BigDecimalColumnInterpreter();
BigDecimal sum = aClient.sum(TEST_TABLE, ci, scan);
assertEquals(new BigDecimal("6.60"), sum); // 6 + 60
}
@ -457,7 +457,7 @@ public class TestBigDecimalColumnInterpreter {
scan.setStartRow(ROWS[6]);
scan.setStopRow(ROWS[7]);
final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci =
new BigDecimalColumnInterpreter();
BigDecimal sum = null;
try {
sum = aClient.sum(TEST_TABLE, ci, scan);
@ -475,7 +475,7 @@ public class TestBigDecimalColumnInterpreter {
scan.setStartRow(ROWS[6]);
scan.setStopRow(ROWS[2]);
final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci =
new BigDecimalColumnInterpreter();
BigDecimal sum = null;
try {
sum = aClient.sum(TEST_TABLE, ci, scan);
@ -492,7 +492,7 @@ public class TestBigDecimalColumnInterpreter {
scan.addFamily(TEST_FAMILY);
scan.setFilter(f);
final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci =
new BigDecimalColumnInterpreter();
BigDecimal sum = null;
sum = aClient.sum(TEST_TABLE, ci, scan);
assertEquals(null, sum);
@ -510,7 +510,7 @@ public class TestBigDecimalColumnInterpreter {
Scan scan = new Scan();
scan.addColumn(TEST_FAMILY, TEST_QUALIFIER);
final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci =
new BigDecimalColumnInterpreter();
double avg = aClient.avg(TEST_TABLE, ci, scan);
assertEquals(9.5, avg, 0);
}
@ -526,7 +526,7 @@ public class TestBigDecimalColumnInterpreter {
scan.setStartRow(ROWS[5]);
scan.setStopRow(ROWS[15]);
final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci =
new BigDecimalColumnInterpreter();
double avg = aClient.avg(TEST_TABLE, ci, scan);
assertEquals(9.5, avg, 0);
}
@ -537,7 +537,7 @@ public class TestBigDecimalColumnInterpreter {
Scan scan = new Scan();
scan.addFamily(TEST_FAMILY);
final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci =
new BigDecimalColumnInterpreter();
double avg = aClient.avg(TEST_TABLE, ci, scan);
assertEquals(10.45, avg, 0.01);
}
@ -550,7 +550,7 @@ public class TestBigDecimalColumnInterpreter {
scan.setStartRow(ROWS[6]);
scan.setStopRow(ROWS[7]);
final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci =
new BigDecimalColumnInterpreter();
double avg = aClient.avg(TEST_TABLE, ci, scan);
assertEquals(6 + 0.60, avg, 0);
}
@ -560,7 +560,7 @@ public class TestBigDecimalColumnInterpreter {
AggregationClient aClient = new AggregationClient(conf);
Scan scan = new Scan();
final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci =
new BigDecimalColumnInterpreter();
Double avg = null;
try {
avg = aClient.avg(TEST_TABLE, ci, scan);
@ -578,7 +578,7 @@ public class TestBigDecimalColumnInterpreter {
scan.setStartRow(ROWS[5]);
scan.setStopRow(ROWS[1]);
final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci =
new BigDecimalColumnInterpreter();
Double avg = null;
try {
avg = aClient.avg(TEST_TABLE, ci, scan);
@ -595,7 +595,7 @@ public class TestBigDecimalColumnInterpreter {
Filter f = new PrefixFilter(Bytes.toBytes("foo:bar"));
scan.setFilter(f);
final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci =
new BigDecimalColumnInterpreter();
Double avg = null;
avg = aClient.avg(TEST_TABLE, ci, scan);
assertEquals(Double.NaN, avg, 0);
@ -613,7 +613,7 @@ public class TestBigDecimalColumnInterpreter {
Scan scan = new Scan();
scan.addColumn(TEST_FAMILY, TEST_QUALIFIER);
final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci =
new BigDecimalColumnInterpreter();
double std = aClient.std(TEST_TABLE, ci, scan);
assertEquals(5.766, std, 0.05d);
}
@ -630,7 +630,7 @@ public class TestBigDecimalColumnInterpreter {
scan.setStartRow(ROWS[5]);
scan.setStopRow(ROWS[15]);
final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci =
new BigDecimalColumnInterpreter();
double std = aClient.std(TEST_TABLE, ci, scan);
assertEquals(2.87, std, 0.05d);
}
@ -645,7 +645,7 @@ public class TestBigDecimalColumnInterpreter {
Scan scan = new Scan();
scan.addFamily(TEST_FAMILY);
final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci =
new BigDecimalColumnInterpreter();
double std = aClient.std(TEST_TABLE, ci, scan);
assertEquals(6.342, std, 0.05d);
}
@ -658,7 +658,7 @@ public class TestBigDecimalColumnInterpreter {
scan.setStartRow(ROWS[6]);
scan.setStopRow(ROWS[7]);
final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci =
new BigDecimalColumnInterpreter();
double std = aClient.std(TEST_TABLE, ci, scan);
System.out.println("std is:" + std);
assertEquals(0, std, 0.05d);
@ -671,7 +671,7 @@ public class TestBigDecimalColumnInterpreter {
scan.setStartRow(ROWS[6]);
scan.setStopRow(ROWS[17]);
final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci =
new BigDecimalColumnInterpreter();
Double std = null;
try {
std = aClient.std(TEST_TABLE, ci, scan);
@ -689,7 +689,7 @@ public class TestBigDecimalColumnInterpreter {
scan.setStartRow(ROWS[6]);
scan.setStopRow(ROWS[1]);
final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci =
new BigDecimalColumnInterpreter();
Double std = null;
try {
std = aClient.std(TEST_TABLE, ci, scan);

View File

@ -50,18 +50,18 @@ import org.junit.experimental.categories.Category;
public class TestDependentColumnFilter {
private final Log LOG = LogFactory.getLog(this.getClass());
private static final byte[][] ROWS = {
Bytes.toBytes("test1"),Bytes.toBytes("test2")
};
private static final byte[][] FAMILIES = {
Bytes.toBytes("familyOne"),Bytes.toBytes("familyTwo")
};
private static final long STAMP_BASE = System.currentTimeMillis();
private static final long[] STAMPS = {
STAMP_BASE-100, STAMP_BASE-200, STAMP_BASE-300
};
private static final byte[] QUALIFIER = Bytes.toBytes("qualifier");
private static final byte[][] BAD_VALS = {
Bytes.toBytes("bad1"), Bytes.toBytes("bad2"), Bytes.toBytes("bad3")
};
private static final byte[] MATCH_VAL = Bytes.toBytes("match");
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
@ -119,14 +119,14 @@ public class TestDependentColumnFilter {
}
private List<KeyValue> makeTestVals() {
List<KeyValue> testVals = new ArrayList<KeyValue>();
testVals.add(new KeyValue(ROWS[0], FAMILIES[0], QUALIFIER, STAMPS[0], BAD_VALS[0]));
testVals.add(new KeyValue(ROWS[0], FAMILIES[0], QUALIFIER, STAMPS[1], BAD_VALS[1]));
testVals.add(new KeyValue(ROWS[0], FAMILIES[1], QUALIFIER, STAMPS[1], BAD_VALS[2]));
testVals.add(new KeyValue(ROWS[0], FAMILIES[1], QUALIFIER, STAMPS[0], MATCH_VAL));
testVals.add(new KeyValue(ROWS[0], FAMILIES[1], QUALIFIER, STAMPS[2], BAD_VALS[2]));
return testVals;
}
/**

View File

@ -615,7 +615,7 @@ public class TestParseFilter {
@Test
public void testUnescapedQuote3 () throws IOException {
String filterString = " InclusiveStopFilter ('''')";
InclusiveStopFilter inclusiveStopFilter =
doTestFilter(filterString, InclusiveStopFilter.class);
byte [] stopRowKey = inclusiveStopFilter.getStopRowKey();

View File

@ -175,8 +175,8 @@ public class TestHFilePerformance extends AbstractHBaseTool {
if ("HFile".equals(fileType)){
HFileContextBuilder builder = new HFileContextBuilder()
.withCompression(AbstractHFileWriter.compressionByName(codecName))
.withBlockSize(minBlockSize);
if (cipherName != "none") {
byte[] cipherKey = new byte[AES.KEY_LENGTH];
new SecureRandom().nextBytes(cipherKey);

View File

@ -104,7 +104,7 @@ public class TestClockSkewDetection {
long warningSkew = c.getLong("hbase.master.warningclockskew", 1000);
try {
//Master Time > Region Server Time
LOG.debug("Test: Master Time > Region Server Time");
LOG.debug("regionServerStartup 2");
InetAddress ia2 = InetAddress.getLocalHost();

View File

@ -398,7 +398,7 @@ public class TestMajorCompaction {
private void createSmallerStoreFile(final HRegion region) throws IOException {
HRegionIncommon loader = new HRegionIncommon(region);
HBaseTestCase.addContent(loader, Bytes.toString(COLUMN_FAMILY), ("" +
"bbb").getBytes(), null);
loader.flushcache();
}

View File

@ -287,11 +287,11 @@ public class TestTags {
put1.add(fam, qual, HConstants.LATEST_TIMESTAMP, value1);
table.put(put1);
admin.flush(tableName);
// We are lacking an API for confirming flush request compaction.
// Just sleep for a short time. We won't be able to confirm flush
// completion but the test won't hang now or in the future if
// default compaction policy causes compaction between flush and
// when we go to confirm it.
Thread.sleep(1000);
put1 = new Put(row2);

View File

@ -511,7 +511,7 @@ public class TestReplicationSmallTests extends TestReplicationBase {
*/
@Test(timeout = 300000)
public void testVerifyListReplicatedTable() throws Exception {
LOG.info("testVerifyListReplicatedTable");
final String tName = "VerifyListReplicated_";
final String colFam = "cf1";

View File

@ -121,7 +121,7 @@ public class LoadTestTool extends AbstractHBaseTool {
public static final String OPT_INMEMORY = "in_memory";
public static final String OPT_USAGE_IN_MEMORY = "Tries to keep the HFiles of the CF " +
"inmemory as far as possible. Not guaranteed that reads are always served from inmemory";
public static final String OPT_GENERATOR = "generator";
public static final String OPT_GENERATOR_USAGE = "The class which generates load for the tool."

View File

@ -379,9 +379,9 @@ public class MultiThreadedReader extends MultiThreadedAction
numKeysVerified.incrementAndGet();
}
} else {
HRegionLocation hloc = connection.getRegionLocation(tableName,
get.getRow(), false);
String rowKey = Bytes.toString(get.getRow());
LOG.info("Key = " + rowKey + ", Region location: " + hloc);
if(isNullExpected) {
nullResult.incrementAndGet();

View File

@ -300,7 +300,7 @@ public class MultiThreadedUpdater extends MultiThreadedWriterBase {
} catch (IOException e) {
if (ignoreNonceConflicts && (e instanceof OperationConflictException)) {
LOG.info("Detected nonce conflict, ignoring: " + e.getMessage());
totalOpTimeMs.addAndGet(System.currentTimeMillis() - start);
return;
}
failedKeySet.add(keyBase);

View File

@ -59,75 +59,75 @@ public class TestHTablePool {
protected abstract PoolType getPoolType();
@Test
public void testTableWithStringName() throws Exception {
HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(),
Integer.MAX_VALUE, getPoolType());
String tableName = TABLENAME;
// Request a table from an empty pool
Table table = pool.getTable(tableName);
Assert.assertNotNull(table);
// Close table (returns table to the pool)
table.close();
// Request a table of the same name
Table sameTable = pool.getTable(tableName);
Assert.assertSame(
((HTablePool.PooledHTable) table).getWrappedTable(),
((HTablePool.PooledHTable) sameTable).getWrappedTable());
}
@Test
public void testTableWithByteArrayName() throws IOException {
HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(),
Integer.MAX_VALUE, getPoolType());
// Request a table from an empty pool
Table table = pool.getTable(TABLENAME);
Assert.assertNotNull(table);
// Close table (returns table to the pool)
table.close();
// Request a table of the same name
Table sameTable = pool.getTable(TABLENAME);
Assert.assertSame(
((HTablePool.PooledHTable) table).getWrappedTable(),
((HTablePool.PooledHTable) sameTable).getWrappedTable());
}
@Test
public void testTablesWithDifferentNames() throws IOException {
HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(),
Integer.MAX_VALUE, getPoolType());
// We add the class to the table name as the HBase cluster is reused
// during the tests: this gives naming unicity.
byte[] otherTable = Bytes.toBytes(
"OtherTable_" + getClass().getSimpleName()
);
TEST_UTIL.createTable(otherTable, HConstants.CATALOG_FAMILY);
// Request a table from an empty pool
Table table1 = pool.getTable(TABLENAME);
Table table2 = pool.getTable(otherTable);
Assert.assertNotNull(table2);
// Close tables (returns tables to the pool)
table1.close();
table2.close();
// Request tables of the same names
Table sameTable1 = pool.getTable(TABLENAME);
Table sameTable2 = pool.getTable(otherTable);
Assert.assertSame(
((HTablePool.PooledHTable) table1).getWrappedTable(),
((HTablePool.PooledHTable) sameTable1).getWrappedTable());
Assert.assertSame(
((HTablePool.PooledHTable) table2).getWrappedTable(),
((HTablePool.PooledHTable) sameTable2).getWrappedTable());
}
@Test
public void testProxyImplementationReturned() {
HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(),
@ -146,8 +146,8 @@ public class TestHTablePool {
HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(),
Integer.MAX_VALUE);
String tableName = TABLENAME;// Request a table from
// an
// empty pool
// get table will return proxy implementation
HTableInterface table = pool.getTable(tableName);
@ -168,8 +168,8 @@ public class TestHTablePool {
HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(),
Integer.MAX_VALUE);
String tableName = TABLENAME;// Request a table from
// an
// empty pool
// get table will return proxy implementation
final Table table = pool.getTable(tableName);
@ -213,154 +213,154 @@ public class TestHTablePool {
}
@Category({ClientTests.class, MediumTests.class})
public static class TestHTableReusablePool extends TestHTablePoolType {
@Override
protected PoolType getPoolType() {
return PoolType.Reusable;
}
@Test
public void testTableWithMaxSize() throws Exception {
HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(), 2,
getPoolType());
// Request tables from an empty pool
Table table1 = pool.getTable(TABLENAME);
Table table2 = pool.getTable(TABLENAME);
Table table3 = pool.getTable(TABLENAME);
// Close tables (returns tables to the pool)
table1.close();
table2.close();
// The pool should reject this one since it is already full
table3.close();
// Request tables of the same name
Table sameTable1 = pool.getTable(TABLENAME);
Table sameTable2 = pool.getTable(TABLENAME);
Table sameTable3 = pool.getTable(TABLENAME);
Assert.assertSame(
((HTablePool.PooledHTable) table1).getWrappedTable(),
((HTablePool.PooledHTable) sameTable1).getWrappedTable());
Assert.assertSame(
((HTablePool.PooledHTable) table2).getWrappedTable(),
((HTablePool.PooledHTable) sameTable2).getWrappedTable());
Assert.assertNotSame(
((HTablePool.PooledHTable) table3).getWrappedTable(),
((HTablePool.PooledHTable) sameTable3).getWrappedTable());
}
@Test
public void testCloseTablePool() throws IOException {
HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(), 4,
getPoolType());
HBaseAdmin admin = new HBaseAdmin(TEST_UTIL.getConfiguration());
if (admin.tableExists(TABLENAME)) {
admin.disableTable(TABLENAME);
admin.deleteTable(TABLENAME);
}
HTableDescriptor tableDescriptor = new HTableDescriptor(TableName.valueOf(TABLENAME));
tableDescriptor.addFamily(new HColumnDescriptor("randomFamily"));
admin.createTable(tableDescriptor);
// Request tables from an empty pool
Table[] tables = new Table[4];
for (int i = 0; i < 4; ++i) {
tables[i] = pool.getTable(TABLENAME);
}
pool.closeTablePool(TABLENAME);
for (int i = 0; i < 4; ++i) {
tables[i].close();
}
Assert.assertEquals(4,
pool.getCurrentPoolSize(TABLENAME));
pool.closeTablePool(TABLENAME);
Assert.assertEquals(0,
pool.getCurrentPoolSize(TABLENAME));
}
}
@Category({ClientTests.class, MediumTests.class})
public static class TestHTableThreadLocalPool extends TestHTablePoolType {
@Override
protected PoolType getPoolType() {
return PoolType.ThreadLocal;
}
@Test
public void testTableWithMaxSize() throws Exception {
HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(), 2,
getPoolType());
// Request tables from an empty pool
Table table1 = pool.getTable(TABLENAME);
Table table2 = pool.getTable(TABLENAME);
Table table3 = pool.getTable(TABLENAME);
// Close tables (returns tables to the pool)
table1.close();
table2.close();
// The pool should not reject this one since the number of threads
// <= 2
table3.close();
// Request tables of the same name
Table sameTable1 = pool.getTable(TABLENAME);
Table sameTable2 = pool.getTable(TABLENAME);
Table sameTable3 = pool.getTable(TABLENAME);
Assert.assertSame(
((HTablePool.PooledHTable) table3).getWrappedTable(),
((HTablePool.PooledHTable) sameTable1).getWrappedTable());
Assert.assertSame(
((HTablePool.PooledHTable) table3).getWrappedTable(),
((HTablePool.PooledHTable) sameTable2).getWrappedTable());
Assert.assertSame(
((HTablePool.PooledHTable) table3).getWrappedTable(),
((HTablePool.PooledHTable) sameTable3).getWrappedTable());
}
@Test
public void testCloseTablePool() throws IOException {
HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(), 4,
getPoolType());
HBaseAdmin admin = new HBaseAdmin(TEST_UTIL.getConfiguration());
if (admin.tableExists(TABLENAME)) {
admin.disableTable(TABLENAME);
admin.deleteTable(TABLENAME);
}
HTableDescriptor tableDescriptor = new HTableDescriptor(TableName.valueOf(TABLENAME));
tableDescriptor.addFamily(new HColumnDescriptor("randomFamily"));
admin.createTable(tableDescriptor);
// Request tables from an empty pool
Table[] tables = new Table[4];
for (int i = 0; i < 4; ++i) {
tables[i] = pool.getTable(TABLENAME);
}
pool.closeTablePool(TABLENAME);
for (int i = 0; i < 4; ++i) {
tables[i].close();
}
Assert.assertEquals(1,
pool.getCurrentPoolSize(TABLENAME));
pool.closeTablePool(TABLENAME);
Assert.assertEquals(0,
pool.getCurrentPoolSize(TABLENAME));
}
}
}