From 58b6b24c2999511daefa0dff2e2ae745cba24e35 Mon Sep 17 00:00:00 2001
From: stack
Date: Fri, 28 Nov 2014 20:50:02 -0800
Subject: [PATCH] HBASE-12519 Remove tabs used as whitespace (Varun Saxena)

---
 ...cludePrivateAnnotationsStandardDoclet.java |   2 +-
 .../tools/RootDocProcessor.java               | 187 ++++-----
 .../tools/StabilityOptions.java               |  16 +-
 .../apache/hadoop/hbase/HTableDescriptor.java |   2 +-
 .../apache/hadoop/hbase/client/HTable.java    |   4 +-
 .../client/UnmodifyableHTableDescriptor.java  |   2 +-
 .../client/coprocessor/AggregationClient.java |  11 +-
 .../hbase/filter/DependentColumnFilter.java   |  12 +-
 .../hbase/protobuf/RequestConverter.java      |   2 +-
 .../org/apache/hadoop/hbase/io/TimeRange.java |  12 +-
 .../hadoop/hbase/util/CollectionUtils.java    |   6 +-
 .../apache/hadoop/hbase/util/JenkinsHash.java |   2 +-
 .../hadoop/hbase/TestHBaseConfiguration.java  |   4 +-
 ...rationTestBigLinkedListWithVisibility.java |   4 +-
 .../codec/prefixtree/PrefixTreeBlockMeta.java |   6 +-
 .../decode/PrefixTreeArraySearcher.java       |   2 +-
 .../encode/column/ColumnSectionWriter.java    |   4 +-
 .../prefixtree/encode/other/LongEncoder.java  |   4 +-
 .../prefixtree/encode/tokenize/Tokenizer.java |   2 +-
 .../encode/tokenize/TokenizerNode.java        |   6 +-
 .../tokenize/TokenizerRowSearchPosition.java  |   8 +-
 .../hadoop/hbase/util/vint/UFIntTool.java     |  14 +-
 .../row/TestPrefixTreeSearcher.java           |   2 +-
 .../codec/prefixtree/row/TestRowEncoder.java  |   4 +-
 .../data/TestRowDataComplexQualifiers.java    |   2 +-
 .../row/data/TestRowDataDeeper.java           |  48 +--
 .../data/TestRowDataDifferentTimestamps.java  |  68 ++--
 .../row/data/TestRowDataExerciseFInts.java    |  84 ++--
 .../prefixtree/row/data/TestRowDataNub.java   |  46 +--
 .../TestRowDataQualifierByteOrdering.java     |  18 +-
 .../row/data/TestRowDataSearcherRowMiss.java  |  24 +-
 .../row/data/TestRowDataSingleQualifier.java  |  12 +-
 .../row/data/TestRowDataTrivial.java          |  12 +-
 .../prefixtree/row/data/TestRowDataUrls.java  |   2 +-
 .../row/data/TestRowDataUrlsExample.java      |  54 +--
 .../model/StorageClusterVersionModel.java     |  58 +--
 .../hbase/rest/model/TableListModel.java      | 108 +++---
 .../hadoop/hbase/rest/model/TableModel.java   |  72 ++--
 .../hadoop/hbase/rest/model/VersionModel.java |  84 ++--
 .../rest/provider/JAXBContextResolver.java    |  36 +-
 .../PlainTextMessageBodyProducer.java         |  24 +-
 .../producer/ProtobufMessageBodyProducer.java |  46 +--
 .../hbase/ipc/HBaseRPCErrorHandler.java       |  10 +-
 .../master/RegionPlacementMaintainer.java     |   2 +-
 .../SnapshotOfRegionAssignmentFromMeta.java   |   2 +-
 .../hbase/master/cleaner/CleanerChore.java    |   6 +-
 .../hbase/regionserver/MemStoreFlusher.java   |   6 +-
 .../hbase/regionserver/SplitLogWorker.java    |   2 +-
 .../handler/WALSplitterHandler.java           |   2 +-
 .../apache/hadoop/hbase/util/HBaseFsck.java   |  12 +-
 .../hbase/zookeeper/RegionServerTracker.java  |   2 +-
 .../hadoop/hbase/HBaseTestingUtility.java     |   4 +-
 .../TestBigDecimalColumnInterpreter.java      |  68 ++--
 .../filter/TestDependentColumnFilter.java     |  22 +-
 .../hadoop/hbase/filter/TestParseFilter.java  |   2 +-
 .../hbase/io/hfile/TestHFilePerformance.java  |   4 +-
 .../hbase/master/TestClockSkewDetection.java  |   2 +-
 .../regionserver/TestMajorCompaction.java     |   2 +-
 .../hadoop/hbase/regionserver/TestTags.java   |  10 +-
 .../TestReplicationSmallTests.java            |   2 +-
 .../hadoop/hbase/util/LoadTestTool.java       |   2 +-
 .../hbase/util/MultiThreadedReader.java       |   6 +-
 .../hbase/util/MultiThreadedUpdater.java      |   2 +-
 .../hadoop/hbase/thrift2/TestHTablePool.java  | 358 +++++++++---------
 64 files changed, 810 insertions(+), 834 deletions(-)

diff --git a/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/tools/ExcludePrivateAnnotationsStandardDoclet.java b/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/tools/ExcludePrivateAnnotationsStandardDoclet.java
index eb03a21fa99..f93e13f728a 100644
--- a/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/tools/ExcludePrivateAnnotationsStandardDoclet.java
+++ b/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/tools/ExcludePrivateAnnotationsStandardDoclet.java
@@ -39,7 +39,7 @@ public class ExcludePrivateAnnotationsStandardDoclet {
 
   public static boolean start(RootDoc root) {
     System.out.println(
-	ExcludePrivateAnnotationsStandardDoclet.class.getSimpleName());
+      ExcludePrivateAnnotationsStandardDoclet.class.getSimpleName());
     return Standard.start(RootDocProcessor.process(root));
   }
 
diff --git a/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/tools/RootDocProcessor.java b/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/tools/RootDocProcessor.java
index 78e81632070..2ea1022a24d 100644
--- a/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/tools/RootDocProcessor.java
+++ b/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/tools/RootDocProcessor.java
@@ -65,10 +65,10 @@ class RootDocProcessor {
       return getProxy(obj);
     } else if (obj instanceof Object[]) {
       Class componentType = type.isArray() ? type.getComponentType()
-	  : cls.getComponentType();
+          : cls.getComponentType();
       Object[] array = (Object[]) obj;
       Object[] newArray = (Object[]) Array.newInstance(componentType,
-	  array.length);
+          array.length);
       for (int i = 0; i < array.length; ++i) {
         newArray[i] = process(array[i], componentType);
       }
@@ -98,116 +98,99 @@ class RootDocProcessor {
     }
 
     @Override
-    public Object invoke(Object proxy, Method method, Object[] args)
-	throws Throwable {
+    public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
       String methodName = method.getName();
       if (target instanceof Doc) {
-	if (methodName.equals("isIncluded")) {
-	  Doc doc = (Doc) target;
-	  return !exclude(doc) && doc.isIncluded();
-	}
-	if (target instanceof RootDoc) {
-	  if (methodName.equals("classes")) {
-	    return filter(((RootDoc) target).classes(), ClassDoc.class);
-	  } else if (methodName.equals("specifiedClasses")) {
-	    return filter(((RootDoc) target).specifiedClasses(), ClassDoc.class);
-	  } else if (methodName.equals("specifiedPackages")) {
-	    return filter(((RootDoc) target).specifiedPackages(), PackageDoc.class);
-	  }
-	} else if (target instanceof ClassDoc) {
-	  if (isFiltered(args)) {
-	    if (methodName.equals("methods")) {
-	      return filter(((ClassDoc) target).methods(true), MethodDoc.class);
-	    } else if (methodName.equals("fields")) {
-	      return filter(((ClassDoc) target).fields(true), FieldDoc.class);
-	    } else if (methodName.equals("innerClasses")) {
-	      return filter(((ClassDoc) target).innerClasses(true),
-		  ClassDoc.class);
-	    } else if (methodName.equals("constructors")) {
-	      return filter(((ClassDoc) target).constructors(true),
-		  ConstructorDoc.class);
-	    }
-	  }
-	} else if (target instanceof PackageDoc) {
-	  if (methodName.equals("allClasses")) {
-	    if (isFiltered(args)) {
-	      return filter(((PackageDoc) target).allClasses(true),
-		  ClassDoc.class);
-	    } else {
-	      return filter(((PackageDoc) target).allClasses(), ClassDoc.class);
-	    }
-	  } else if (methodName.equals("annotationTypes")) {
-	    return filter(((PackageDoc) target).annotationTypes(),
-		AnnotationTypeDoc.class);
-	  } else if (methodName.equals("enums")) {
-	    return filter(((PackageDoc) target).enums(),
-		ClassDoc.class);
-	  } else if (methodName.equals("errors")) {
-	    return filter(((PackageDoc) target).errors(),
-		ClassDoc.class);
-	  } else if (methodName.equals("exceptions")) {
-	    return filter(((PackageDoc) target).exceptions(),
-		ClassDoc.class);
-	  } else if (methodName.equals("interfaces")) {
-	    return filter(((PackageDoc) target).interfaces(),
-		ClassDoc.class);
-	  } else if (methodName.equals("ordinaryClasses")) {
-	    return filter(((PackageDoc) target).ordinaryClasses(),
-		ClassDoc.class);
-	  }
-	}
+        if (methodName.equals("isIncluded")) {
+          Doc doc = (Doc) target;
+          return !exclude(doc) && doc.isIncluded();
+        }
+        if (target instanceof RootDoc) {
+          if (methodName.equals("classes")) {
+            return filter(((RootDoc) target).classes(), ClassDoc.class);
+          } else if (methodName.equals("specifiedClasses")) {
+            return filter(((RootDoc) target).specifiedClasses(), ClassDoc.class);
+          } else if (methodName.equals("specifiedPackages")) {
+            return filter(((RootDoc) target).specifiedPackages(), PackageDoc.class);
+          }
+        } else if (target instanceof ClassDoc) {
+          if (isFiltered(args)) {
+            if (methodName.equals("methods")) {
+              return filter(((ClassDoc) target).methods(true), MethodDoc.class);
+            } else if (methodName.equals("fields")) {
+              return filter(((ClassDoc) target).fields(true), FieldDoc.class);
+            } else if (methodName.equals("innerClasses")) {
+              return filter(((ClassDoc) target).innerClasses(true), ClassDoc.class);
+            } else if (methodName.equals("constructors")) {
+              return filter(((ClassDoc) target).constructors(true), ConstructorDoc.class);
+            }
+          }
+        } else if (target instanceof PackageDoc) {
+          if (methodName.equals("allClasses")) {
+            if (isFiltered(args)) {
+              return filter(((PackageDoc) target).allClasses(true), ClassDoc.class);
+            } else {
+              return filter(((PackageDoc) target).allClasses(), ClassDoc.class);
+            }
+          } else if (methodName.equals("annotationTypes")) {
+            return filter(((PackageDoc) target).annotationTypes(), AnnotationTypeDoc.class);
+          } else if (methodName.equals("enums")) {
+            return filter(((PackageDoc) target).enums(), ClassDoc.class);
+          } else if (methodName.equals("errors")) {
+            return filter(((PackageDoc) target).errors(), ClassDoc.class);
+          } else if (methodName.equals("exceptions")) {
+            return filter(((PackageDoc) target).exceptions(), ClassDoc.class);
+          } else if (methodName.equals("interfaces")) {
+            return filter(((PackageDoc) target).interfaces(), ClassDoc.class);
+          } else if (methodName.equals("ordinaryClasses")) {
+            return filter(((PackageDoc) target).ordinaryClasses(), ClassDoc.class);
+          }
+        }
       }
       if (args != null) {
-	if (methodName.equals("compareTo") || methodName.equals("equals")
-	    || methodName.equals("overrides")
-	    || methodName.equals("subclassOf")) {
-	  args[0] = unwrap(args[0]);
-	}
+        if (methodName.equals("compareTo") || methodName.equals("equals")
+            || methodName.equals("overrides") || methodName.equals("subclassOf")) {
+          args[0] = unwrap(args[0]);
+        }
       }
       try {
-	return process(method.invoke(target, args), method.getReturnType());
+        return process(method.invoke(target, args), method.getReturnType());
       } catch (InvocationTargetException e) {
-	throw e.getTargetException();
+        throw e.getTargetException();
       }
     }
 
     private static boolean exclude(Doc doc) {
       AnnotationDesc[] annotations = null;
       if (doc instanceof ProgramElementDoc) {
-	annotations = ((ProgramElementDoc) doc).annotations();
+        annotations = ((ProgramElementDoc) doc).annotations();
      } else if (doc instanceof PackageDoc) {
-	annotations = ((PackageDoc) doc).annotations();
+        annotations = ((PackageDoc) doc).annotations();
      }
      if (annotations != null) {
-	for (AnnotationDesc annotation : annotations) {
-	  String qualifiedTypeName = annotation.annotationType().qualifiedTypeName();
-	  if (qualifiedTypeName.equals(
-	      InterfaceAudience.Private.class.getCanonicalName())
-	      || qualifiedTypeName.equals(
-	      InterfaceAudience.LimitedPrivate.class.getCanonicalName())) {
-	    return true;
-	  }
-	  if (stability.equals(StabilityOptions.EVOLVING_OPTION)) {
-	    if (qualifiedTypeName.equals(
-		InterfaceStability.Unstable.class.getCanonicalName())) {
-	      return true;
-	    }
-	  }
-	  if (stability.equals(StabilityOptions.STABLE_OPTION)) {
-	    if (qualifiedTypeName.equals(
-		InterfaceStability.Unstable.class.getCanonicalName())
-		|| qualifiedTypeName.equals(
-		InterfaceStability.Evolving.class.getCanonicalName())) {
-	      return true;
-	    }
-	  }
-	}
         for (AnnotationDesc annotation : annotations) {
-	  String qualifiedTypeName =
-	      annotation.annotationType().qualifiedTypeName();
-	  if (qualifiedTypeName.equals(
-	      InterfaceAudience.Public.class.getCanonicalName())) {
+          String qualifiedTypeName = annotation.annotationType().qualifiedTypeName();
+          if (qualifiedTypeName.equals(InterfaceAudience.Private.class.getCanonicalName())
+              || qualifiedTypeName
+                .equals(InterfaceAudience.LimitedPrivate.class.getCanonicalName())) {
+            return true;
+          }
+          if (stability.equals(StabilityOptions.EVOLVING_OPTION)) {
+            if (qualifiedTypeName.equals(InterfaceStability.Unstable.class.getCanonicalName())) {
+              return true;
+            }
+          }
+          if (stability.equals(StabilityOptions.STABLE_OPTION)) {
+            if (qualifiedTypeName.equals(InterfaceStability.Unstable.class.getCanonicalName())
+                || qualifiedTypeName.equals(InterfaceStability.Evolving.class.getCanonicalName())) {
+              return true;
+            }
+          }
+        }
+        for (AnnotationDesc annotation : annotations) {
+          String qualifiedTypeName = annotation.annotationType().qualifiedTypeName();
+          if (qualifiedTypeName.equals(InterfaceAudience.Public.class.getCanonicalName())) {
            return false;
          }
        }
@@ -220,28 +203,24 @@ class RootDocProcessor {
 
     private static Object[] filter(Doc[] array, Class componentType) {
       if (array == null || array.length == 0) {
-	return array;
+        return array;
       }
       List<Object> list = new ArrayList<Object>(array.length);
       for (Doc entry : array) {
-	if (!exclude(entry)) {
-	  list.add(process(entry, componentType));
-	}
+        if (!exclude(entry)) {
+          list.add(process(entry, componentType));
+        }
       }
-      return list.toArray((Object[]) Array.newInstance(componentType, list
-	  .size()));
+      return list.toArray((Object[]) Array.newInstance(componentType, list.size()));
     }
 
     private Object unwrap(Object proxy) {
-      if (proxy instanceof Proxy)
-	return ((ExcludeHandler) Proxy.getInvocationHandler(proxy)).target;
+      if (proxy instanceof Proxy) return ((ExcludeHandler) Proxy.getInvocationHandler(proxy)).target;
       return proxy;
     }
 
     private boolean isFiltered(Object[] args) {
       return args != null && Boolean.TRUE.equals(args[0]);
     }
-
   }
-
 }
diff --git a/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/tools/StabilityOptions.java b/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/tools/StabilityOptions.java
index f3169bab1d2..809d96c884e 100644
--- a/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/tools/StabilityOptions.java
+++ b/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/tools/StabilityOptions.java
@@ -35,16 +35,15 @@ class StabilityOptions {
     return null;
   }
 
-  public static void validOptions(String[][] options,
-      DocErrorReporter reporter) {
+  public static void validOptions(String[][] options, DocErrorReporter reporter) {
     for (int i = 0; i < options.length; i++) {
       String opt = options[i][0].toLowerCase();
       if (opt.equals(UNSTABLE_OPTION)) {
-	RootDocProcessor.stability = UNSTABLE_OPTION;
+        RootDocProcessor.stability = UNSTABLE_OPTION;
       } else if (opt.equals(EVOLVING_OPTION)) {
-	RootDocProcessor.stability = EVOLVING_OPTION;
+        RootDocProcessor.stability = EVOLVING_OPTION;
       } else if (opt.equals(STABLE_OPTION)) {
-	RootDocProcessor.stability = STABLE_OPTION;
+        RootDocProcessor.stability = STABLE_OPTION;
       }
     }
   }
@@ -53,9 +52,9 @@
     List<String[]> optionsList = new ArrayList<String[]>();
     for (int i = 0; i < options.length; i++) {
       if (!options[i][0].equalsIgnoreCase(UNSTABLE_OPTION)
-	  && !options[i][0].equalsIgnoreCase(EVOLVING_OPTION)
-	  && !options[i][0].equalsIgnoreCase(STABLE_OPTION)) {
-	optionsList.add(options[i]);
+          && !options[i][0].equalsIgnoreCase(EVOLVING_OPTION)
+          && !options[i][0].equalsIgnoreCase(STABLE_OPTION)) {
+        optionsList.add(options[i]);
       }
     }
     String[][] filteredOptions = new String[optionsList.size()][];
@@ -65,5 +64,4 @@
     }
     return filteredOptions;
   }
-
 }
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
index bfd33009d61..0ae05388afa 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
@@ -973,7 +973,7 @@ public class HTableDescriptor implements Comparable<HTableDescriptor> {
    * This compares the content of the two descriptors and not the reference.
    *
    * @return 0 if the contents of the descriptors are exactly matching,
-   * 		1 if there is a mismatch in the contents
+   *         1 if there is a mismatch in the contents
    */
   @Override
   public int compareTo(@Nonnull final HTableDescriptor other) {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
index a52fe392522..1e473f582de 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
@@ -428,7 +428,7 @@ public class HTable implements HTableInterface, RegionLocator {
    * @param tableName Name of table to check.
    * @return {@code true} if table is online.
    * @throws IOException if a remote or network exception occurs
-   * @deprecated use	{@link HBaseAdmin#isTableEnabled(byte[])}
+   * @deprecated use {@link HBaseAdmin#isTableEnabled(byte[])}
    */
   @Deprecated
   public static boolean isTableEnabled(Configuration conf, String tableName)
@@ -442,7 +442,7 @@ public class HTable implements HTableInterface, RegionLocator {
    * @param tableName Name of table to check.
    * @return {@code true} if table is online.
   * @throws IOException if a remote or network exception occurs
-   * @deprecated use	{@link HBaseAdmin#isTableEnabled(byte[])}
+   * @deprecated use {@link HBaseAdmin#isTableEnabled(byte[])}
    */
   @Deprecated
   public static boolean isTableEnabled(Configuration conf, byte[] tableName)
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java
index 322a55bf3e2..55a81d62ad6 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java
@@ -32,7 +32,7 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
 public class UnmodifyableHTableDescriptor extends HTableDescriptor {
   /** Default constructor */
   public UnmodifyableHTableDescriptor() {
-	  super();
+    super();
   }
 
   /*
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java
index 7b7cd16ddb9..5421e57a43f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java
@@ -180,12 +180,11 @@
    */
   private void validateParameters(Scan scan, boolean canFamilyBeAbsent) throws IOException {
     if (scan == null
-        || (Bytes.equals(scan.getStartRow(), scan.getStopRow()) && !Bytes
-            .equals(scan.getStartRow(), HConstants.EMPTY_START_ROW))
-        || ((Bytes.compareTo(scan.getStartRow(), scan.getStopRow()) > 0) &&
-        	!Bytes.equals(scan.getStopRow(), HConstants.EMPTY_END_ROW))) {
-      throw new IOException(
-          "Agg client Exception: Startrow should be smaller than Stoprow");
+        || (Bytes.equals(scan.getStartRow(), scan.getStopRow()) && !Bytes.equals(
+            scan.getStartRow(), HConstants.EMPTY_START_ROW))
+        || ((Bytes.compareTo(scan.getStartRow(), scan.getStopRow()) > 0) && !Bytes.equals(
+            scan.getStopRow(), HConstants.EMPTY_END_ROW))) {
+      throw new IOException("Agg client Exception: Startrow should be smaller than Stoprow");
     } else if (!canFamilyBeAbsent) {
       if (scan.getFamilyMap().size() != 1) {
         throw new IOException("There must be only one family.");
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
index 4fe052266a1..6d198426ac7 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
@@ -68,8 +68,8 @@ public class DependentColumnFilter extends CompareFilter {
    * @param valueComparator comparator
    */
   public DependentColumnFilter(final byte [] family, final byte[] qualifier,
-		  final boolean dropDependentColumn, final CompareOp valueCompareOp,
-	      final ByteArrayComparable valueComparator) {
+      final boolean dropDependentColumn, final CompareOp valueCompareOp,
+      final ByteArrayComparable valueComparator) {
     // set up the comparator
     super(valueCompareOp, valueComparator);
     this.columnFamily = family;
@@ -136,19 +136,19 @@ public class DependentColumnFilter extends CompareFilter {
   @Override
   public ReturnCode filterKeyValue(Cell c) {
     // Check if the column and qualifier match
-  	if (!CellUtil.matchingColumn(c, this.columnFamily, this.columnQualifier)) {
+    if (!CellUtil.matchingColumn(c, this.columnFamily, this.columnQualifier)) {
         // include non-matches for the time being, they'll be discarded afterwards
         return ReturnCode.INCLUDE;
-  	}
+    }
     // If it doesn't pass the op, skip it
     if (comparator != null
         && doCompare(compareOp, comparator, c.getValueArray(), c.getValueOffset(),
           c.getValueLength()))
       return ReturnCode.SKIP;
-	
+
     stampSet.add(c.getTimestamp());
     if(dropDependentColumn) {
-    	return ReturnCode.SKIP;
+      return ReturnCode.SKIP;
     }
     return ReturnCode.INCLUDE;
   }
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java
index d558fce8548..2d8c53ec7f8 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java
@@ -1049,7 +1049,7 @@ public final class RequestConverter {
   public static MoveRegionRequest buildMoveRegionRequest(
       final byte [] encodedRegionName, final byte [] destServerName) throws
       DeserializationException {
-	MoveRegionRequest.Builder builder = MoveRegionRequest.newBuilder();
+    MoveRegionRequest.Builder builder = MoveRegionRequest.newBuilder();
     builder.setRegion(
       buildRegionSpecifier(RegionSpecifierType.ENCODED_REGION_NAME,encodedRegionName));
     if (destServerName != null) {
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java
index b23b2f1fabd..8c16389aaf0 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java
@@ -61,7 +61,7 @@ public class TimeRange {
    * @param minStamp the minimum timestamp value, inclusive
    */
   public TimeRange(byte [] minStamp) {
-  	this.minStamp = Bytes.toLong(minStamp);
+    this.minStamp = Bytes.toLong(minStamp);
   }
 
   /**
@@ -126,8 +126,8 @@ public class TimeRange {
    * @return true if within TimeRange, false if not
    */
   public boolean withinTimeRange(byte [] bytes, int offset) {
-  	if(allTime) return true;
-  	return withinTimeRange(Bytes.toLong(bytes, offset));
+    if(allTime) return true;
+    return withinTimeRange(Bytes.toLong(bytes, offset));
   }
 
   /**
@@ -139,9 +139,9 @@ public class TimeRange {
    * @return true if within TimeRange, false if not
    */
   public boolean withinTimeRange(long timestamp) {
-  	if(allTime) return true;
-  	// check if >= minStamp
-  	return (minStamp <= timestamp && timestamp < maxStamp);
+    if(allTime) return true;
+    // check if >= minStamp
+    return (minStamp <= timestamp && timestamp < maxStamp);
   }
 
   /**
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CollectionUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CollectionUtils.java
index b15b2e0bb54..b7b9bebc164 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CollectionUtils.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CollectionUtils.java
@@ -43,7 +43,7 @@ public class CollectionUtils {
     return in;
   }
 
-	/************************ size ************************************/
+  /************************ size ************************************/
 
   public static <T> int nullSafeSize(Collection<T> collection) {
     if (collection == null) {
@@ -56,7 +56,7 @@
     return nullSafeSize(a) == nullSafeSize(b);
   }
 
-	/*************************** empty ****************************************/
+  /*************************** empty ****************************************/
 
   public static <T> boolean isEmpty(Collection<T> collection) {
     return collection == null || collection.isEmpty();
@@ -66,7 +66,7 @@
     return !isEmpty(collection);
   }
 
-	/************************ first/last **************************/
+  /************************ first/last **************************/
 
   public static <T> T getFirst(Collection<T> collection) {
     if (CollectionUtils.isEmpty(collection)) {
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JenkinsHash.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JenkinsHash.java
index 42d8b11726a..359e7a90c2a 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JenkinsHash.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JenkinsHash.java
@@ -164,7 +164,7 @@ public class JenkinsHash extends Hash {
     //-------------------------------- last block: affect all 32 bits of (c)
     switch (length) { // all the case statements fall through
     case 12:
-    	c += ((key[offset + 11] & BYTE_MASK) << 24);
+      c += ((key[offset + 11] & BYTE_MASK) << 24);
     case 11:
       c += ((key[offset + 10] & BYTE_MASK) << 16);
     case 10:
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java
index 90e9e192a9d..99e4a33c7e0 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java
@@ -201,7 +201,7 @@ public class TestHBaseConfiguration {
 
       hadoopClassesAvailable = true;
       LOG.info("Credential provider classes have been" +
-      		" loaded and initialized successfully through reflection.");
+          " loaded and initialized successfully through reflection.");
       return true;
     }
 
@@ -280,7 +280,7 @@ public class TestHBaseConfiguration {
       List<Object> providers = getCredentialProviders(conf);
       if (null == providers) {
         throw new IOException("Could not fetch any CredentialProviders, " +
-        		"is the implementation available?");
+            "is the implementation available?");
       }
 
       Object provider = providers.get(0);
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java
index 106be611e4a..dc517a5c558 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java
@@ -583,8 +583,8 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB
       if (args.length < 5) {
         System.err
             .println("Usage: Loop <num iterations> " +
-            		"<num mappers> <num nodes per mapper> <output dir> " +
-            		"<num reducers> [<width> <wrap multiplier>]");
+                "<num mappers> <num nodes per mapper> <output dir> " +
+                "<num reducers> [<width> <wrap multiplier>]");
         return 1;
       }
       LOG.info("Running Loop with args:" + Arrays.deepToString(args));
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeBlockMeta.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeBlockMeta.java
index 620a6efa8ff..8410cf3653d 100644
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeBlockMeta.java
+++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeBlockMeta.java
@@ -45,7 +45,7 @@ public class PrefixTreeBlockMeta {
   public static final int MAX_FAMILY_LENGTH = Byte.MAX_VALUE;// hard-coded in KeyValue
 
   public static final int
-	NUM_LONGS = 2,
+    NUM_LONGS = 2,
     NUM_INTS = 28,
     NUM_SHORTS = 0,//keyValueTypeWidth not persisted
     NUM_SINGLE_BYTES = 2,
@@ -135,7 +135,7 @@ public class PrefixTreeBlockMeta {
   }
 
 
-	/**************** operate on each field **********************/
+  /**************** operate on each field **********************/
 
   public int calculateNumMetaBytes(){
     int numBytes = 0;
@@ -339,7 +339,7 @@ public class PrefixTreeBlockMeta {
       position += UVIntTool.numBytes(numUniqueTags);
   }
 
-	//TODO method that can read directly from ByteBuffer instead of InputStream
+  //TODO method that can read directly from ByteBuffer instead of InputStream
 
   /*************** methods *************************/
 
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArraySearcher.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArraySearcher.java
index 8ea6e8550d4..ec54c2aea6f 100644
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArraySearcher.java
+++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArraySearcher.java
@@ -306,7 +306,7 @@ public class PrefixTreeArraySearcher extends PrefixTreeArrayReversibleScanner im
   }
 
 
-	/****************** complete seek when token mismatch ******************/
+  /****************** complete seek when token mismatch ******************/
 
   /**
    * @param searcherIsAfterInputKey <0: input key is before the searcher's position
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/column/ColumnSectionWriter.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/column/ColumnSectionWriter.java
index 47933353a9f..3ceae633bfb 100644
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/column/ColumnSectionWriter.java
+++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/column/ColumnSectionWriter.java
@@ -71,7 +71,7 @@ public class ColumnSectionWriter {
   private List<Integer> outputArrayOffsets;
 
 
-	/*********************** construct *********************/
+  /*********************** construct *********************/
 
   public ColumnSectionWriter() {
     this.nonLeaves = Lists.newArrayList();
@@ -100,7 +100,7 @@ public class ColumnSectionWriter {
   }
 
 
-	/****************** methods *******************************/
+  /****************** methods *******************************/
 
   public ColumnSectionWriter compile() {
     if (this.nodeType == ColumnNodeType.FAMILY) {
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/other/LongEncoder.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/other/LongEncoder.java
index c6ae3477da2..3291d7296da 100644
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/other/LongEncoder.java
+++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/other/LongEncoder.java
@@ -75,7 +75,7 @@ public class LongEncoder {
   }
 
 
-	/************* methods ***************************/
+  /************* methods ***************************/
 
   public void add(long timestamp) {
     uniqueValues.add(timestamp);
@@ -158,7 +158,7 @@ public class LongEncoder {
   }
 
 
-	/******************** get/set **************************/
+  /******************** get/set **************************/
 
   public long getMin() {
     return min;
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/Tokenizer.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/Tokenizer.java
index cf13add94e1..75a11addb3b 100644
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/Tokenizer.java
+++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/Tokenizer.java
@@ -179,7 +179,7 @@ public class Tokenizer{
   }
 
 
-	/********************** write ***************************/
+  /********************** write ***************************/
 
   public Tokenizer setNodeFirstInsertionIndexes() {
     root.setInsertionIndexes(0);
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/TokenizerNode.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/TokenizerNode.java
index eaf8ab767ec..e51d5be2bf3 100644
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/TokenizerNode.java
+++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/TokenizerNode.java
@@ -289,7 +289,7 @@ public class TokenizerNode{
   }
 
 
-	/************************ byte[] utils *************************/
+  /************************ byte[] utils *************************/
 
   protected boolean partiallyMatchesToken(ByteRange bytes) {
     return numIdenticalBytes(bytes) > 0;
@@ -304,7 +304,7 @@ public class TokenizerNode{
   }
 
 
-	/***************** moving nodes around ************************/
+  /***************** moving nodes around ************************/
 
   public void appendNodesToExternalList(List<TokenizerNode> appendTo, boolean includeNonLeaves,
       boolean includeLeaves) {
@@ -462,7 +462,7 @@ public class TokenizerNode{
   }
 
 
-	/********************** count different node types ********************/
+  /********************** count different node types ********************/
 
   public int getNumBranchNodesIncludingThisNode() {
     if (isLeaf()) {
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/TokenizerRowSearchPosition.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/TokenizerRowSearchPosition.java
index 6054a46d784..1166baaee29 100644
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/TokenizerRowSearchPosition.java
+++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/TokenizerRowSearchPosition.java
@@ -30,9 +30,9 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
 @InterfaceAudience.Private
 public enum TokenizerRowSearchPosition {
 
-	AFTER,//the key is after this tree node, so keep searching
-	BEFORE,//in a binary search, this tells us to back up
-	MATCH,//the current node is a full match
-	NO_MATCH,//might as well return a value more informative than null
+  AFTER,//the key is after this tree node, so keep searching
+  BEFORE,//in a binary search, this tells us to back up
+  MATCH,//the current node is a full match
+  NO_MATCH,//might as well return a value more informative than null
 
 }
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/util/vint/UFIntTool.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/util/vint/UFIntTool.java
index 8b63fd16213..fc7c107125b 100644
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/util/vint/UFIntTool.java
+++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/util/vint/UFIntTool.java
@@ -78,13 +78,13 @@ public class UFIntTool {
   private static final long[] MASKS = new long[] {
     (long) 255,
-	(long) 255 << 8,
-	(long) 255 << 16,
-	(long) 255 << 24,
-	(long) 255 << 32,
-	(long) 255 << 40,
-	(long) 255 << 48,
-	(long) 255 << 56
+    (long) 255 << 8,
+    (long) 255 << 16,
+    (long) 255 << 24,
+    (long) 255 << 32,
+    (long) 255 << 40,
+    (long) 255 << 48,
+    (long) 255 << 56
   };
 
   public static void writeBytes(int outputWidth, final long value, OutputStream os) throws IOException {
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestPrefixTreeSearcher.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestPrefixTreeSearcher.java
index 55d3d221c7e..98513da1180 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestPrefixTreeSearcher.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestPrefixTreeSearcher.java
@@ -48,7 +48,7 @@
 @RunWith(Parameterized.class)
 public class TestPrefixTreeSearcher {
 
-	protected static int BLOCK_START = 7;
+  protected static int BLOCK_START = 7;
 
   @Parameters
   public static Collection<Object[]> parameters() {
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowEncoder.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowEncoder.java
index d63144e7280..ec115511584 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowEncoder.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowEncoder.java
@@ -77,7 +77,7 @@ public class TestRowEncoder {
     this.rows = testRows;
   }
 
-	@Before
+  @Before
   public void compile() throws IOException {
     // Always run with tags. But should also ensure that KVs without tags work fine
     os = new ByteArrayOutputStream(1 << 20);
@@ -175,7 +175,7 @@ public class TestRowEncoder {
   }
 
 
-	/**************** helper **************************/
+  /**************** helper **************************/
 
   protected void assertKeyAndValueEqual(Cell expected, Cell actual) {
     // assert keys are equal (doesn't compare values)
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataComplexQualifiers.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataComplexQualifiers.java
index bd6f02b4458..66fe3f33fea 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataComplexQualifiers.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataComplexQualifiers.java
@@ -29,7 +29,7 @@ import com.google.common.collect.Lists;
 
 public class TestRowDataComplexQualifiers extends BaseTestRowData{
 
-	static byte[]
+  static byte[]
     Arow = Bytes.toBytes("Arow"),
     cf = PrefixTreeTestConstants.TEST_CF,
     v0 = Bytes.toBytes("v0");
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataDeeper.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataDeeper.java
index cb3913c1599..4d057421656 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataDeeper.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataDeeper.java
@@ -36,36 +36,36 @@ import com.google.common.collect.Lists;
  */
 public class TestRowDataDeeper extends BaseTestRowData{
 
-	static byte[]
-		cdc = Bytes.toBytes("cdc"),
-		cf6 = Bytes.toBytes("cf6"),
-		cfc = Bytes.toBytes("cfc"),
-		f = Bytes.toBytes("f"),
-		q = Bytes.toBytes("q"),
-		v = Bytes.toBytes("v");
+  static byte[]
+    cdc = Bytes.toBytes("cdc"),
+    cf6 = Bytes.toBytes("cf6"),
+    cfc = Bytes.toBytes("cfc"),
+    f = Bytes.toBytes("f"),
+    q = Bytes.toBytes("q"),
+    v = Bytes.toBytes("v");
 
-	static long
-		ts = 55L;
+  static long
+    ts = 55L;
 
-	static List<KeyValue> d = Lists.newArrayList();
-	static{
-		d.add(new KeyValue(cdc, f, q, ts, v));
+  static List<KeyValue> d = Lists.newArrayList();
+  static{
+    d.add(new KeyValue(cdc, f, q, ts, v));
     d.add(new KeyValue(cf6, f, q, ts, v));
     d.add(new KeyValue(cfc, f, q, ts, v));
-	}
+  }
 
-	@Override
-	public List<KeyValue> getInputs() {
-		return d;
-	}
+  @Override
+  public List<KeyValue> getInputs() {
+    return d;
+  }
 
-	@Override
-	public void individualBlockMetaAssertions(PrefixTreeBlockMeta blockMeta) {
-		//0: token:c; fan:d,f
-		//1: token:f; fan:6,c
-		//2: leaves
-		Assert.assertEquals(3, blockMeta.getRowTreeDepth());
-	}
+  @Override
+  public void individualBlockMetaAssertions(PrefixTreeBlockMeta blockMeta) {
+    //0: token:c; fan:d,f
+    //1: token:f; fan:6,c
+    //2: leaves
+    Assert.assertEquals(3, blockMeta.getRowTreeDepth());
+  }
 
   @Override
   public void individualSearcherAssertions(CellSearcher searcher) {
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataDifferentTimestamps.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataDifferentTimestamps.java
index 2668f2afd0f..8639e8fb3d9 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataDifferentTimestamps.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataDifferentTimestamps.java
@@ -33,62 +33,62 @@ import com.google.common.collect.Lists;
  */
 public class TestRowDataDifferentTimestamps extends BaseTestRowData{
 
-	static byte[]
-		Arow = Bytes.toBytes("Arow"),
-		Brow = Bytes.toBytes("Brow"),
-		cf = Bytes.toBytes("fammy"),
-		cq0 = Bytes.toBytes("cq0"),
-		cq1 = Bytes.toBytes("cq1"),
-		v0 = Bytes.toBytes("v0");
+  static byte[]
+    Arow = Bytes.toBytes("Arow"),
+    Brow = Bytes.toBytes("Brow"),
+    cf = Bytes.toBytes("fammy"),
+    cq0 = Bytes.toBytes("cq0"),
+    cq1 = Bytes.toBytes("cq1"),
+    v0 = Bytes.toBytes("v0");
 
-	static List<KeyValue> d = Lists.newArrayList();
-	static{
-		KeyValue kv0 = new KeyValue(Arow, cf, cq0, 0L, v0);
-		kv0.setSequenceId(123456789L);
-		d.add(kv0);
+  static List<KeyValue> d = Lists.newArrayList();
+  static{
+    KeyValue kv0 = new KeyValue(Arow, cf, cq0, 0L, v0);
+    kv0.setSequenceId(123456789L);
+    d.add(kv0);
 
-		KeyValue kv1 = new KeyValue(Arow, cf, cq1, 1L, v0);
+    KeyValue kv1 = new KeyValue(Arow, cf, cq1, 1L, v0);
     kv1.setSequenceId(3L);
     d.add(kv1);
 
-		KeyValue kv2 = new KeyValue(Brow, cf, cq0, 12345678L, v0);
+    KeyValue kv2 = new KeyValue(Brow, cf, cq0, 12345678L, v0);
     kv2.setSequenceId(65537L);
     d.add(kv2);
 
-		//watch out... Long.MAX_VALUE comes back as 1332221664203, even with other encoders
-//		d.add(new KeyValue(Brow, cf, cq1, Long.MAX_VALUE, v0));
-		KeyValue kv3 = new KeyValue(Brow, cf, cq1, Long.MAX_VALUE-1, v0);
+    //watch out... Long.MAX_VALUE comes back as 1332221664203, even with other encoders
+    //d.add(new KeyValue(Brow, cf, cq1, Long.MAX_VALUE, v0));
+    KeyValue kv3 = new KeyValue(Brow, cf, cq1, Long.MAX_VALUE-1, v0);
     kv3.setSequenceId(1L);
     d.add(kv3);
 
-		KeyValue kv4 = new KeyValue(Brow, cf, cq1, 999999999, v0);
+    KeyValue kv4 = new KeyValue(Brow, cf, cq1, 999999999, v0);
     //don't set memstoreTS
-		d.add(kv4);
+    d.add(kv4);
 
-		KeyValue kv5 = new KeyValue(Brow, cf, cq1, 12345, v0);
+    KeyValue kv5 = new KeyValue(Brow, cf, cq1, 12345, v0);
     kv5.setSequenceId(0L);
     d.add(kv5);
-	}
+  }
 
-	@Override
-	public List<KeyValue> getInputs() {
-		return d;
-	}
+  @Override
+  public List<KeyValue> getInputs() {
+    return d;
+  }
 
-	@Override
-	public void individualBlockMetaAssertions(PrefixTreeBlockMeta blockMeta) {
-		Assert.assertTrue(blockMeta.getNumMvccVersionBytes() > 0);
-		Assert.assertEquals(12, blockMeta.getNumValueBytes());
+  @Override
+  public void individualBlockMetaAssertions(PrefixTreeBlockMeta blockMeta) {
+    Assert.assertTrue(blockMeta.getNumMvccVersionBytes() > 0);
+    Assert.assertEquals(12, blockMeta.getNumValueBytes());
 
-		Assert.assertFalse(blockMeta.isAllSameTimestamp());
-		Assert.assertNotNull(blockMeta.getMinTimestamp());
-		Assert.assertTrue(blockMeta.getTimestampIndexWidth() > 0);
-		Assert.assertTrue(blockMeta.getTimestampDeltaWidth() > 0);
+    Assert.assertFalse(blockMeta.isAllSameTimestamp());
+    Assert.assertNotNull(blockMeta.getMinTimestamp());
+    Assert.assertTrue(blockMeta.getTimestampIndexWidth() > 0);
+    Assert.assertTrue(blockMeta.getTimestampDeltaWidth() > 0);
 
     Assert.assertFalse(blockMeta.isAllSameMvccVersion());
     Assert.assertNotNull(blockMeta.getMinMvccVersion());
     Assert.assertTrue(blockMeta.getMvccVersionIndexWidth() > 0);
     Assert.assertTrue(blockMeta.getMvccVersionDeltaWidth() > 0);
-	}
+  }
 }
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataExerciseFInts.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataExerciseFInts.java
index 184f5376f4f..c49db134763 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataExerciseFInts.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataExerciseFInts.java
@@ -42,49 +42,49 @@ public class TestRowDataExerciseFInts extends BaseTestRowData{
 
   static List<ByteRange> rows;
-	static{
-		List<String> rowStrings = new ArrayList<String>();
-		rowStrings.add("com.edsBlog/directoryAa/pageAaa");
-		rowStrings.add("com.edsBlog/directoryAa/pageBbb");
-		rowStrings.add("com.edsBlog/directoryAa/pageCcc");
-		rowStrings.add("com.edsBlog/directoryAa/pageDdd");
-		rowStrings.add("com.edsBlog/directoryBb/pageEee");
-		rowStrings.add("com.edsBlog/directoryBb/pageFff");
-		rowStrings.add("com.edsBlog/directoryBb/pageGgg");
-		rowStrings.add("com.edsBlog/directoryBb/pageHhh");
-		rowStrings.add("com.isabellasBlog/directoryAa/pageAaa");
-		rowStrings.add("com.isabellasBlog/directoryAa/pageBbb");
-		rowStrings.add("com.isabellasBlog/directoryAa/pageCcc");
-		rowStrings.add("com.isabellasBlog/directoryAa/pageDdd");
-		rowStrings.add("com.isabellasBlog/directoryBb/pageEee");
-		rowStrings.add("com.isabellasBlog/directoryBb/pageFff");
-		rowStrings.add("com.isabellasBlog/directoryBb/pageGgg");
-		rowStrings.add("com.isabellasBlog/directoryBb/pageHhh");
-		ByteRangeTreeSet ba = new ByteRangeTreeSet();
-		for(String row : rowStrings){
-			ba.add(new SimpleMutableByteRange(Bytes.toBytes(row)));
-		}
-		rows = ba.compile().getSortedRanges();
-	}
+  static{
+    List<String> rowStrings = new ArrayList<String>();
+    rowStrings.add("com.edsBlog/directoryAa/pageAaa");
+    rowStrings.add("com.edsBlog/directoryAa/pageBbb");
+    rowStrings.add("com.edsBlog/directoryAa/pageCcc");
+    rowStrings.add("com.edsBlog/directoryAa/pageDdd");
+    rowStrings.add("com.edsBlog/directoryBb/pageEee");
+    rowStrings.add("com.edsBlog/directoryBb/pageFff");
+    rowStrings.add("com.edsBlog/directoryBb/pageGgg");
+    rowStrings.add("com.edsBlog/directoryBb/pageHhh");
+    rowStrings.add("com.isabellasBlog/directoryAa/pageAaa");
+    rowStrings.add("com.isabellasBlog/directoryAa/pageBbb");
+    rowStrings.add("com.isabellasBlog/directoryAa/pageCcc");
+    rowStrings.add("com.isabellasBlog/directoryAa/pageDdd");
+    rowStrings.add("com.isabellasBlog/directoryBb/pageEee");
+    rowStrings.add("com.isabellasBlog/directoryBb/pageFff");
+    rowStrings.add("com.isabellasBlog/directoryBb/pageGgg");
+    rowStrings.add("com.isabellasBlog/directoryBb/pageHhh");
+    ByteRangeTreeSet ba = new ByteRangeTreeSet();
+    for(String row : rowStrings){
+      ba.add(new SimpleMutableByteRange(Bytes.toBytes(row)));
+    }
+    rows = ba.compile().getSortedRanges();
+  }
 
-	static List<String> cols = Lists.newArrayList();
-	static{
-		cols.add("Chrome");
-		cols.add("Chromeb");
-		cols.add("Firefox");
-		cols.add("InternetExplorer");
-		cols.add("Opera");
-		cols.add("Safari");
-		cols.add("Z1stBrowserWithHuuuuuuuuuuuugeQualifier");
-		cols.add("Z2ndBrowserWithEvenBiggerQualifierMoreMoreMoreMoreMore");
-		cols.add("Z3rdBrowserWithEvenBiggerQualifierMoreMoreMoreMoreMore");
-		cols.add("Z4thBrowserWithEvenBiggerQualifierMoreMoreMoreMoreMore");
-		cols.add("Z5thBrowserWithEvenBiggerQualifierMoreMoreMoreMoreMore");
-		cols.add("Z6thBrowserWithEvenBiggerQualifierMoreMoreMoreMoreMore");
-		cols.add("Z7thBrowserWithEvenBiggerQualifierMoreMoreMoreMoreMore");
-		cols.add("Z8thBrowserWithEvenBiggerQualifierMoreMoreMoreMoreMore");
-		cols.add("Z9thBrowserWithEvenBiggerQualifierMoreMoreMoreMoreMore");
-	}
+  static List<String> cols = Lists.newArrayList();
+  static{
+    cols.add("Chrome");
+    cols.add("Chromeb");
+    cols.add("Firefox");
+    cols.add("InternetExplorer");
+    cols.add("Opera");
+    cols.add("Safari");
+    cols.add("Z1stBrowserWithHuuuuuuuuuuuugeQualifier");
+    cols.add("Z2ndBrowserWithEvenBiggerQualifierMoreMoreMoreMoreMore");
+    cols.add("Z3rdBrowserWithEvenBiggerQualifierMoreMoreMoreMoreMore");
+    cols.add("Z4thBrowserWithEvenBiggerQualifierMoreMoreMoreMoreMore");
+    cols.add("Z5thBrowserWithEvenBiggerQualifierMoreMoreMoreMoreMore");
+    cols.add("Z6thBrowserWithEvenBiggerQualifierMoreMoreMoreMoreMore");
+    cols.add("Z7thBrowserWithEvenBiggerQualifierMoreMoreMoreMoreMore");
+    cols.add("Z8thBrowserWithEvenBiggerQualifierMoreMoreMoreMoreMore");
+    cols.add("Z9thBrowserWithEvenBiggerQualifierMoreMoreMoreMoreMore");
+  }
 
   static long ts = 1234567890;
 
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataNub.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataNub.java
index ad19cd40603..a818f5e4135 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataNub.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataNub.java
@@ -29,31 +29,31 @@ import com.google.common.collect.Lists;
 
 public class TestRowDataNub extends BaseTestRowData{
 
-	static byte[]
-		rowA = Bytes.toBytes("rowA"),
-		rowB = Bytes.toBytes("rowB"),//nub
-		rowBB = Bytes.toBytes("rowBB"),
-		cf = PrefixTreeTestConstants.TEST_CF,
-		cq0 = Bytes.toBytes("cq0"),
-		cq1 = Bytes.toBytes("cq1"),
-		v0 = Bytes.toBytes("v0");
+  static byte[]
+    rowA = Bytes.toBytes("rowA"),
+    rowB = Bytes.toBytes("rowB"),//nub
+    rowBB = Bytes.toBytes("rowBB"),
+    cf = PrefixTreeTestConstants.TEST_CF,
+    cq0 = Bytes.toBytes("cq0"),
+    cq1 = Bytes.toBytes("cq1"),
+    v0 = Bytes.toBytes("v0");
 
-	static long
-		ts = 55L;
+  static long
+    ts = 55L;
 
-	static List<KeyValue> d = Lists.newArrayList();
-	static{
-		d.add(new KeyValue(rowA, cf, cq0, ts, v0));
-		d.add(new KeyValue(rowA, cf, cq1, ts, v0));
-		d.add(new KeyValue(rowB, cf, cq0, ts, v0));
-		d.add(new KeyValue(rowB, cf, cq1, ts, v0));
-		d.add(new KeyValue(rowBB, cf, cq0, ts, v0));
-		d.add(new KeyValue(rowBB, cf, cq1, ts, v0));
-	}
+  static List<KeyValue> d = Lists.newArrayList();
+  static{
+    d.add(new KeyValue(rowA, cf, cq0, ts, v0));
+    d.add(new KeyValue(rowA, cf, cq1, ts, v0));
+    d.add(new KeyValue(rowB, cf, cq0, ts, v0));
+    d.add(new KeyValue(rowB, cf, cq1, ts, v0));
+    d.add(new KeyValue(rowBB, cf, cq0, ts, v0));
+    d.add(new KeyValue(rowBB, cf, cq1, ts, v0));
+  }
 
-	@Override
-	public List<KeyValue> getInputs() {
-		return d;
-	}
+  @Override
+  public List<KeyValue> getInputs() {
+    return d;
+  }
 }
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataQualifierByteOrdering.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataQualifierByteOrdering.java
index a8c4646eb91..6d3918ff45c 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataQualifierByteOrdering.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataQualifierByteOrdering.java
@@ -28,15 +28,15 @@ import com.google.common.collect.Lists;
 
 public class TestRowDataQualifierByteOrdering extends BaseTestRowData{
 
-	static byte[]
-		Arow = Bytes.toBytes("Arow"),
-		Brow = Bytes.toBytes("Brow"),
-		Brow2 = Bytes.toBytes("Brow2"),
-		fam = Bytes.toBytes("HappyFam"),
-		cq0 = Bytes.toBytes("cq0"),
-		cq1 = Bytes.toBytes("cq1tail"),//make sure tail does not come back as liat
-		cq2 = Bytes.toBytes("cq2"),
-		v0 = Bytes.toBytes("v0");
+  static byte[]
+    Arow = Bytes.toBytes("Arow"),
+    Brow = Bytes.toBytes("Brow"),
+    Brow2 = Bytes.toBytes("Brow2"),
+    fam = Bytes.toBytes("HappyFam"),
+    cq0 = Bytes.toBytes("cq0"),
+    cq1 = Bytes.toBytes("cq1tail"),//make sure tail does not come back as liat
+    cq2 = Bytes.toBytes("cq2"),
+    v0 = Bytes.toBytes("v0");
 
   static long ts = 55L;
 
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSearcherRowMiss.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSearcherRowMiss.java
index 5f319fc1a61..edea305825a 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSearcherRowMiss.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSearcherRowMiss.java
@@ -55,13 +55,13 @@ public class TestRowDataSearcherRowMiss extends BaseTestRowData{
     d.add(new KeyValue(B, cf, cq, ts, v));
   }
 
-	@Override
-	public List<KeyValue> getInputs() {
-		return d;
-	}
+  @Override
+  public List<KeyValue> getInputs() {
+    return d;
+  }
 
-	@Override
-	public void individualSearcherAssertions(CellSearcher searcher) {
+  @Override
+  public void individualSearcherAssertions(CellSearcher searcher) {
     assertRowOffsetsCorrect();
 
     searcher.resetToBeforeFirstEntry();
@@ -83,13 +83,13 @@ public class TestRowDataSearcherRowMiss extends BaseTestRowData{
     testBetween2and3(searcher);
   }
 
-	/************ private methods, call from above *******************/
+  /************ private methods, call from above *******************/
 
-	private void assertRowOffsetsCorrect(){
-		Assert.assertEquals(4, getRowStartIndexes().size());
-	}
+  private void assertRowOffsetsCorrect(){
+    Assert.assertEquals(4, getRowStartIndexes().size());
+  }
 
-	private void testBetween1and2(CellSearcher searcher){
+  private void testBetween1and2(CellSearcher searcher){
     CellScannerPosition p;//reuse
     Cell betweenAAndAAA = new KeyValue(AA, cf, cq, ts-2, v);
 
@@ -105,7 +105,7 @@ public class TestRowDataSearcherRowMiss extends BaseTestRowData{
     p = searcher.positionAtOrAfter(betweenAAndAAA);
     Assert.assertEquals(CellScannerPosition.AFTER, p);
     Assert.assertTrue(CellComparator.equals(searcher.current(), d.get(2)));
-	}
+  }
 
   private void testBetween2and3(CellSearcher searcher){
     CellScannerPosition p;//reuse
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSingleQualifier.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSingleQualifier.java
index 9944057176c..35ba2c137ae 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSingleQualifier.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSingleQualifier.java
@@ -29,12 +29,12 @@ import com.google.common.collect.Lists;
 
 public class TestRowDataSingleQualifier extends BaseTestRowData{
 
-	static byte[]
-		rowA = Bytes.toBytes("rowA"),
-		rowB = Bytes.toBytes("rowB"),
-		cf = PrefixTreeTestConstants.TEST_CF,
-		cq0 = Bytes.toBytes("cq0"),
-		v0 = Bytes.toBytes("v0");
+  static byte[]
+    rowA = Bytes.toBytes("rowA"),
+    rowB = Bytes.toBytes("rowB"),
+    cf = PrefixTreeTestConstants.TEST_CF,
+    cq0 = Bytes.toBytes("cq0"),
+    v0 = Bytes.toBytes("v0");
 
   static long ts = 55L;
 
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataTrivial.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataTrivial.java
index df785facefb..2a53976cb7a 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataTrivial.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataTrivial.java
@@ -33,12 +33,12 @@ import com.google.common.collect.Lists;
 
 public class TestRowDataTrivial extends BaseTestRowData{
 
-	static byte[]
-		rA = Bytes.toBytes("rA"),
-		rB = Bytes.toBytes("rB"),//turn "r" into a branch for the Searcher tests
-		cf = Bytes.toBytes("fam"),
-		cq0 = Bytes.toBytes("q0"),
-		v0 = Bytes.toBytes("v0");
+  static byte[]
+    rA = Bytes.toBytes("rA"),
+    rB = Bytes.toBytes("rB"),//turn "r" into a branch for the Searcher tests
+    cf = Bytes.toBytes("fam"),
+    cq0 = Bytes.toBytes("q0"),
+    v0 = Bytes.toBytes("v0");
 
   static long ts = 55L;
 
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataUrls.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataUrls.java
index d0801c19f43..74ac85ad86a 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataUrls.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataUrls.java
@@ -40,7 +40,7 @@ import com.google.common.collect.Lists;
 public class TestRowDataUrls extends BaseTestRowData{
 
   static List<ByteRange> rows;
-	static{
+  static{
     List<String> rowStrings = new ArrayList<String>();
     rowStrings.add("com.edsBlog/directoryAa/pageAaa");
     rowStrings.add("com.edsBlog/directoryAa/pageBbb");
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataUrlsExample.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataUrlsExample.java
index 543afb60eee..56d4e854457 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataUrlsExample.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataUrlsExample.java
@@ -51,33 +51,33 @@ public class TestRowDataUrlsExample extends BaseTestRowData{
   static String FAMILY = "hits";
   static List<String> BROWSERS = Lists.newArrayList(
       "Chrome", "IE8", "IE9beta");//, "Opera", "Safari");
-	static long TIMESTAMP = 1234567890;
+  static long TIMESTAMP = 1234567890;
 
-	static int MAX_VALUE = 50;
+  static int MAX_VALUE = 50;
 
-	static List<KeyValue> kvs = Lists.newArrayList();
-	static{
-		for(String rowKey : URLS){
-			for(String qualifier : BROWSERS){
-				KeyValue kv = new KeyValue(
-						Bytes.toBytes(rowKey),
-						Bytes.toBytes(FAMILY),
-						Bytes.toBytes(qualifier),
-						TIMESTAMP,
-						KeyValue.Type.Put,
-						Bytes.toBytes("VvvV"));
-				kvs.add(kv);
-			}
-		}
-	}
+  static List<KeyValue> kvs = Lists.newArrayList();
+  static{
+    for(String rowKey : URLS){
+      for(String qualifier : BROWSERS){
+        KeyValue kv = new KeyValue(
+            Bytes.toBytes(rowKey),
+            Bytes.toBytes(FAMILY),
+            Bytes.toBytes(qualifier),
+            TIMESTAMP,
+            KeyValue.Type.Put,
+            Bytes.toBytes("VvvV"));
+        kvs.add(kv);
+      }
+    }
+  }
 
-	/**
-	 * Used for generating docs.
- */ - public static void main(String... args) throws IOException{ + /** + * Used for generating docs. + */ + public static void main(String... args) throws IOException{ System.out.println("-- inputs --"); System.out.println(KeyValueTestUtil.toStringWithPadding(kvs, true)); - ByteArrayOutputStream os = new ByteArrayOutputStream(1<<20); + ByteArrayOutputStream os = new ByteArrayOutputStream(1<<20); PrefixTreeEncoder encoder = new PrefixTreeEncoder(os, false); for(KeyValue kv : kvs){ @@ -116,11 +116,11 @@ public class TestRowDataUrlsExample extends BaseTestRowData{ System.out.println("-- concatenated values --"); System.out.println(Bytes.toStringBinary(encoder.getValueByteRange().deepCopyToNewArray())); - } + } - @Override - public List getInputs() { - return kvs; - } + @Override + public List getInputs() { + return kvs; + } } diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterVersionModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterVersionModel.java index 4321a8ef582..e332d49f1c5 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterVersionModel.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterVersionModel.java @@ -40,39 +40,39 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; @XmlRootElement(name="ClusterVersion") @InterfaceAudience.Private public class StorageClusterVersionModel implements Serializable { - private static final long serialVersionUID = 1L; + private static final long serialVersionUID = 1L; - private String version; + private String version; - /** - * @return the storage cluster version - */ - @XmlValue - public String getVersion() { - return version; - } + /** + * @return the storage cluster version + */ + @XmlValue + public String getVersion() { + return version; + } - /** - * @param version the storage cluster version - */ - public void setVersion(String version) { - this.version = version; - } + /** + * @param version the storage cluster version + */ + public void setVersion(String version) { + this.version = version; + } - /* (non-Javadoc) - * @see java.lang.Object#toString() - */ + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ @JsonValue - @Override - public String toString() { - return version; - } + @Override + public String toString() { + return version; + } - //needed for jackson deserialization - private static StorageClusterVersionModel valueOf(String value) { - StorageClusterVersionModel versionModel - = new StorageClusterVersionModel(); - versionModel.setVersion(value); - return versionModel; - } + //needed for jackson deserialization + private static StorageClusterVersionModel valueOf(String value) { + StorageClusterVersionModel versionModel + = new StorageClusterVersionModel(); + versionModel.setVersion(value); + return versionModel; + } } diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableListModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableListModel.java index 596adac7c17..2ed4e80a8fa 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableListModel.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableListModel.java @@ -38,67 +38,67 @@ import org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableLis @InterfaceAudience.Private public class TableListModel implements Serializable, ProtobufMessageHandler { - private static final long serialVersionUID = 1L; + private static 
final long serialVersionUID = 1L; - private List tables = new ArrayList(); + private List tables = new ArrayList(); - /** - * Default constructor - */ - public TableListModel() {} + /** + * Default constructor + */ + public TableListModel() {} - /** - * Add the table name model to the list - * @param table the table model - */ - public void add(TableModel table) { - tables.add(table); - } - - /** - * @param index the index - * @return the table model - */ - public TableModel get(int index) { - return tables.get(index); - } + /** + * Add the table name model to the list + * @param table the table model + */ + public void add(TableModel table) { + tables.add(table); + } + + /** + * @param index the index + * @return the table model + */ + public TableModel get(int index) { + return tables.get(index); + } - /** - * @return the tables - */ - @XmlElementRef(name="table") - public List getTables() { - return tables; - } + /** + * @return the tables + */ + @XmlElementRef(name="table") + public List getTables() { + return tables; + } - /** - * @param tables the tables to set - */ - public void setTables(List tables) { - this.tables = tables; - } + /** + * @param tables the tables to set + */ + public void setTables(List tables) { + this.tables = tables; + } - /* (non-Javadoc) - * @see java.lang.Object#toString() - */ - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - for(TableModel aTable : tables) { - sb.append(aTable.toString()); - sb.append('\n'); - } - return sb.toString(); - } + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + for(TableModel aTable : tables) { + sb.append(aTable.toString()); + sb.append('\n'); + } + return sb.toString(); + } - @Override - public byte[] createProtobufOutput() { - TableList.Builder builder = TableList.newBuilder(); - for (TableModel aTable : tables) { - builder.addName(aTable.getName()); - } - return builder.build().toByteArray(); - } + @Override + public byte[] createProtobufOutput() { + TableList.Builder builder = TableList.newBuilder(); + for (TableModel aTable : tables) { + builder.addName(aTable.getName()); + } + return builder.build().toByteArray(); + } @Override public ProtobufMessageHandler getObjectFromMessage(byte[] message) diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableModel.java index 0fb0d6e6059..3be0da98503 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableModel.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableModel.java @@ -41,44 +41,44 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; @InterfaceAudience.Private public class TableModel implements Serializable { - private static final long serialVersionUID = 1L; - - private String name; - - /** - * Default constructor - */ - public TableModel() {} + private static final long serialVersionUID = 1L; + + private String name; + + /** + * Default constructor + */ + public TableModel() {} - /** - * Constructor - * @param name - */ - public TableModel(String name) { - super(); - this.name = name; - } + /** + * Constructor + * @param name + */ + public TableModel(String name) { + super(); + this.name = name; + } - /** - * @return the name - */ - @XmlAttribute - public String getName() { - return name; - } + /** + * @return the name + */ + @XmlAttribute + public String getName() { + return name; + } 
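For context on the REST model classes being reindented here: they implement ProtobufMessageHandler, so a table list can be round-tripped through the protobuf wire form that createProtobufOutput() emits. A minimal sketch of that round trip follows; it is illustrative only, and it assumes getObjectFromMessage declares IOException as the other hbase-rest models do (the table names are made up).

  import java.io.IOException;
  import org.apache.hadoop.hbase.rest.model.TableListModel;
  import org.apache.hadoop.hbase.rest.model.TableModel;

  public class TableListRoundTrip {
    public static void main(String[] args) throws IOException {
      TableListModel list = new TableListModel();
      list.add(new TableModel("users"));   // hypothetical table names
      list.add(new TableModel("events"));

      // Serialize to the protobuf form built by createProtobufOutput().
      byte[] wire = list.createProtobufOutput();

      // Rebuild a model from the wire bytes; getObjectFromMessage populates
      // and returns the handler, which narrows back to TableListModel.
      TableListModel copy =
          (TableListModel) new TableListModel().getObjectFromMessage(wire);

      // toString() appends one table name per line, per the code above.
      System.out.print(copy);
    }
  }
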
- /** - * @param name the name to set - */ - public void setName(String name) { - this.name = name; - } + /** + * @param name the name to set + */ + public void setName(String name) { + this.name = name; + } - /* (non-Javadoc) - * @see java.lang.Object#toString() - */ - @Override - public String toString() { - return this.name; - } + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return this.name; + } } diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/VersionModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/VersionModel.java index 093880340cb..8b08279e376 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/VersionModel.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/VersionModel.java @@ -48,9 +48,9 @@ import com.sun.jersey.spi.container.servlet.ServletContainer; @InterfaceAudience.Private public class VersionModel implements Serializable, ProtobufMessageHandler { - private static final long serialVersionUID = 1L; + private static final long serialVersionUID = 1L; - private String restVersion; + private String restVersion; private String jvmVersion; private String osVersion; private String serverVersion; @@ -65,30 +65,30 @@ public class VersionModel implements Serializable, ProtobufMessageHandler { * Constructor * @param context the servlet context */ - public VersionModel(ServletContext context) { - restVersion = RESTServlet.VERSION_STRING; - jvmVersion = System.getProperty("java.vm.vendor") + ' ' + + public VersionModel(ServletContext context) { + restVersion = RESTServlet.VERSION_STRING; + jvmVersion = System.getProperty("java.vm.vendor") + ' ' + System.getProperty("java.version") + '-' + System.getProperty("java.vm.version"); - osVersion = System.getProperty("os.name") + ' ' + + osVersion = System.getProperty("os.name") + ' ' + System.getProperty("os.version") + ' ' + System.getProperty("os.arch"); - serverVersion = context.getServerInfo(); - jerseyVersion = ServletContainer.class.getPackage() + serverVersion = context.getServerInfo(); + jerseyVersion = ServletContainer.class.getPackage() .getImplementationVersion(); - } + } - /** - * @return the REST gateway version - */ - @XmlAttribute(name="REST") - public String getRESTVersion() { + /** + * @return the REST gateway version + */ + @XmlAttribute(name="REST") + public String getRESTVersion() { return restVersion; } - /** - * @return the JVM vendor and version - */ + /** + * @return the JVM vendor and version + */ @XmlAttribute(name="JVM") public String getJVMVersion() { return jvmVersion; @@ -154,34 +154,34 @@ public class VersionModel implements Serializable, ProtobufMessageHandler { } /* (non-Javadoc) - * @see java.lang.Object#toString() - */ - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append("rest "); - sb.append(restVersion); - sb.append(" [JVM: "); - sb.append(jvmVersion); - sb.append("] [OS: "); - sb.append(osVersion); - sb.append("] [Server: "); - sb.append(serverVersion); - sb.append("] [Jersey: "); + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("rest "); + sb.append(restVersion); + sb.append(" [JVM: "); + sb.append(jvmVersion); + sb.append("] [OS: "); + sb.append(osVersion); + sb.append("] [Server: "); + sb.append(serverVersion); + sb.append("] [Jersey: "); sb.append(jerseyVersion); - sb.append("]\n"); - return sb.toString(); - } + sb.append("]\n"); + 
return sb.toString(); + } - @Override + @Override public byte[] createProtobufOutput() { - Version.Builder builder = Version.newBuilder(); - builder.setRestVersion(restVersion); - builder.setJvmVersion(jvmVersion); - builder.setOsVersion(osVersion); - builder.setServerVersion(serverVersion); - builder.setJerseyVersion(jerseyVersion); - return builder.build().toByteArray(); + Version.Builder builder = Version.newBuilder(); + builder.setRestVersion(restVersion); + builder.setJvmVersion(jvmVersion); + builder.setOsVersion(osVersion); + builder.setServerVersion(serverVersion); + builder.setJerseyVersion(jerseyVersion); + return builder.build().toByteArray(); } @Override diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/JAXBContextResolver.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/JAXBContextResolver.java index 3b8b8ca6a07..216f830ba00 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/JAXBContextResolver.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/JAXBContextResolver.java @@ -55,12 +55,12 @@ import com.sun.jersey.api.json.JSONJAXBContext; @InterfaceAudience.Private public class JAXBContextResolver implements ContextResolver { - private final JAXBContext context; + private final JAXBContext context; - private final Set> types; + private final Set> types; - private final Class[] cTypes = { - CellModel.class, + private final Class[] cTypes = { + CellModel.class, CellSetModel.class, ColumnSchemaModel.class, RowModel.class, @@ -68,22 +68,22 @@ public class JAXBContextResolver implements ContextResolver { StorageClusterStatusModel.class, StorageClusterVersionModel.class, TableInfoModel.class, - TableListModel.class, - TableModel.class, - TableRegionModel.class, - TableSchemaModel.class, - VersionModel.class - }; + TableListModel.class, + TableModel.class, + TableRegionModel.class, + TableSchemaModel.class, + VersionModel.class + }; - @SuppressWarnings("unchecked") + @SuppressWarnings("unchecked") public JAXBContextResolver() throws Exception { - this.types = new HashSet(Arrays.asList(cTypes)); - this.context = new JSONJAXBContext(JSONConfiguration.natural().build(), - cTypes); - } + this.types = new HashSet(Arrays.asList(cTypes)); + this.context = new JSONJAXBContext(JSONConfiguration.natural().build(), + cTypes); + } - @Override - public JAXBContext getContext(Class objectType) { - return (types.contains(objectType)) ? context : null; + @Override + public JAXBContext getContext(Class objectType) { + return (types.contains(objectType)) ? 
context : null; } } diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/producer/PlainTextMessageBodyProducer.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/producer/PlainTextMessageBodyProducer.java index 523692a94c0..fca45447afa 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/producer/PlainTextMessageBodyProducer.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/producer/PlainTextMessageBodyProducer.java @@ -54,21 +54,21 @@ public class PlainTextMessageBodyProducer return true; } - @Override - public long getSize(Object object, Class type, Type genericType, - Annotation[] annotations, MediaType mediaType) { + @Override + public long getSize(Object object, Class type, Type genericType, + Annotation[] annotations, MediaType mediaType) { byte[] bytes = object.toString().getBytes(); - buffer.set(bytes); + buffer.set(bytes); return bytes.length; - } + } - @Override - public void writeTo(Object object, Class type, Type genericType, - Annotation[] annotations, MediaType mediaType, - MultivaluedMap httpHeaders, OutputStream outStream) - throws IOException, WebApplicationException { + @Override + public void writeTo(Object object, Class type, Type genericType, + Annotation[] annotations, MediaType mediaType, + MultivaluedMap httpHeaders, OutputStream outStream) + throws IOException, WebApplicationException { byte[] bytes = buffer.get(); - outStream.write(bytes); + outStream.write(bytes); buffer.remove(); - } + } } diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/producer/ProtobufMessageBodyProducer.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/producer/ProtobufMessageBodyProducer.java index 6d737b52599..12171a4270e 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/producer/ProtobufMessageBodyProducer.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/producer/ProtobufMessageBodyProducer.java @@ -50,32 +50,32 @@ public class ProtobufMessageBodyProducer private ThreadLocal buffer = new ThreadLocal(); - @Override - public boolean isWriteable(Class type, Type genericType, - Annotation[] annotations, MediaType mediaType) { - return ProtobufMessageHandler.class.isAssignableFrom(type); + @Override + public boolean isWriteable(Class type, Type genericType, + Annotation[] annotations, MediaType mediaType) { + return ProtobufMessageHandler.class.isAssignableFrom(type); } - @Override - public long getSize(ProtobufMessageHandler m, Class type, Type genericType, - Annotation[] annotations, MediaType mediaType) { - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - try { - baos.write(m.createProtobufOutput()); - } catch (IOException e) { - return -1; - } - byte[] bytes = baos.toByteArray(); - buffer.set(bytes); - return bytes.length; - } + @Override + public long getSize(ProtobufMessageHandler m, Class type, Type genericType, + Annotation[] annotations, MediaType mediaType) { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + try { + baos.write(m.createProtobufOutput()); + } catch (IOException e) { + return -1; + } + byte[] bytes = baos.toByteArray(); + buffer.set(bytes); + return bytes.length; + } - public void writeTo(ProtobufMessageHandler m, Class type, Type genericType, - Annotation[] annotations, MediaType mediaType, - MultivaluedMap httpHeaders, OutputStream entityStream) - throws IOException, WebApplicationException { + public void writeTo(ProtobufMessageHandler m, Class type, Type 
genericType, + Annotation[] annotations, MediaType mediaType, + MultivaluedMap httpHeaders, OutputStream entityStream) + throws IOException, WebApplicationException { byte[] bytes = buffer.get(); - entityStream.write(bytes); + entityStream.write(bytes); buffer.remove(); - } + } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRPCErrorHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRPCErrorHandler.java index 6a21a699323..7c4ed01a8e6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRPCErrorHandler.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRPCErrorHandler.java @@ -26,10 +26,10 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; */ @InterfaceAudience.Private public interface HBaseRPCErrorHandler { - /** - * Take actions on the event of an OutOfMemoryError. - * @param e the throwable - * @return if the server should be shut down - */ + /** + * Take actions on the event of an OutOfMemoryError. + * @param e the throwable + * @return if the server should be shut down + */ boolean checkOOME(final Throwable e) ; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionPlacementMaintainer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionPlacementMaintainer.java index e7d52a29afb..196320d3cde 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionPlacementMaintainer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionPlacementMaintainer.java @@ -976,7 +976,7 @@ public class RegionPlacementMaintainer { opt.addOption("munkres", false, "use munkres to place secondaries and tertiaries"); opt.addOption("ld", "locality-dispersion", false, "print locality and dispersion " + - "information for current plan"); + "information for current plan"); try { // Set the log4j Logger.getLogger("org.apache.zookeeper").setLevel(Level.ERROR); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotOfRegionAssignmentFromMeta.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotOfRegionAssignmentFromMeta.java index d6f1b67e902..b03611c5301 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotOfRegionAssignmentFromMeta.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotOfRegionAssignmentFromMeta.java @@ -94,7 +94,7 @@ public class SnapshotOfRegionAssignmentFromMeta { */ public void initialize() throws IOException { LOG.info("Start to scan the hbase:meta for the current region assignment " + - "snappshot"); + "snappshot"); // TODO: at some point this code could live in the MetaTableAccessor Visitor v = new Visitor() { @Override diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java index 0d94bc96324..294131e24f5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java @@ -183,9 +183,9 @@ public abstract class CleanerChore extends Chore // if the directory still has children, we can't delete it, so we are done if (!allChildrenDeleted) return false; } catch (IOException e) { - e = e instanceof RemoteException ? - ((RemoteException)e).unwrapRemoteException() : e; - LOG.warn("Error while listing directory: " + dir, e); + e = e instanceof RemoteException ? 
+ ((RemoteException)e).unwrapRemoteException() : e; + LOG.warn("Error while listing directory: " + dir, e); // couldn't list directory, so don't try to delete, and don't return success return false; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java index 4d4f1467086..1d5970152a5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java @@ -390,9 +390,9 @@ class MemStoreFlusher implements FlushRequester { this.server.compactSplitThread.requestSystemCompaction( region, Thread.currentThread().getName()); } catch (IOException e) { - e = e instanceof RemoteException ? - ((RemoteException)e).unwrapRemoteException() : e; - LOG.error( + e = e instanceof RemoteException ? + ((RemoteException)e).unwrapRemoteException() : e; + LOG.error( "Cache flush failed for region " + Bytes.toStringBinary(region.getRegionName()), e); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitLogWorker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitLogWorker.java index a182aa1a8d4..7590f3c5093 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitLogWorker.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitLogWorker.java @@ -115,7 +115,7 @@ public class SplitLogWorker implements Runnable { || cause instanceof ConnectException || cause instanceof SocketTimeoutException)) { LOG.warn("log replaying of " + filename + " can't connect to the target regionserver, " - + "resigning", e); + + "resigning", e); return Status.RESIGNED; } else if (cause instanceof InterruptedException) { LOG.warn("log splitting of " + filename + " interrupted, resigning", e); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/WALSplitterHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/WALSplitterHandler.java index 6ab6b75f5eb..9a03192b2c9 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/WALSplitterHandler.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/WALSplitterHandler.java @@ -54,7 +54,7 @@ public class WALSplitterHandler extends EventHandler { public WALSplitterHandler(final Server server, SplitLogWorkerCoordination coordination, SplitLogWorkerCoordination.SplitTaskDetails splitDetails, CancelableProgressable reporter, AtomicInteger inProgressTasks, TaskExecutor splitTaskExecutor, RecoveryMode mode) { - super(server, EventType.RS_LOG_REPLAY); + super(server, EventType.RS_LOG_REPLAY); this.splitTaskDetails = splitDetails; this.coordination = coordination; this.reporter = reporter; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java index f08800b6d77..998f1e2c313 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java @@ -367,8 +367,8 @@ public class HBaseFsck extends Configured { if (hbckOutFd == null) { setRetCode(-1); LOG.error("Another instance of hbck is running, exiting this instance.[If you are sure" + - " no other instance is running, delete the lock file " + - HBCK_LOCK_PATH + " and rerun the tool]"); + " no other 
instance is running, delete the lock file " + + HBCK_LOCK_PATH + " and rerun the tool]"); throw new IOException("Duplicate hbck - Abort"); } @@ -1640,8 +1640,8 @@ public class HBaseFsck extends Configured { */ private void checkAndFixConsistency() throws IOException, KeeperException, InterruptedException { - // Divide the checks in two phases. One for default/primary replicas and another - // for the non-primary ones. Keeps code cleaner this way. + // Divide the checks in two phases. One for default/primary replicas and another + // for the non-primary ones. Keeps code cleaner this way. for (java.util.Map.Entry e: regionInfoMap.entrySet()) { if (e.getValue().getReplicaId() == HRegionInfo.DEFAULT_REPLICA_ID) { checkRegionConsistency(e.getKey(), e.getValue()); @@ -1890,8 +1890,8 @@ public class HBaseFsck extends Configured { private void checkRegionConsistency(final String key, final HbckInfo hbi) throws IOException, KeeperException, InterruptedException { - if (hbi.isSkipChecks()) return; - String descriptiveName = hbi.toString(); + if (hbi.isSkipChecks()) return; + String descriptiveName = hbi.toString(); boolean inMeta = hbi.metaEntry != null; // In case not checking HDFS, assume the region is on HDFS boolean inHdfs = !shouldCheckHdfs() || hbi.getHdfsRegionDir() != null; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionServerTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionServerTracker.java index 025d98e5eeb..1eab2d235d2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionServerTracker.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionServerTracker.java @@ -50,7 +50,7 @@ import org.apache.zookeeper.KeeperException; public class RegionServerTracker extends ZooKeeperListener { private static final Log LOG = LogFactory.getLog(RegionServerTracker.class); private NavigableMap regionServers = - new TreeMap(); + new TreeMap(); private ServerManager serverManager; private Server server; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java index c3ebf193380..01744dd18ae 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java @@ -1032,8 +1032,8 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { * @throws IOException */ public Path getDefaultRootDirPath() throws IOException { - FileSystem fs = FileSystem.get(this.conf); - return new Path(fs.makeQualified(fs.getHomeDirectory()),"hbase"); + FileSystem fs = FileSystem.get(this.conf); + return new Path(fs.makeQualified(fs.getHomeDirectory()),"hbase"); } /** diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBigDecimalColumnInterpreter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBigDecimalColumnInterpreter.java index 43b5ea97cce..7e2d96e44ab 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBigDecimalColumnInterpreter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBigDecimalColumnInterpreter.java @@ -135,7 +135,7 @@ public class TestBigDecimalColumnInterpreter { Scan scan = new Scan(); scan.addColumn(TEST_FAMILY, TEST_QUALIFIER); final ColumnInterpreter ci = - new BigDecimalColumnInterpreter(); + new BigDecimalColumnInterpreter(); BigDecimal median = 
aClient.median(TEST_TABLE, ci, scan); assertEquals(new BigDecimal("8.00"), median); } @@ -154,7 +154,7 @@ public class TestBigDecimalColumnInterpreter { Scan scan = new Scan(); scan.addColumn(TEST_FAMILY, TEST_QUALIFIER); final ColumnInterpreter ci = - new BigDecimalColumnInterpreter(); + new BigDecimalColumnInterpreter(); BigDecimal maximum = aClient.max(TEST_TABLE, ci, scan); assertEquals(new BigDecimal("19.00"), maximum); } @@ -203,7 +203,7 @@ public class TestBigDecimalColumnInterpreter { public void testMaxWithValidRangeWithNullCF() { AggregationClient aClient = new AggregationClient(conf); final ColumnInterpreter ci = - new BigDecimalColumnInterpreter(); + new BigDecimalColumnInterpreter(); Scan scan = new Scan(); BigDecimal max = null; try { @@ -219,7 +219,7 @@ public class TestBigDecimalColumnInterpreter { public void testMaxWithInvalidRange() { AggregationClient aClient = new AggregationClient(conf); final ColumnInterpreter ci = - new BigDecimalColumnInterpreter(); + new BigDecimalColumnInterpreter(); Scan scan = new Scan(); scan.setStartRow(ROWS[4]); scan.setStopRow(ROWS[2]); @@ -244,7 +244,7 @@ public class TestBigDecimalColumnInterpreter { try { AggregationClient aClient = new AggregationClient(conf); final ColumnInterpreter ci = - new BigDecimalColumnInterpreter(); + new BigDecimalColumnInterpreter(); max = aClient.max(TEST_TABLE, ci, scan); } catch (Exception e) { max = BigDecimal.ZERO; @@ -261,7 +261,7 @@ public class TestBigDecimalColumnInterpreter { Filter f = new PrefixFilter(Bytes.toBytes("foo:bar")); scan.setFilter(f); final ColumnInterpreter ci = - new BigDecimalColumnInterpreter(); + new BigDecimalColumnInterpreter(); max = aClient.max(TEST_TABLE, ci, scan); assertEquals(null, max); } @@ -281,7 +281,7 @@ public class TestBigDecimalColumnInterpreter { scan.setStartRow(HConstants.EMPTY_START_ROW); scan.setStopRow(HConstants.EMPTY_END_ROW); final ColumnInterpreter ci = - new BigDecimalColumnInterpreter(); + new BigDecimalColumnInterpreter(); BigDecimal min = aClient.min(TEST_TABLE, ci, scan); assertEquals(new BigDecimal("0.00"), min); } @@ -297,7 +297,7 @@ public class TestBigDecimalColumnInterpreter { scan.setStartRow(ROWS[5]); scan.setStopRow(ROWS[15]); final ColumnInterpreter ci = - new BigDecimalColumnInterpreter(); + new BigDecimalColumnInterpreter(); BigDecimal min = aClient.min(TEST_TABLE, ci, scan); assertEquals(new BigDecimal("5.00"), min); } @@ -310,7 +310,7 @@ public class TestBigDecimalColumnInterpreter { scan.setStartRow(HConstants.EMPTY_START_ROW); scan.setStopRow(HConstants.EMPTY_END_ROW); final ColumnInterpreter ci = - new BigDecimalColumnInterpreter(); + new BigDecimalColumnInterpreter(); BigDecimal min = aClient.min(TEST_TABLE, ci, scan); assertEquals(new BigDecimal("0.00"), min); } @@ -323,7 +323,7 @@ public class TestBigDecimalColumnInterpreter { scan.setStartRow(ROWS[6]); scan.setStopRow(ROWS[7]); final ColumnInterpreter ci = - new BigDecimalColumnInterpreter(); + new BigDecimalColumnInterpreter(); BigDecimal min = aClient.min(TEST_TABLE, ci, scan); assertEquals(new BigDecimal("0.60"), min); } @@ -335,7 +335,7 @@ public class TestBigDecimalColumnInterpreter { scan.setStartRow(ROWS[5]); scan.setStopRow(ROWS[15]); final ColumnInterpreter ci = - new BigDecimalColumnInterpreter(); + new BigDecimalColumnInterpreter(); BigDecimal min = null; try { min = aClient.min(TEST_TABLE, ci, scan); @@ -354,7 +354,7 @@ public class TestBigDecimalColumnInterpreter { scan.setStartRow(ROWS[4]); scan.setStopRow(ROWS[2]); final ColumnInterpreter ci = - new 
BigDecimalColumnInterpreter(); + new BigDecimalColumnInterpreter(); try { min = aClient.min(TEST_TABLE, ci, scan); } catch (Throwable e) { @@ -370,7 +370,7 @@ public class TestBigDecimalColumnInterpreter { scan.setStartRow(ROWS[6]); scan.setStopRow(ROWS[6]); final ColumnInterpreter ci = - new BigDecimalColumnInterpreter(); + new BigDecimalColumnInterpreter(); BigDecimal min = null; try { min = aClient.min(TEST_TABLE, ci, scan); @@ -387,7 +387,7 @@ public class TestBigDecimalColumnInterpreter { Filter f = new PrefixFilter(Bytes.toBytes("foo:bar")); scan.setFilter(f); final ColumnInterpreter ci = - new BigDecimalColumnInterpreter(); + new BigDecimalColumnInterpreter(); BigDecimal min = null; min = aClient.min(TEST_TABLE, ci, scan); assertEquals(null, min); @@ -405,7 +405,7 @@ public class TestBigDecimalColumnInterpreter { Scan scan = new Scan(); scan.addColumn(TEST_FAMILY, TEST_QUALIFIER); final ColumnInterpreter ci = - new BigDecimalColumnInterpreter(); + new BigDecimalColumnInterpreter(); BigDecimal sum = aClient.sum(TEST_TABLE, ci, scan); assertEquals(new BigDecimal("190.00"), sum); } @@ -421,7 +421,7 @@ public class TestBigDecimalColumnInterpreter { scan.setStartRow(ROWS[5]); scan.setStopRow(ROWS[15]); final ColumnInterpreter ci = - new BigDecimalColumnInterpreter(); + new BigDecimalColumnInterpreter(); BigDecimal sum = aClient.sum(TEST_TABLE, ci, scan); assertEquals(new BigDecimal("95.00"), sum); } @@ -432,7 +432,7 @@ public class TestBigDecimalColumnInterpreter { Scan scan = new Scan(); scan.addFamily(TEST_FAMILY); final ColumnInterpreter ci = - new BigDecimalColumnInterpreter(); + new BigDecimalColumnInterpreter(); BigDecimal sum = aClient.sum(TEST_TABLE, ci, scan); assertEquals(new BigDecimal("209.00"), sum); // 190 + 19 } @@ -445,7 +445,7 @@ public class TestBigDecimalColumnInterpreter { scan.setStartRow(ROWS[6]); scan.setStopRow(ROWS[7]); final ColumnInterpreter ci = - new BigDecimalColumnInterpreter(); + new BigDecimalColumnInterpreter(); BigDecimal sum = aClient.sum(TEST_TABLE, ci, scan); assertEquals(new BigDecimal("6.60"), sum); // 6 + 60 } @@ -457,7 +457,7 @@ public class TestBigDecimalColumnInterpreter { scan.setStartRow(ROWS[6]); scan.setStopRow(ROWS[7]); final ColumnInterpreter ci = - new BigDecimalColumnInterpreter(); + new BigDecimalColumnInterpreter(); BigDecimal sum = null; try { sum = aClient.sum(TEST_TABLE, ci, scan); @@ -475,7 +475,7 @@ public class TestBigDecimalColumnInterpreter { scan.setStartRow(ROWS[6]); scan.setStopRow(ROWS[2]); final ColumnInterpreter ci = - new BigDecimalColumnInterpreter(); + new BigDecimalColumnInterpreter(); BigDecimal sum = null; try { sum = aClient.sum(TEST_TABLE, ci, scan); @@ -492,7 +492,7 @@ public class TestBigDecimalColumnInterpreter { scan.addFamily(TEST_FAMILY); scan.setFilter(f); final ColumnInterpreter ci = - new BigDecimalColumnInterpreter(); + new BigDecimalColumnInterpreter(); BigDecimal sum = null; sum = aClient.sum(TEST_TABLE, ci, scan); assertEquals(null, sum); @@ -510,7 +510,7 @@ public class TestBigDecimalColumnInterpreter { Scan scan = new Scan(); scan.addColumn(TEST_FAMILY, TEST_QUALIFIER); final ColumnInterpreter ci = - new BigDecimalColumnInterpreter(); + new BigDecimalColumnInterpreter(); double avg = aClient.avg(TEST_TABLE, ci, scan); assertEquals(9.5, avg, 0); } @@ -526,7 +526,7 @@ public class TestBigDecimalColumnInterpreter { scan.setStartRow(ROWS[5]); scan.setStopRow(ROWS[15]); final ColumnInterpreter ci = - new BigDecimalColumnInterpreter(); + new BigDecimalColumnInterpreter(); double avg = 
aClient.avg(TEST_TABLE, ci, scan); assertEquals(9.5, avg, 0); } @@ -537,7 +537,7 @@ public class TestBigDecimalColumnInterpreter { Scan scan = new Scan(); scan.addFamily(TEST_FAMILY); final ColumnInterpreter ci = - new BigDecimalColumnInterpreter(); + new BigDecimalColumnInterpreter(); double avg = aClient.avg(TEST_TABLE, ci, scan); assertEquals(10.45, avg, 0.01); } @@ -550,7 +550,7 @@ public class TestBigDecimalColumnInterpreter { scan.setStartRow(ROWS[6]); scan.setStopRow(ROWS[7]); final ColumnInterpreter ci = - new BigDecimalColumnInterpreter(); + new BigDecimalColumnInterpreter(); double avg = aClient.avg(TEST_TABLE, ci, scan); assertEquals(6 + 0.60, avg, 0); } @@ -560,7 +560,7 @@ public class TestBigDecimalColumnInterpreter { AggregationClient aClient = new AggregationClient(conf); Scan scan = new Scan(); final ColumnInterpreter ci = - new BigDecimalColumnInterpreter(); + new BigDecimalColumnInterpreter(); Double avg = null; try { avg = aClient.avg(TEST_TABLE, ci, scan); @@ -578,7 +578,7 @@ public class TestBigDecimalColumnInterpreter { scan.setStartRow(ROWS[5]); scan.setStopRow(ROWS[1]); final ColumnInterpreter ci = - new BigDecimalColumnInterpreter(); + new BigDecimalColumnInterpreter(); Double avg = null; try { avg = aClient.avg(TEST_TABLE, ci, scan); @@ -595,7 +595,7 @@ public class TestBigDecimalColumnInterpreter { Filter f = new PrefixFilter(Bytes.toBytes("foo:bar")); scan.setFilter(f); final ColumnInterpreter ci = - new BigDecimalColumnInterpreter(); + new BigDecimalColumnInterpreter(); Double avg = null; avg = aClient.avg(TEST_TABLE, ci, scan); assertEquals(Double.NaN, avg, 0); @@ -613,7 +613,7 @@ public class TestBigDecimalColumnInterpreter { Scan scan = new Scan(); scan.addColumn(TEST_FAMILY, TEST_QUALIFIER); final ColumnInterpreter ci = - new BigDecimalColumnInterpreter(); + new BigDecimalColumnInterpreter(); double std = aClient.std(TEST_TABLE, ci, scan); assertEquals(5.766, std, 0.05d); } @@ -630,7 +630,7 @@ public class TestBigDecimalColumnInterpreter { scan.setStartRow(ROWS[5]); scan.setStopRow(ROWS[15]); final ColumnInterpreter ci = - new BigDecimalColumnInterpreter(); + new BigDecimalColumnInterpreter(); double std = aClient.std(TEST_TABLE, ci, scan); assertEquals(2.87, std, 0.05d); } @@ -645,7 +645,7 @@ public class TestBigDecimalColumnInterpreter { Scan scan = new Scan(); scan.addFamily(TEST_FAMILY); final ColumnInterpreter ci = - new BigDecimalColumnInterpreter(); + new BigDecimalColumnInterpreter(); double std = aClient.std(TEST_TABLE, ci, scan); assertEquals(6.342, std, 0.05d); } @@ -658,7 +658,7 @@ public class TestBigDecimalColumnInterpreter { scan.setStartRow(ROWS[6]); scan.setStopRow(ROWS[7]); final ColumnInterpreter ci = - new BigDecimalColumnInterpreter(); + new BigDecimalColumnInterpreter(); double std = aClient.std(TEST_TABLE, ci, scan); System.out.println("std is:" + std); assertEquals(0, std, 0.05d); @@ -671,7 +671,7 @@ public class TestBigDecimalColumnInterpreter { scan.setStartRow(ROWS[6]); scan.setStopRow(ROWS[17]); final ColumnInterpreter ci = - new BigDecimalColumnInterpreter(); + new BigDecimalColumnInterpreter(); Double std = null; try { std = aClient.std(TEST_TABLE, ci, scan); @@ -689,7 +689,7 @@ public class TestBigDecimalColumnInterpreter { scan.setStartRow(ROWS[6]); scan.setStopRow(ROWS[1]); final ColumnInterpreter ci = - new BigDecimalColumnInterpreter(); + new BigDecimalColumnInterpreter(); Double std = null; try { std = aClient.std(TEST_TABLE, ci, scan); diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java index 29070886524..06e0260d7ff 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java @@ -50,18 +50,18 @@ import org.junit.experimental.categories.Category; public class TestDependentColumnFilter { private final Log LOG = LogFactory.getLog(this.getClass()); private static final byte[][] ROWS = { - Bytes.toBytes("test1"),Bytes.toBytes("test2") + Bytes.toBytes("test1"),Bytes.toBytes("test2") }; private static final byte[][] FAMILIES = { - Bytes.toBytes("familyOne"),Bytes.toBytes("familyTwo") + Bytes.toBytes("familyOne"),Bytes.toBytes("familyTwo") }; private static final long STAMP_BASE = System.currentTimeMillis(); private static final long[] STAMPS = { - STAMP_BASE-100, STAMP_BASE-200, STAMP_BASE-300 + STAMP_BASE-100, STAMP_BASE-200, STAMP_BASE-300 }; private static final byte[] QUALIFIER = Bytes.toBytes("qualifier"); private static final byte[][] BAD_VALS = { - Bytes.toBytes("bad1"), Bytes.toBytes("bad2"), Bytes.toBytes("bad3") + Bytes.toBytes("bad1"), Bytes.toBytes("bad2"), Bytes.toBytes("bad3") }; private static final byte[] MATCH_VAL = Bytes.toBytes("match"); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); @@ -119,14 +119,14 @@ public class TestDependentColumnFilter { } private List makeTestVals() { - List testVals = new ArrayList(); - testVals.add(new KeyValue(ROWS[0], FAMILIES[0], QUALIFIER, STAMPS[0], BAD_VALS[0])); - testVals.add(new KeyValue(ROWS[0], FAMILIES[0], QUALIFIER, STAMPS[1], BAD_VALS[1])); - testVals.add(new KeyValue(ROWS[0], FAMILIES[1], QUALIFIER, STAMPS[1], BAD_VALS[2])); - testVals.add(new KeyValue(ROWS[0], FAMILIES[1], QUALIFIER, STAMPS[0], MATCH_VAL)); - testVals.add(new KeyValue(ROWS[0], FAMILIES[1], QUALIFIER, STAMPS[2], BAD_VALS[2])); + List testVals = new ArrayList(); + testVals.add(new KeyValue(ROWS[0], FAMILIES[0], QUALIFIER, STAMPS[0], BAD_VALS[0])); + testVals.add(new KeyValue(ROWS[0], FAMILIES[0], QUALIFIER, STAMPS[1], BAD_VALS[1])); + testVals.add(new KeyValue(ROWS[0], FAMILIES[1], QUALIFIER, STAMPS[1], BAD_VALS[2])); + testVals.add(new KeyValue(ROWS[0], FAMILIES[1], QUALIFIER, STAMPS[0], MATCH_VAL)); + testVals.add(new KeyValue(ROWS[0], FAMILIES[1], QUALIFIER, STAMPS[2], BAD_VALS[2])); - return testVals; + return testVals; } /** diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestParseFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestParseFilter.java index 132cf9eb1fb..4b2df33b0ad 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestParseFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestParseFilter.java @@ -615,7 +615,7 @@ public class TestParseFilter { @Test public void testUnescapedQuote3 () throws IOException { - String filterString = " InclusiveStopFilter ('''')"; + String filterString = " InclusiveStopFilter ('''')"; InclusiveStopFilter inclusiveStopFilter = doTestFilter(filterString, InclusiveStopFilter.class); byte [] stopRowKey = inclusiveStopFilter.getStopRowKey(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFilePerformance.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFilePerformance.java index ff122340b5b..8f62639e1c8 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFilePerformance.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFilePerformance.java @@ -175,8 +175,8 @@ public class TestHFilePerformance extends AbstractHBaseTool { if ("HFile".equals(fileType)){ HFileContextBuilder builder = new HFileContextBuilder() - .withCompression(AbstractHFileWriter.compressionByName(codecName)) - .withBlockSize(minBlockSize); + .withCompression(AbstractHFileWriter.compressionByName(codecName)) + .withBlockSize(minBlockSize); if (cipherName != "none") { byte[] cipherKey = new byte[AES.KEY_LENGTH]; new SecureRandom().nextBytes(cipherKey); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestClockSkewDetection.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestClockSkewDetection.java index ced0c4ca58c..dd53993c5dd 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestClockSkewDetection.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestClockSkewDetection.java @@ -104,7 +104,7 @@ public class TestClockSkewDetection { long warningSkew = c.getLong("hbase.master.warningclockskew", 1000); try { - //Master Time > Region Server Time + //Master Time > Region Server Time LOG.debug("Test: Master Time > Region Server Time"); LOG.debug("regionServerStartup 2"); InetAddress ia2 = InetAddress.getLocalHost(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java index 03acfdc73e8..df43bd0227f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java @@ -398,7 +398,7 @@ public class TestMajorCompaction { private void createSmallerStoreFile(final HRegion region) throws IOException { HRegionIncommon loader = new HRegionIncommon(region); HBaseTestCase.addContent(loader, Bytes.toString(COLUMN_FAMILY), ("" + - "bbb").getBytes(), null); + "bbb").getBytes(), null); loader.flushcache(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java index 1c8f405cf6a..b9829772fec 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java @@ -287,11 +287,11 @@ public class TestTags { put1.add(fam, qual, HConstants.LATEST_TIMESTAMP, value1); table.put(put1); admin.flush(tableName); - // We are lacking an API for confirming flush request compaction. - // Just sleep for a short time. We won't be able to confirm flush - // completion but the test won't hang now or in the future if - // default compaction policy causes compaction between flush and - // when we go to confirm it. + // We are lacking an API for confirming flush request compaction. + // Just sleep for a short time. We won't be able to confirm flush + // completion but the test won't hang now or in the future if + // default compaction policy causes compaction between flush and + // when we go to confirm it. 
Thread.sleep(1000); put1 = new Put(row2); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java index 2f8b0ae1cb9..43770828184 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java @@ -511,7 +511,7 @@ public class TestReplicationSmallTests extends TestReplicationBase { */ @Test(timeout = 300000) public void testVerifyListReplicatedTable() throws Exception { - LOG.info("testVerifyListReplicatedTable"); + LOG.info("testVerifyListReplicatedTable"); final String tName = "VerifyListReplicated_"; final String colFam = "cf1"; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java index a3a78000d96..45ddddb098b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java @@ -121,7 +121,7 @@ public class LoadTestTool extends AbstractHBaseTool { public static final String OPT_INMEMORY = "in_memory"; public static final String OPT_USAGE_IN_MEMORY = "Tries to keep the HFiles of the CF " + - "inmemory as far as possible. Not guaranteed that reads are always served from inmemory"; + "inmemory as far as possible. Not guaranteed that reads are always served from inmemory"; public static final String OPT_GENERATOR = "generator"; public static final String OPT_GENERATOR_USAGE = "The class which generates load for the tool." diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java index c47c3287d86..ca06e976726 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java @@ -379,9 +379,9 @@ public class MultiThreadedReader extends MultiThreadedAction numKeysVerified.incrementAndGet(); } } else { - HRegionLocation hloc = connection.getRegionLocation(tableName, - get.getRow(), false); - String rowKey = Bytes.toString(get.getRow()); + HRegionLocation hloc = connection.getRegionLocation(tableName, + get.getRow(), false); + String rowKey = Bytes.toString(get.getRow()); LOG.info("Key = " + rowKey + ", Region location: " + hloc); if(isNullExpected) { nullResult.incrementAndGet(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java index 464f44b1c81..c61bd78c562 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java @@ -300,7 +300,7 @@ public class MultiThreadedUpdater extends MultiThreadedWriterBase { } catch (IOException e) { if (ignoreNonceConflicts && (e instanceof OperationConflictException)) { LOG.info("Detected nonce conflict, ignoring: " + e.getMessage()); - totalOpTimeMs.addAndGet(System.currentTimeMillis() - start); + totalOpTimeMs.addAndGet(System.currentTimeMillis() - start); return; } failedKeySet.add(keyBase); diff --git a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestHTablePool.java 
b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestHTablePool.java index 7cc006143f2..2826b05e2bc 100644 --- a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestHTablePool.java +++ b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestHTablePool.java @@ -59,75 +59,75 @@ public class TestHTablePool { protected abstract PoolType getPoolType(); - @Test - public void testTableWithStringName() throws Exception { - HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(), - Integer.MAX_VALUE, getPoolType()); - String tableName = TABLENAME; + @Test + public void testTableWithStringName() throws Exception { + HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(), + Integer.MAX_VALUE, getPoolType()); + String tableName = TABLENAME; - // Request a table from an empty pool - Table table = pool.getTable(tableName); - Assert.assertNotNull(table); + // Request a table from an empty pool + Table table = pool.getTable(tableName); + Assert.assertNotNull(table); - // Close table (returns table to the pool) - table.close(); + // Close table (returns table to the pool) + table.close(); - // Request a table of the same name - Table sameTable = pool.getTable(tableName); - Assert.assertSame( - ((HTablePool.PooledHTable) table).getWrappedTable(), - ((HTablePool.PooledHTable) sameTable).getWrappedTable()); - } + // Request a table of the same name + Table sameTable = pool.getTable(tableName); + Assert.assertSame( + ((HTablePool.PooledHTable) table).getWrappedTable(), + ((HTablePool.PooledHTable) sameTable).getWrappedTable()); + } - @Test - public void testTableWithByteArrayName() throws IOException { - HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(), - Integer.MAX_VALUE, getPoolType()); + @Test + public void testTableWithByteArrayName() throws IOException { + HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(), + Integer.MAX_VALUE, getPoolType()); - // Request a table from an empty pool - Table table = pool.getTable(TABLENAME); - Assert.assertNotNull(table); + // Request a table from an empty pool + Table table = pool.getTable(TABLENAME); + Assert.assertNotNull(table); - // Close table (returns table to the pool) - table.close(); + // Close table (returns table to the pool) + table.close(); - // Request a table of the same name - Table sameTable = pool.getTable(TABLENAME); - Assert.assertSame( - ((HTablePool.PooledHTable) table).getWrappedTable(), - ((HTablePool.PooledHTable) sameTable).getWrappedTable()); - } + // Request a table of the same name + Table sameTable = pool.getTable(TABLENAME); + Assert.assertSame( + ((HTablePool.PooledHTable) table).getWrappedTable(), + ((HTablePool.PooledHTable) sameTable).getWrappedTable()); + } - @Test - public void testTablesWithDifferentNames() throws IOException { - HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(), - Integer.MAX_VALUE, getPoolType()); + @Test + public void testTablesWithDifferentNames() throws IOException { + HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(), + Integer.MAX_VALUE, getPoolType()); // We add the class to the table name as the HBase cluster is reused // during the tests: this gives naming unicity. 
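For context on the assertions that follow: the pool contract these tests exercise is borrow, return-on-close, and identity reuse. A hedged sketch of that lifecycle, assuming the deprecated org.apache.hadoop.hbase.client.HTablePool, PoolType from org.apache.hadoop.hbase.util.PoolMap, and a Closeable pool; "myTable" is a made-up name:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hbase.HBaseConfiguration;
  import org.apache.hadoop.hbase.client.HTablePool;
  import org.apache.hadoop.hbase.client.Table;
  import org.apache.hadoop.hbase.util.PoolMap.PoolType;

  public class HTablePoolSketch {
    public static void main(String[] args) throws Exception {
      Configuration conf = HBaseConfiguration.create();
      HTablePool pool = new HTablePool(conf, Integer.MAX_VALUE, PoolType.Reusable);

      Table first = pool.getTable("myTable");  // empty pool: a new table is opened
      first.close();                           // close() re-pools it rather than closing it

      Table second = pool.getTable("myTable"); // the pooled instance is handed back,
      second.close();                          // which is what assertSame checks above

      pool.closeTablePool("myTable");          // really close everything pooled under this name
      pool.close();
    }
  }
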
-			byte[] otherTable = Bytes.toBytes(
+      byte[] otherTable = Bytes.toBytes(
           "OtherTable_" + getClass().getSimpleName()
       );
-			TEST_UTIL.createTable(otherTable, HConstants.CATALOG_FAMILY);
+      TEST_UTIL.createTable(otherTable, HConstants.CATALOG_FAMILY);
 
-			// Request a table from an empty pool
-			Table table1 = pool.getTable(TABLENAME);
-			Table table2 = pool.getTable(otherTable);
-			Assert.assertNotNull(table2);
+      // Request a table from an empty pool
+      Table table1 = pool.getTable(TABLENAME);
+      Table table2 = pool.getTable(otherTable);
+      Assert.assertNotNull(table2);
 
-			// Close tables (returns tables to the pool)
-			table1.close();
-			table2.close();
+      // Close tables (returns tables to the pool)
+      table1.close();
+      table2.close();
 
-			// Request tables of the same names
-			Table sameTable1 = pool.getTable(TABLENAME);
-			Table sameTable2 = pool.getTable(otherTable);
-			Assert.assertSame(
-				((HTablePool.PooledHTable) table1).getWrappedTable(),
-				((HTablePool.PooledHTable) sameTable1).getWrappedTable());
-			Assert.assertSame(
-				((HTablePool.PooledHTable) table2).getWrappedTable(),
-				((HTablePool.PooledHTable) sameTable2).getWrappedTable());
-		}
+      // Request tables of the same names
+      Table sameTable1 = pool.getTable(TABLENAME);
+      Table sameTable2 = pool.getTable(otherTable);
+      Assert.assertSame(
+        ((HTablePool.PooledHTable) table1).getWrappedTable(),
+        ((HTablePool.PooledHTable) sameTable1).getWrappedTable());
+      Assert.assertSame(
+        ((HTablePool.PooledHTable) table2).getWrappedTable(),
+        ((HTablePool.PooledHTable) sameTable2).getWrappedTable());
+    }
 
     @Test
     public void testProxyImplementationReturned() {
       HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(),
@@ -146,8 +146,8 @@ public class TestHTablePool {
       HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(),
         Integer.MAX_VALUE);
       String tableName = TABLENAME;// Request a table from
-				// an
-				// empty pool
+        // an
+        // empty pool
 
       // get table will return proxy implementation
       HTableInterface table = pool.getTable(tableName);
@@ -168,8 +168,8 @@ public class TestHTablePool {
       HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(),
         Integer.MAX_VALUE);
       String tableName = TABLENAME;// Request a table from
-				// an
-				// empty pool
+        // an
+        // empty pool
 
       // get table will return proxy implementation
       final Table table = pool.getTable(tableName);
@@ -213,154 +213,154 @@ public class TestHTablePool {
   }
 
   @Category({ClientTests.class, MediumTests.class})
-	public static class TestHTableReusablePool extends TestHTablePoolType {
-		@Override
-		protected PoolType getPoolType() {
-			return PoolType.Reusable;
-		}
+  public static class TestHTableReusablePool extends TestHTablePoolType {
+    @Override
+    protected PoolType getPoolType() {
+      return PoolType.Reusable;
+    }
 
-		@Test
-		public void testTableWithMaxSize() throws Exception {
-			HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(), 2,
-				getPoolType());
+    @Test
+    public void testTableWithMaxSize() throws Exception {
+      HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(), 2,
+        getPoolType());
 
-			// Request tables from an empty pool
-			Table table1 = pool.getTable(TABLENAME);
-			Table table2 = pool.getTable(TABLENAME);
-			Table table3 = pool.getTable(TABLENAME);
+      // Request tables from an empty pool
+      Table table1 = pool.getTable(TABLENAME);
+      Table table2 = pool.getTable(TABLENAME);
+      Table table3 = pool.getTable(TABLENAME);
 
-			// Close tables (returns tables to the pool)
-			table1.close();
-			table2.close();
-			// The pool should reject this one since it is already full
-			table3.close();
+      // Close tables (returns tables to the pool)
+      table1.close();
+      table2.close();
+      // The pool should reject this one since it is already full
+      table3.close();
 
-			// Request tables of the same name
-			Table sameTable1 = pool.getTable(TABLENAME);
-			Table sameTable2 = pool.getTable(TABLENAME);
-			Table sameTable3 = pool.getTable(TABLENAME);
-			Assert.assertSame(
-				((HTablePool.PooledHTable) table1).getWrappedTable(),
-				((HTablePool.PooledHTable) sameTable1).getWrappedTable());
-			Assert.assertSame(
-				((HTablePool.PooledHTable) table2).getWrappedTable(),
-				((HTablePool.PooledHTable) sameTable2).getWrappedTable());
-			Assert.assertNotSame(
-				((HTablePool.PooledHTable) table3).getWrappedTable(),
-				((HTablePool.PooledHTable) sameTable3).getWrappedTable());
-		}
+      // Request tables of the same name
+      Table sameTable1 = pool.getTable(TABLENAME);
+      Table sameTable2 = pool.getTable(TABLENAME);
+      Table sameTable3 = pool.getTable(TABLENAME);
+      Assert.assertSame(
+        ((HTablePool.PooledHTable) table1).getWrappedTable(),
+        ((HTablePool.PooledHTable) sameTable1).getWrappedTable());
+      Assert.assertSame(
+        ((HTablePool.PooledHTable) table2).getWrappedTable(),
+        ((HTablePool.PooledHTable) sameTable2).getWrappedTable());
+      Assert.assertNotSame(
+        ((HTablePool.PooledHTable) table3).getWrappedTable(),
+        ((HTablePool.PooledHTable) sameTable3).getWrappedTable());
+    }
 
-		@Test
-		public void testCloseTablePool() throws IOException {
-			HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(), 4,
-				getPoolType());
-			HBaseAdmin admin = new HBaseAdmin(TEST_UTIL.getConfiguration());
+    @Test
+    public void testCloseTablePool() throws IOException {
+      HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(), 4,
+        getPoolType());
+      HBaseAdmin admin = new HBaseAdmin(TEST_UTIL.getConfiguration());
 
-			if (admin.tableExists(TABLENAME)) {
-				admin.disableTable(TABLENAME);
-				admin.deleteTable(TABLENAME);
-			}
+      if (admin.tableExists(TABLENAME)) {
+        admin.disableTable(TABLENAME);
+        admin.deleteTable(TABLENAME);
+      }
 
-			HTableDescriptor tableDescriptor = new HTableDescriptor(TableName.valueOf(TABLENAME));
-			tableDescriptor.addFamily(new HColumnDescriptor("randomFamily"));
-			admin.createTable(tableDescriptor);
+      HTableDescriptor tableDescriptor = new HTableDescriptor(TableName.valueOf(TABLENAME));
+      tableDescriptor.addFamily(new HColumnDescriptor("randomFamily"));
+      admin.createTable(tableDescriptor);
 
-			// Request tables from an empty pool
-			Table[] tables = new Table[4];
-			for (int i = 0; i < 4; ++i) {
-				tables[i] = pool.getTable(TABLENAME);
-			}
+      // Request tables from an empty pool
+      Table[] tables = new Table[4];
+      for (int i = 0; i < 4; ++i) {
+        tables[i] = pool.getTable(TABLENAME);
+      }
 
-			pool.closeTablePool(TABLENAME);
+      pool.closeTablePool(TABLENAME);
 
-			for (int i = 0; i < 4; ++i) {
-				tables[i].close();
-			}
+      for (int i = 0; i < 4; ++i) {
+        tables[i].close();
+      }
 
-			Assert.assertEquals(4,
-				pool.getCurrentPoolSize(TABLENAME));
+      Assert.assertEquals(4,
+        pool.getCurrentPoolSize(TABLENAME));
 
-			pool.closeTablePool(TABLENAME);
+      pool.closeTablePool(TABLENAME);
 
-			Assert.assertEquals(0,
-				pool.getCurrentPoolSize(TABLENAME));
-		}
-	}
+      Assert.assertEquals(0,
+        pool.getCurrentPoolSize(TABLENAME));
+    }
+  }
 
   @Category({ClientTests.class, MediumTests.class})
-	public static class TestHTableThreadLocalPool extends TestHTablePoolType {
-		@Override
-		protected PoolType getPoolType() {
-			return PoolType.ThreadLocal;
-		}
+  public static class TestHTableThreadLocalPool extends TestHTablePoolType {
+    @Override
+    protected PoolType getPoolType() {
+      return PoolType.ThreadLocal;
+    }
 
-		@Test
-		public void testTableWithMaxSize() throws Exception {
-			HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(), 2,
-				getPoolType());
+    @Test
+    public void testTableWithMaxSize() throws Exception {
+      HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(), 2,
+        getPoolType());
 
-			// Request tables from an empty pool
-			Table table1 = pool.getTable(TABLENAME);
-			Table table2 = pool.getTable(TABLENAME);
-			Table table3 = pool.getTable(TABLENAME);
+      // Request tables from an empty pool
+      Table table1 = pool.getTable(TABLENAME);
+      Table table2 = pool.getTable(TABLENAME);
+      Table table3 = pool.getTable(TABLENAME);
 
-			// Close tables (returns tables to the pool)
-			table1.close();
-			table2.close();
-			// The pool should not reject this one since the number of threads
-			// <= 2
-			table3.close();
+      // Close tables (returns tables to the pool)
+      table1.close();
+      table2.close();
+      // The pool should not reject this one since the number of threads
+      // <= 2
+      table3.close();
 
-			// Request tables of the same name
-			Table sameTable1 = pool.getTable(TABLENAME);
-			Table sameTable2 = pool.getTable(TABLENAME);
-			Table sameTable3 = pool.getTable(TABLENAME);
-			Assert.assertSame(
-				((HTablePool.PooledHTable) table3).getWrappedTable(),
-				((HTablePool.PooledHTable) sameTable1).getWrappedTable());
-			Assert.assertSame(
-				((HTablePool.PooledHTable) table3).getWrappedTable(),
-				((HTablePool.PooledHTable) sameTable2).getWrappedTable());
-			Assert.assertSame(
-				((HTablePool.PooledHTable) table3).getWrappedTable(),
-				((HTablePool.PooledHTable) sameTable3).getWrappedTable());
-		}
+      // Request tables of the same name
+      Table sameTable1 = pool.getTable(TABLENAME);
+      Table sameTable2 = pool.getTable(TABLENAME);
+      Table sameTable3 = pool.getTable(TABLENAME);
+      Assert.assertSame(
+        ((HTablePool.PooledHTable) table3).getWrappedTable(),
+        ((HTablePool.PooledHTable) sameTable1).getWrappedTable());
+      Assert.assertSame(
+        ((HTablePool.PooledHTable) table3).getWrappedTable(),
+        ((HTablePool.PooledHTable) sameTable2).getWrappedTable());
+      Assert.assertSame(
+        ((HTablePool.PooledHTable) table3).getWrappedTable(),
+        ((HTablePool.PooledHTable) sameTable3).getWrappedTable());
+    }
 
-		@Test
-		public void testCloseTablePool() throws IOException {
-			HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(), 4,
-				getPoolType());
-			HBaseAdmin admin = new HBaseAdmin(TEST_UTIL.getConfiguration());
+    @Test
+    public void testCloseTablePool() throws IOException {
+      HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(), 4,
+        getPoolType());
+      HBaseAdmin admin = new HBaseAdmin(TEST_UTIL.getConfiguration());
 
-			if (admin.tableExists(TABLENAME)) {
-				admin.disableTable(TABLENAME);
-				admin.deleteTable(TABLENAME);
-			}
+      if (admin.tableExists(TABLENAME)) {
+        admin.disableTable(TABLENAME);
+        admin.deleteTable(TABLENAME);
+      }
 
-			HTableDescriptor tableDescriptor = new HTableDescriptor(TableName.valueOf(TABLENAME));
-			tableDescriptor.addFamily(new HColumnDescriptor("randomFamily"));
-			admin.createTable(tableDescriptor);
+      HTableDescriptor tableDescriptor = new HTableDescriptor(TableName.valueOf(TABLENAME));
+      tableDescriptor.addFamily(new HColumnDescriptor("randomFamily"));
+      admin.createTable(tableDescriptor);
 
-			// Request tables from an empty pool
-			Table[] tables = new Table[4];
-			for (int i = 0; i < 4; ++i) {
-				tables[i] = pool.getTable(TABLENAME);
-			}
+      // Request tables from an empty pool
+      Table[] tables = new Table[4];
+      for (int i = 0; i < 4; ++i) {
+        tables[i] = pool.getTable(TABLENAME);
+      }
 
-			pool.closeTablePool(TABLENAME);
+      pool.closeTablePool(TABLENAME);
 
-			for (int i = 0; i < 4; ++i) {
-				tables[i].close();
-			}
+      for (int i = 0; i < 4; ++i) {
+        tables[i].close();
+      }
 
-			Assert.assertEquals(1,
-				pool.getCurrentPoolSize(TABLENAME));
+      Assert.assertEquals(1,
+        pool.getCurrentPoolSize(TABLENAME));
 
-			pool.closeTablePool(TABLENAME);
+      pool.closeTablePool(TABLENAME);
 
-			Assert.assertEquals(0,
-				pool.getCurrentPoolSize(TABLENAME));
-		}
-	}
+      Assert.assertEquals(0,
+        pool.getCurrentPoolSize(TABLENAME));
+    }
+  }
 }