diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java index 0d3ead055d3..2f6cb3379d7 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java @@ -610,11 +610,11 @@ public class HColumnDescriptor implements Comparable { // TODO: Allow maxVersion of 0 to be the way you say "Keep all versions". // Until there is support, consider 0 or < 0 -- a configuration error. throw new IllegalArgumentException("Maximum versions must be positive"); - } - if (maxVersions < this.getMinVersions()) { + } + if (maxVersions < this.getMinVersions()) { throw new IllegalArgumentException("Set MaxVersion to " + maxVersions + " while minVersion is " + this.getMinVersions() - + ". Maximum versions must be >= minimum versions "); + + ". Maximum versions must be >= minimum versions "); } setValue(HConstants.VERSIONS, Integer.toString(maxVersions)); cachedMaxVersions = maxVersions; @@ -709,7 +709,7 @@ public class HColumnDescriptor implements Comparable { /** * Set whether the tags should be compressed along with DataBlockEncoding. When no * DataBlockEncoding is been used, this is having no effect. - * + * * @param compressTags * @return this (for chained invocation) */ @@ -751,7 +751,7 @@ public class HColumnDescriptor implements Comparable { } /** - * @return True if we are to favor keeping all values for this column family in the + * @return True if we are to favor keeping all values for this column family in the * HRegionServer cache. 
*/ public boolean isInMemory() { @@ -1118,6 +1118,7 @@ public class HColumnDescriptor implements Comparable { } // Comparable + @Override public int compareTo(HColumnDescriptor o) { int result = Bytes.compareTo(this.name, o.getName()); if (result == 0) { @@ -1225,12 +1226,13 @@ public class HColumnDescriptor implements Comparable { * @param key Config key. Same as XML config key e.g. hbase.something.or.other. * @param value String value. If null, removes the configuration. */ - public void setConfiguration(String key, String value) { + public HColumnDescriptor setConfiguration(String key, String value) { if (value == null) { removeConfiguration(key); } else { configuration.put(key, value); } + return this; } /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java index 0e9e25c2d8c..cd8357f2a5b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java @@ -254,6 +254,7 @@ public class HTableDescriptor implements Comparable { * INTERNAL Private constructor used internally creating table descriptors for * catalog tables, hbase:meta and -ROOT-. */ + @InterfaceAudience.Private protected HTableDescriptor(final TableName name, HColumnDescriptor[] families) { setName(name); for(HColumnDescriptor descriptor : families) { @@ -475,17 +476,19 @@ public class HTableDescriptor implements Comparable { * @param value The value. * @see #values */ - public void setValue(byte[] key, byte[] value) { + public HTableDescriptor setValue(byte[] key, byte[] value) { setValue(new Bytes(key), new Bytes(value)); + return this; } /* * @param key The key. * @param value The value. 
*/ - private void setValue(final Bytes key, + private HTableDescriptor setValue(final Bytes key, final String value) { setValue(key, new Bytes(Bytes.toBytes(value))); + return this; } /* @@ -494,16 +497,17 @@ public class HTableDescriptor implements Comparable { * @param key The key. * @param value The value. */ - public void setValue(final Bytes key, + public HTableDescriptor setValue(final Bytes key, final Bytes value) { if (key.compareTo(DEFERRED_LOG_FLUSH_KEY) == 0) { boolean isDeferredFlush = Boolean.valueOf(Bytes.toString(value.get())); LOG.warn("HTableDescriptor property:" + DEFERRED_LOG_FLUSH + " is deprecated, " + "use " + DURABILITY + " instead"); setDurability(isDeferredFlush ? Durability.ASYNC_WAL : DEFAULT_DURABLITY); - return; + return this; } values.put(key, value); + return this; } /** @@ -513,12 +517,13 @@ public class HTableDescriptor implements Comparable { * @param value The value. * @see #values */ - public void setValue(String key, String value) { + public HTableDescriptor setValue(String key, String value) { if (value == null) { remove(key); } else { setValue(Bytes.toBytes(key), Bytes.toBytes(value)); } + return this; } /** @@ -569,8 +574,8 @@ public class HTableDescriptor implements Comparable { * @param readOnly True if all of the columns in the table should be read * only. */ - public void setReadOnly(final boolean readOnly) { - setValue(READONLY_KEY, readOnly? TRUE: FALSE); + public HTableDescriptor setReadOnly(final boolean readOnly) { + return setValue(READONLY_KEY, readOnly? TRUE: FALSE); } /** @@ -588,17 +593,19 @@ public class HTableDescriptor implements Comparable { * * @param isEnable True if enable compaction. */ - public void setCompactionEnabled(final boolean isEnable) { + public HTableDescriptor setCompactionEnabled(final boolean isEnable) { setValue(COMPACTION_ENABLED_KEY, isEnable ? TRUE : FALSE); + return this; } /** * Sets the {@link Durability} setting for the table. This defaults to Durability.USE_DEFAULT. 
* @param durability enum value */ - public void setDurability(Durability durability) { + public HTableDescriptor setDurability(Durability durability) { this.durability = durability; setValue(DURABILITY_KEY, durability.name()); + return this; } /** @@ -636,7 +643,9 @@ public class HTableDescriptor implements Comparable { * Get the name of the table as a byte array. * * @return name of table + * @deprecated Use {@link #getTableName()} instead */ + @Deprecated public byte[] getName() { return name.getName(); } @@ -656,8 +665,9 @@ public class HTableDescriptor implements Comparable { * default is defined in {@link org.apache.hadoop.hbase.regionserver.RegionSplitPolicy} * @param clazz the class name */ - public void setRegionSplitPolicyClassName(String clazz) { + public HTableDescriptor setRegionSplitPolicyClassName(String clazz) { setValue(SPLIT_POLICY, clazz); + return this; } /** @@ -678,14 +688,16 @@ public class HTableDescriptor implements Comparable { * @param name name of table */ @Deprecated - public void setName(byte[] name) { + public HTableDescriptor setName(byte[] name) { setName(TableName.valueOf(name)); + return this; } @Deprecated - public void setName(TableName name) { + public HTableDescriptor setName(TableName name) { this.name = name; setMetaFlags(this.name); + return this; } /** @@ -720,8 +732,9 @@ public class HTableDescriptor implements Comparable { * @param maxFileSize The maximum file size that a store file can grow to * before a split is triggered. 
*/ - public void setMaxFileSize(long maxFileSize) { + public HTableDescriptor setMaxFileSize(long maxFileSize) { setValue(MAX_FILESIZE_KEY, Long.toString(maxFileSize)); + return this; } /** @@ -745,19 +758,21 @@ public class HTableDescriptor implements Comparable { * * @param memstoreFlushSize memory cache flush size for each hregion */ - public void setMemStoreFlushSize(long memstoreFlushSize) { + public HTableDescriptor setMemStoreFlushSize(long memstoreFlushSize) { setValue(MEMSTORE_FLUSHSIZE_KEY, Long.toString(memstoreFlushSize)); + return this; } /** * Adds a column family. * @param family HColumnDescriptor of family to add. */ - public void addFamily(final HColumnDescriptor family) { + public HTableDescriptor addFamily(final HColumnDescriptor family) { if (family.getName() == null || family.getName().length <= 0) { throw new NullPointerException("Family name cannot be null or empty"); } this.families.put(family.getName(), family); + return this; } /** @@ -999,9 +1014,10 @@ public class HTableDescriptor implements Comparable { * Sets the number of replicas per region. * @param regionReplication the replication factor per region */ - public void setRegionReplication(int regionReplication) { + public HTableDescriptor setRegionReplication(int regionReplication) { setValue(REGION_REPLICATION_KEY, new Bytes(Bytes.toBytes(Integer.toString(regionReplication)))); + return this; } /** @@ -1066,8 +1082,9 @@ public class HTableDescriptor implements Comparable { * @param className Full class name. * @throws IOException */ - public void addCoprocessor(String className) throws IOException { + public HTableDescriptor addCoprocessor(String className) throws IOException { addCoprocessor(className, null, Coprocessor.PRIORITY_USER, null); + return this; } @@ -1085,7 +1102,7 @@ public class HTableDescriptor implements Comparable { * @param kvs Arbitrary key-value parameter pairs passed into the coprocessor. 
* @throws IOException */ - public void addCoprocessor(String className, Path jarFilePath, + public HTableDescriptor addCoprocessor(String className, Path jarFilePath, int priority, final Map kvs) throws IOException { if (hasCoprocessor(className)) { @@ -1132,6 +1149,7 @@ public class HTableDescriptor implements Comparable { "|" + className + "|" + Integer.toString(priority) + "|" + kvString.toString(); setValue(key, value); + return this; } @@ -1291,18 +1309,19 @@ public class HTableDescriptor implements Comparable { }); @Deprecated - public void setOwner(User owner) { - setOwnerString(owner != null ? owner.getShortName() : null); + public HTableDescriptor setOwner(User owner) { + return setOwnerString(owner != null ? owner.getShortName() : null); } // used by admin.rb:alter(table_name,*args) to update owner. @Deprecated - public void setOwnerString(String ownerString) { + public HTableDescriptor setOwnerString(String ownerString) { if (ownerString != null) { setValue(OWNER_KEY, ownerString); } else { remove(OWNER_KEY); } + return this; } @Deprecated @@ -1414,12 +1433,13 @@ public class HTableDescriptor implements Comparable { * @param key Config key. Same as XML config key e.g. hbase.something.or.other. * @param value String value. If null, removes the setting. 
*/ - public void setConfiguration(String key, String value) { + public HTableDescriptor setConfiguration(String key, String value) { if (value == null) { removeConfiguration(key); } else { configuration.put(key, value); } + return this; } /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java index d6fcb521697..95c21283c5c 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java @@ -61,9 +61,10 @@ public class UnmodifyableHTableDescriptor extends HTableDescriptor { /** * Does NOT add a column family. This object is immutable * @param family HColumnDescriptor of familyto add. + * @return never; always throws UnsupportedOperationException */ @Override - public void addFamily(final HColumnDescriptor family) { + public HTableDescriptor addFamily(final HColumnDescriptor family) { throw new UnsupportedOperationException("HTableDescriptor is read-only"); } @@ -78,42 +79,47 @@ public class UnmodifyableHTableDescriptor extends HTableDescriptor { } /** + * @return never; always throws UnsupportedOperationException * @see org.apache.hadoop.hbase.HTableDescriptor#setReadOnly(boolean) */ @Override - public void setReadOnly(boolean readOnly) { + public HTableDescriptor setReadOnly(boolean readOnly) { throw new UnsupportedOperationException("HTableDescriptor is read-only"); } /** + * @return never; always throws UnsupportedOperationException * @see org.apache.hadoop.hbase.HTableDescriptor#setValue(byte[], byte[]) */ @Override - public void setValue(byte[] key, byte[] value) { + public HTableDescriptor setValue(byte[] key, byte[] value) { throw new UnsupportedOperationException("HTableDescriptor is read-only"); } /** + * @return never; always throws UnsupportedOperationException * @see org.apache.hadoop.hbase.HTableDescriptor#setValue(java.lang.String, java.lang.String) */ @Override - public void setValue(String key, String value) { + public HTableDescriptor setValue(String key, String value) 
{ throw new UnsupportedOperationException("HTableDescriptor is read-only"); } /** + * @return never; always throws UnsupportedOperationException * @see org.apache.hadoop.hbase.HTableDescriptor#setMaxFileSize(long) */ @Override - public void setMaxFileSize(long maxFileSize) { + public HTableDescriptor setMaxFileSize(long maxFileSize) { throw new UnsupportedOperationException("HTableDescriptor is read-only"); } /** + * @return never; always throws UnsupportedOperationException * @see org.apache.hadoop.hbase.HTableDescriptor#setMemStoreFlushSize(long) */ @Override - public void setMemStoreFlushSize(long memstoreFlushSize) { + public HTableDescriptor setMemStoreFlushSize(long memstoreFlushSize) { throw new UnsupportedOperationException("HTableDescriptor is read-only"); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java similarity index 87% rename from hbase-server/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java rename to hbase-client/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java index 132004d1a3e..98a68d61fd7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java @@ -27,8 +27,8 @@ import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.regionserver.BloomType; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.apache.hadoop.hbase.util.BuilderStyleTest; import org.junit.experimental.categories.Category; - import org.junit.Test; /** Tests the HColumnDescriptor with appropriate arguments */ @@ -96,4 +96,21 @@ public class TestHColumnDescriptor { desc.removeConfiguration(key); assertEquals(null, desc.getConfigurationValue(key)); } + + @Test + public void testClassMethodsAreBuilderStyle() { + /* HColumnDescriptor should have a builder style setup where setXXX/addXXX methods + * can 
be chainable together: + * . For example: + * HColumnDescriptor hcd + * = new HColumnDescriptor() + * .setFoo(foo) + * .setBar(bar) + * .setBuz(buz) + * + * This test ensures that all methods starting with "set" returns the declaring object + */ + + BuilderStyleTest.assertClassesAreBuilderStyle(HColumnDescriptor.class); + } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java similarity index 89% rename from hbase-server/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java rename to hbase-client/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java index c52eecb08a4..0fa24b41428 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java @@ -28,11 +28,10 @@ import java.util.regex.Pattern; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.client.Durability; -import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver; -import org.apache.hadoop.hbase.coprocessor.SampleRegionWALObserver; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.apache.hadoop.hbase.util.BuilderStyleTest; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -69,7 +68,7 @@ public class TestHTableDescriptor { public void testGetSetRemoveCP() throws Exception { HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("table")); // simple CP - String className = BaseRegionObserver.class.getName(); + String className = "org.apache.hadoop.hbase.coprocessor.BaseRegionObserver"; // add and check that it is present desc.addCoprocessor(className); 
assertTrue(desc.hasCoprocessor(className)); @@ -86,8 +85,8 @@ public class TestHTableDescriptor { public void testSetListRemoveCP() throws Exception { HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("testGetSetRemoveCP")); // simple CP - String className1 = BaseRegionObserver.class.getName(); - String className2 = SampleRegionWALObserver.class.getName(); + String className1 = "org.apache.hadoop.hbase.coprocessor.BaseRegionObserver"; + String className2 = "org.apache.hadoop.hbase.coprocessor.SampleRegionWALObserver"; // Check that any coprocessor is present. assertTrue(desc.getCoprocessors().size() == 0); @@ -209,4 +208,21 @@ public class TestHTableDescriptor { desc.removeConfiguration(key); assertEquals(null, desc.getConfigurationValue(key)); } + + @Test + public void testClassMethodsAreBuilderStyle() { + /* HTableDescriptor should have a builder style setup where setXXX/addXXX methods + * can be chainable together: + * . For example: + * HTableDescriptor htd + * = new HTableDescriptor() + * .setFoo(foo) + * .setBar(bar) + * .setBuz(buz) + * + * This test ensures that all methods starting with "set" returns the declaring object + */ + + BuilderStyleTest.assertClassesAreBuilderStyle(HTableDescriptor.class); + } } diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java index d85cffc2aca..ee65edee2c9 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java @@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.client; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; - import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.testclassification.ClientTests; @@ -32,7 +30,6 @@ import org.junit.Assert; 
import org.junit.Test; import java.io.IOException; -import java.lang.reflect.Method; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.HashMap; @@ -64,8 +61,8 @@ import org.apache.hadoop.hbase.filter.SkipFilter; import org.apache.hadoop.hbase.filter.TimestampsFilter; import org.apache.hadoop.hbase.filter.ValueFilter; import org.apache.hadoop.hbase.filter.WhileMatchFilter; +import org.apache.hadoop.hbase.util.BuilderStyleTest; import org.apache.hadoop.hbase.util.Bytes; - import org.codehaus.jackson.map.ObjectMapper; import org.junit.experimental.categories.Category; @@ -433,7 +430,7 @@ public class TestOperation { * .setBar(bar) * .setBuz(buz) * - * This test ensures that all methods starting with "set" returns an Operation object + * This test ensures that all methods starting with "set" returns the declaring object */ // TODO: We should ensure all subclasses of Operation is checked. @@ -449,22 +446,7 @@ public class TestOperation { Get.class, Scan.class}; - for (Class clazz : classes) { - System.out.println("Checking " + clazz); - Method[] methods = clazz.getDeclaredMethods(); - for (Method method : methods) { - Class ret = method.getReturnType(); - if (method.getName().startsWith("set") || method.getName().startsWith("add")) { - System.out.println(" " + clazz.getSimpleName() + "." + method.getName() + "() : " - + ret.getSimpleName()); - String errorMsg = "All setXXX() methods in " + clazz.getSimpleName() + " should return a " - + clazz.getSimpleName() + " object in builder style. 
Offending method:" - + method.getName(); - assertTrue(errorMsg, Operation.class.isAssignableFrom(ret) - || Attributes.class.isAssignableFrom(ret)); // for setAttributes() - } - } - } + BuilderStyleTest.assertClassesAreBuilderStyle(classes); } } diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/util/BuilderStyleTest.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/util/BuilderStyleTest.java new file mode 100644 index 00000000000..d2d0a53032a --- /dev/null +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/util/BuilderStyleTest.java @@ -0,0 +1,104 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hbase.util; + +import static org.junit.Assert.assertTrue; + +import java.lang.reflect.Method; +import java.lang.reflect.Modifier; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +/** + * Utility class to check whether a given class conforms to builder-style: + * Foo foo = + * new Foo() + * .setBar(bar) + * .setBaz(baz) + */ +public class BuilderStyleTest { + + /* + * If a base class Foo declares a method setFoo() returning Foo, then the subclass should + * re-declare the methods overriding the return class with the subclass: + * + * class Foo { + * Foo setFoo() { + * .. + * return this; + * } + * } + * + * class Bar { + * Bar setFoo() { + * return (Bar) super.setFoo(); + * } + * } + * + */ + @SuppressWarnings("rawtypes") + public static void assertClassesAreBuilderStyle(Class... classes) { + for (Class clazz : classes) { + System.out.println("Checking " + clazz); + Method[] methods = clazz.getDeclaredMethods(); + Map> methodsBySignature = new HashMap<>(); + for (Method method : methods) { + if (!Modifier.isPublic(method.getModifiers())) { + continue; // only public classes + } + Class ret = method.getReturnType(); + if (method.getName().startsWith("set") || method.getName().startsWith("add")) { + System.out.println(" " + clazz.getSimpleName() + "." 
+ method.getName() + "() : " + + ret.getSimpleName()); + + // because of subclass / super class method overrides, we group the methods fitting the + // same signatures because we get two method definitions from java reflection: + // Mutation.setDurability() : Mutation + // Delete.setDurability() : Mutation + // Delete.setDurability() : Delete + String sig = method.getName(); + for (Class param : method.getParameterTypes()) { + sig += param.getName(); + } + Set sigMethods = methodsBySignature.get(sig); + if (sigMethods == null) { + sigMethods = new HashSet(); + methodsBySignature.put(sig, sigMethods); + } + sigMethods.add(method); + } + } + // now iterate over the methods by signatures + for (Map.Entry> e : methodsBySignature.entrySet()) { + // at least one of method sigs should return the declaring class + boolean found = false; + for (Method m : e.getValue()) { + found = clazz.isAssignableFrom(m.getReturnType()); + if (found) break; + } + String errorMsg = "All setXXX()|addXX() methods in " + clazz.getSimpleName() + + " should return a " + clazz.getSimpleName() + " object in builder style. " + + "Offending method:" + e.getValue().iterator().next().getName(); + assertTrue(errorMsg, found); + } + } + } +}