From 5732bdb483d6c6882f26a33a247f0544329745eb Mon Sep 17 00:00:00 2001
From: Lars Francke
Date: Sun, 10 May 2015 19:49:30 +0200
Subject: [PATCH] Deprecate old methods

Add proper JIRA

Signed-off-by: stack
---
 .../hadoop/hbase/HColumnDescriptor.java       | 41 ++++++++++++-------
 .../hbase/mapreduce/HFileOutputFormat2.java   |  2 +-
 .../mapreduce/LoadIncrementalHFiles.java      |  2 +-
 .../apache/hadoop/hbase/master/HMaster.java   |  4 +-
 .../regionserver/DefaultStoreFlusher.java     |  2 +-
 .../hadoop/hbase/regionserver/HRegion.java    |  4 +-
 .../regionserver/StripeStoreFlusher.java      |  2 +-
 .../regionserver/compactions/Compactor.java   |  2 +-
 .../compactions/StripeCompactor.java          |  2 +-
 .../mapreduce/TestHFileOutputFormat.java      |  2 +-
 .../mapreduce/TestHFileOutputFormat2.java     |  2 +-
 .../hadoop/hbase/regionserver/TestStore.java  |  2 +-
 .../hadoop/hbase/thrift/ThriftUtilities.java  |  2 +-
 13 files changed, 40 insertions(+), 29 deletions(-)

diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
index 6cbe8044d7b..4091c117762 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
@@ -453,23 +453,26 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
     return this;
   }
 
-  /** @return compression type being used for the column family */
+  /**
+   * @return compression type being used for the column family
+   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0
+   *             (HBASE-13655).
+   *             Use {@link #getCompressionType()}.
+   */
+  @Deprecated
   public Compression.Algorithm getCompression() {
-    String n = getValue(COMPRESSION);
-    if (n == null) {
-      return Compression.Algorithm.NONE;
-    }
-    return Compression.Algorithm.valueOf(n.toUpperCase());
+    return getCompressionType();
   }
 
-  /** @return compression type being used for the column family for major
-      compression */
+  /**
+   * @return compression type being used for the column family for major compaction
+   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0
+   *             (HBASE-13655).
+   *             Use {@link #getCompactionCompressionType()}.
+   */
+  @Deprecated
   public Compression.Algorithm getCompactionCompression() {
-    String n = getValue(COMPRESSION_COMPACT);
-    if (n == null) {
-      return getCompression();
-    }
-    return Compression.Algorithm.valueOf(n.toUpperCase());
+    return getCompactionCompressionType();
   }
 
   /** @return maximum number of versions */
@@ -529,7 +532,11 @@
    * @return Compression type setting.
    */
   public Compression.Algorithm getCompressionType() {
-    return getCompression();
+    String n = getValue(COMPRESSION);
+    if (n == null) {
+      return Compression.Algorithm.NONE;
+    }
+    return Compression.Algorithm.valueOf(n.toUpperCase());
   }
 
   /**
@@ -599,7 +606,11 @@
    * @return Compression type setting.
    */
   public Compression.Algorithm getCompactionCompressionType() {
-    return getCompactionCompression();
+    String n = getValue(COMPRESSION_COMPACT);
+    if (n == null) {
+      return getCompressionType();
+    }
+    return Compression.Algorithm.valueOf(n.toUpperCase());
   }
 
   /**
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
index 4e943083980..015cca0b6f8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
@@ -627,7 +627,7 @@ public class HFileOutputFormat2
           familyDescriptor.getNameAsString(), "UTF-8"));
       compressionConfigValue.append('=');
       compressionConfigValue.append(URLEncoder.encode(
-          familyDescriptor.getCompression().getName(), "UTF-8"));
+          familyDescriptor.getCompressionType().getName(), "UTF-8"));
     }
     // Get rid of the last ampersand
     conf.set(COMPRESSION_FAMILIES_CONF_KEY, compressionConfigValue.toString());
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
index d9829a79af5..40601d67788 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
@@ -805,7 +805,7 @@ public class LoadIncrementalHFiles extends Configured implements Tool {
       Map<byte[], byte[]> fileInfo = halfReader.loadFileInfo();
 
       int blocksize = familyDescriptor.getBlocksize();
-      Algorithm compression = familyDescriptor.getCompression();
+      Algorithm compression = familyDescriptor.getCompressionType();
       BloomType bloomFilterType = familyDescriptor.getBloomFilterType();
       HFileContext hFileContext = new HFileContextBuilder()
                                   .withCompression(compression)
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 995b979e9fc..a39b3eaae43 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -1561,8 +1561,8 @@ public class HMaster extends HRegionServer implements MasterServices, Server {
   private void checkCompression(final HColumnDescriptor hcd)
   throws IOException {
     if (!this.masterCheckCompression) return;
-    CompressionTest.testCompression(hcd.getCompression());
-    CompressionTest.testCompression(hcd.getCompactionCompression());
+    CompressionTest.testCompression(hcd.getCompressionType());
+    CompressionTest.testCompression(hcd.getCompactionCompressionType());
   }
 
   private void checkEncryption(final Configuration conf, final HTableDescriptor htd)
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java
index 73b8cb9334f..e68d267d4c2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java
@@ -64,7 +64,7 @@ public class DefaultStoreFlusher extends StoreFlusher {
       status.setStatus("Flushing " + store + ": creating writer");
       // Write the map out to the disk
       writer = store.createWriterInTmp(
-          cellsCount, store.getFamily().getCompression(), false, true, true);
+          cellsCount, store.getFamily().getCompressionType(), false, true, true);
       writer.setTimeRangeTracker(snapshot.getTimeRangeTracker());
       IOException e = null;
       try {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index cf48619687a..cf20e629ba7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -6083,8 +6083,8 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
 
   private void checkCompressionCodecs() throws IOException {
     for (HColumnDescriptor fam: this.htableDescriptor.getColumnFamilies()) {
-      CompressionTest.testCompression(fam.getCompression());
-      CompressionTest.testCompression(fam.getCompactionCompression());
+      CompressionTest.testCompression(fam.getCompressionType());
+      CompressionTest.testCompression(fam.getCompactionCompressionType());
     }
   }
 
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java
index 136934c94ac..c87b24654a2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java
@@ -109,7 +109,7 @@ public class StripeStoreFlusher extends StoreFlusher {
       @Override
       public Writer createWriter() throws IOException {
         StoreFile.Writer writer = store.createWriterInTmp(
-            kvCount, store.getFamily().getCompression(), false, true, true);
+            kvCount, store.getFamily().getCompressionType(), false, true, true);
         writer.setTimeRangeTracker(tracker);
         return writer;
       }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
index d1bb65774e2..15ead14159b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
@@ -72,7 +72,7 @@ public abstract class Compactor {
     this.compactionKVMax =
       this.conf.getInt(HConstants.COMPACTION_KV_MAX, HConstants.COMPACTION_KV_MAX_DEFAULT);
     this.compactionCompression = (this.store.getFamily() == null) ?
-        Compression.Algorithm.NONE : this.store.getFamily().getCompactionCompression();
+        Compression.Algorithm.NONE : this.store.getFamily().getCompactionCompressionType();
     this.keepSeqIdPeriod = Math.max(this.conf.getInt(HConstants.KEEP_SEQID_PERIOD,
       HConstants.MIN_KEEP_SEQID_PERIOD), HConstants.MIN_KEEP_SEQID_PERIOD);
   }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactor.java
index 10e3cf09c4e..1f6290e9d70 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactor.java
@@ -119,7 +119,7 @@ public class StripeCompactor extends Compactor {
 
     final boolean needMvcc = fd.maxMVCCReadpoint > 0;
 
-    final Compression.Algorithm compression = store.getFamily().getCompactionCompression();
+    final Compression.Algorithm compression = store.getFamily().getCompactionCompressionType();
     StripeMultiFileWriter.WriterFactory factory = new StripeMultiFileWriter.WriterFactory() {
       @Override
       public Writer createWriter() throws IOException {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
index 314b7b289a4..b3c29b774b1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
@@ -859,7 +859,7 @@ public class TestHFileOutputFormat {
           "(reader: " + reader + ")",
           hcd.getBloomFilterType(), BloomType.valueOf(Bytes.toString(bloomFilter)));
         assertEquals("Incorrect compression used for column family " + familyStr +
-          "(reader: " + reader + ")", hcd.getCompression(), reader.getFileContext().getCompression());
+          "(reader: " + reader + ")", hcd.getCompressionType(), reader.getFileContext().getCompression());
       }
     } finally {
       dir.getFileSystem(conf).delete(dir, true);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
index 7aa5dc44260..3b066f230fc 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
@@ -862,7 +862,7 @@ public class TestHFileOutputFormat2 {
           "(reader: " + reader + ")",
           hcd.getBloomFilterType(), BloomType.valueOf(Bytes.toString(bloomFilter)));
         assertEquals("Incorrect compression used for column family " + familyStr +
-          "(reader: " + reader + ")", hcd.getCompression(), reader.getFileContext().getCompression());
+          "(reader: " + reader + ")", hcd.getCompressionType(), reader.getFileContext().getCompression());
       }
     } finally {
       dir.getFileSystem(conf).delete(dir, true);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
index 0517db7067a..3adef9d45b5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
@@ -268,7 +268,7 @@ public class TestStore {
     init(name.getMethodName(), conf, hcd);
 
     // Test createWriterInTmp()
-    StoreFile.Writer writer = store.createWriterInTmp(4, hcd.getCompression(), false, true, false);
+    StoreFile.Writer writer = store.createWriterInTmp(4, hcd.getCompressionType(), false, true, false);
     Path path = writer.getPath();
     writer.append(new KeyValue(row, family, qf1, Bytes.toBytes(1)));
     writer.append(new KeyValue(row, family, qf2, Bytes.toBytes(2)));
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java
index 57c237ef646..29db5bed784 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java
@@ -89,7 +89,7 @@ public class ThriftUtilities {
     ColumnDescriptor col = new ColumnDescriptor();
     col.name = ByteBuffer.wrap(Bytes.add(in.getName(), KeyValue.COLUMN_FAMILY_DELIM_ARRAY));
     col.maxVersions = in.getMaxVersions();
-    col.compression = in.getCompression().toString();
+    col.compression = in.getCompressionType().toString();
     col.inMemory = in.isInMemory();
     col.blockCacheEnabled = in.isBlockCacheEnabled();
     col.bloomFilterType = in.getBloomFilterType().toString();
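
Editor's note, not part of the patch: the sketch below illustrates what this
deprecation means for client code. It is a minimal, hypothetical example — the
"cf" family name and the class name are made up — and it assumes an HBase
client on the classpath where the HColumnDescriptor(String) constructor is
still available.

    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.io.compress.Compression;

    public class CompressionAccessorExample {
      public static void main(String[] args) {
        // "cf" is a made-up column family name used only for illustration.
        HColumnDescriptor hcd = new HColumnDescriptor("cf");

        // The old accessors still compile but are now @Deprecated; after this
        // patch they simply delegate to the *Type variants below.
        @SuppressWarnings("deprecation")
        Compression.Algorithm viaOldApi = hcd.getCompression();

        // Preferred accessors. The moved logic is unchanged: an unset
        // COMPRESSION attribute yields NONE, and an unset COMPRESSION_COMPACT
        // falls back to the flush-time compression.
        Compression.Algorithm flush = hcd.getCompressionType();
        Compression.Algorithm compaction = hcd.getCompactionCompressionType();

        System.out.println(flush + " / " + compaction + " / " + viaOldApi);
      }
    }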