Revert "Deprecate old methods"

Revert the commit for HBASE-13655 "Deprecate duplicate getCompression methods in HColumnDescriptor";
I committed it with a bad commit message.

This reverts commit 5732bdb483.
Author: stack
Date:   2015-05-10 22:48:09 -07:00
parent 2ad4114149
commit 17b6f59a98

13 changed files with 29 additions and 40 deletions

HColumnDescriptor.java

@@ -453,26 +453,23 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
     return this;
   }
 
-  /**
-   * @return compression type being used for the column family
-   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0
-   *             (<a href="https://issues.apache.org/jira/browse/HBASE-13655">HBASE-13655</a>).
-   *             Use {@link #getCompressionType()}.
-   */
-  @Deprecated
+  /** @return compression type being used for the column family */
   public Compression.Algorithm getCompression() {
-    return getCompressionType();
+    String n = getValue(COMPRESSION);
+    if (n == null) {
+      return Compression.Algorithm.NONE;
+    }
+    return Compression.Algorithm.valueOf(n.toUpperCase());
   }
 
-  /**
-   * @return compression type being used for the column family for major compaction
-   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0
-   *             (<a href="https://issues.apache.org/jira/browse/HBASE-13655">HBASE-13655</a>).
-   *             Use {@link #getCompactionCompressionType()}.
-   */
-  @Deprecated
+  /** @return compression type being used for the column family for major
+      compression */
   public Compression.Algorithm getCompactionCompression() {
-    return getCompactionCompressionType();
+    String n = getValue(COMPRESSION_COMPACT);
+    if (n == null) {
+      return getCompression();
+    }
+    return Compression.Algorithm.valueOf(n.toUpperCase());
   }
 
   /** @return maximum number of versions */

@@ -532,11 +529,7 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
    * @return Compression type setting.
    */
   public Compression.Algorithm getCompressionType() {
-    String n = getValue(COMPRESSION);
-    if (n == null) {
-      return Compression.Algorithm.NONE;
-    }
-    return Compression.Algorithm.valueOf(n.toUpperCase());
+    return getCompression();
   }
 
   /**

@@ -606,11 +599,7 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
    * @return Compression type setting.
    */
   public Compression.Algorithm getCompactionCompressionType() {
-    String n = getValue(COMPRESSION_COMPACT);
-    if (n == null) {
-      return getCompressionType();
-    }
-    return Compression.Algorithm.valueOf(n.toUpperCase());
+    return getCompactionCompression();
   }
 
   /**
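Taken together, the restored methods form a fallback chain: getCompactionCompressionType() delegates to getCompactionCompression(), which falls back to getCompression() when COMPRESSION_COMPACT is unset, which in turn falls back to NONE. A minimal client-side sketch of that behavior, assuming the stock 1.x HColumnDescriptor API (the family name and codecs are illustrative only):

import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.io.compress.Compression;

public class CompressionFallbackSketch {
  public static void main(String[] args) {
    HColumnDescriptor hcd = new HColumnDescriptor("cf");

    // Nothing configured: compression defaults to NONE, and the compaction
    // codec falls back to the flush codec.
    System.out.println(hcd.getCompression());            // NONE
    System.out.println(hcd.getCompactionCompression());  // NONE

    // Set the flush/write codec; the compaction codec follows it.
    hcd.setCompressionType(Compression.Algorithm.GZ);
    System.out.println(hcd.getCompactionCompression());  // GZ

    // Override the compaction codec independently (stored under COMPRESSION_COMPACT).
    hcd.setCompactionCompressionType(Compression.Algorithm.SNAPPY);
    System.out.println(hcd.getCompactionCompression());  // SNAPPY
  }
}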

HFileOutputFormat2.java

@@ -627,7 +627,7 @@ public class HFileOutputFormat2
           familyDescriptor.getNameAsString(), "UTF-8"));
       compressionConfigValue.append('=');
       compressionConfigValue.append(URLEncoder.encode(
-          familyDescriptor.getCompressionType().getName(), "UTF-8"));
+          familyDescriptor.getCompression().getName(), "UTF-8"));
     }
     // Get rid of the last ampersand
     conf.set(COMPRESSION_FAMILIES_CONF_KEY, compressionConfigValue.toString());
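For reference, the loop above serializes one family=codec pair per column family, URL-encoding both sides and joining the pairs with '&', then trims the trailing ampersand. A self-contained sketch of just that encoding (the family and codec names here are made up; only COMPRESSION_FAMILIES_CONF_KEY is the real constant):

import java.net.URLEncoder;

public class CompressionConfValueSketch {
  public static void main(String[] args) throws Exception {
    String[][] families = { { "cf1", "GZ" }, { "cf2", "SNAPPY" } };
    StringBuilder compressionConfigValue = new StringBuilder();
    for (String[] f : families) {
      compressionConfigValue.append(URLEncoder.encode(f[0], "UTF-8"));
      compressionConfigValue.append('=');
      compressionConfigValue.append(URLEncoder.encode(f[1], "UTF-8"));
      compressionConfigValue.append('&');
    }
    // Get rid of the last ampersand, as the real method does.
    compressionConfigValue.setLength(compressionConfigValue.length() - 1);
    System.out.println(compressionConfigValue);  // cf1=GZ&cf2=SNAPPY
  }
}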

LoadIncrementalHFiles.java

@@ -805,7 +805,7 @@ public class LoadIncrementalHFiles extends Configured implements Tool {
     Map<byte[], byte[]> fileInfo = halfReader.loadFileInfo();
 
     int blocksize = familyDescriptor.getBlocksize();
-    Algorithm compression = familyDescriptor.getCompressionType();
+    Algorithm compression = familyDescriptor.getCompression();
     BloomType bloomFilterType = familyDescriptor.getBloomFilterType();
     HFileContext hFileContext = new HFileContextBuilder()
                                 .withCompression(compression)
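A minimal sketch of where those two values end up, assuming the real HFileContextBuilder API (the surrounding method configures more than these two fields; this only shows the part visible in the hunk):

import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;

public class HalfFileContextSketch {
  // Build an HFileContext from the column family's schema, as the
  // HFile-splitting path in LoadIncrementalHFiles does.
  static HFileContext contextFor(HColumnDescriptor familyDescriptor) {
    int blocksize = familyDescriptor.getBlocksize();
    Algorithm compression = familyDescriptor.getCompression();
    return new HFileContextBuilder()
        .withCompression(compression)
        .withBlockSize(blocksize)
        .build();
  }
}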

HMaster.java

@@ -1561,8 +1561,8 @@ public class HMaster extends HRegionServer implements MasterServices, Server {
   private void checkCompression(final HColumnDescriptor hcd)
       throws IOException {
     if (!this.masterCheckCompression) return;
-    CompressionTest.testCompression(hcd.getCompressionType());
-    CompressionTest.testCompression(hcd.getCompactionCompressionType());
+    CompressionTest.testCompression(hcd.getCompression());
+    CompressionTest.testCompression(hcd.getCompactionCompression());
   }
 
   private void checkEncryption(final Configuration conf, final HTableDescriptor htd)
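Both calls go through CompressionTest.testCompression, which throws an IOException when a codec cannot be loaded in the master's environment, so a schema naming an unusable codec is rejected before the table is created. A hedged sketch of invoking the same check directly:

import java.io.IOException;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.util.CompressionTest;

public class CodecAvailabilityCheck {
  public static void main(String[] args) {
    try {
      // NONE always passes; SNAPPY typically fails unless native snappy
      // support is installed on this machine.
      CompressionTest.testCompression(Compression.Algorithm.NONE);
      CompressionTest.testCompression(Compression.Algorithm.SNAPPY);
      System.out.println("both codecs usable");
    } catch (IOException e) {
      System.out.println("codec unavailable: " + e.getMessage());
    }
  }
}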

DefaultStoreFlusher.java

@@ -64,7 +64,7 @@ public class DefaultStoreFlusher extends StoreFlusher {
       status.setStatus("Flushing " + store + ": creating writer");
       // Write the map out to the disk
       writer = store.createWriterInTmp(
-          cellsCount, store.getFamily().getCompressionType(), false, true, true);
+          cellsCount, store.getFamily().getCompression(), false, true, true);
       writer.setTimeRangeTracker(snapshot.getTimeRangeTracker());
       IOException e = null;
       try {
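A hedged annotation of the createWriterInTmp call above; the parameter names follow the branch-1 HStore signature as best I can tell and are my assumption, not something this diff states:

// writer = store.createWriterInTmp(
//     cellsCount,                          // maxKeyCount: cells in the memstore snapshot
//     store.getFamily().getCompression(),  // flush codec (not the compaction codec)
//     false,                               // isCompaction: this is a flush
//     true,                                // includeMVCCReadpoint
//     true);                               // includesTag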

HRegion.java

@@ -6083,8 +6083,8 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
   private void checkCompressionCodecs() throws IOException {
     for (HColumnDescriptor fam: this.htableDescriptor.getColumnFamilies()) {
-      CompressionTest.testCompression(fam.getCompressionType());
-      CompressionTest.testCompression(fam.getCompactionCompressionType());
+      CompressionTest.testCompression(fam.getCompression());
+      CompressionTest.testCompression(fam.getCompactionCompression());
     }
   }

StripeStoreFlusher.java

@@ -109,7 +109,7 @@ public class StripeStoreFlusher extends StoreFlusher {
       @Override
       public Writer createWriter() throws IOException {
         StoreFile.Writer writer = store.createWriterInTmp(
-            kvCount, store.getFamily().getCompressionType(), false, true, true);
+            kvCount, store.getFamily().getCompression(), false, true, true);
         writer.setTimeRangeTracker(tracker);
         return writer;
       }

Compactor.java

@@ -72,7 +72,7 @@ public abstract class Compactor {
     this.compactionKVMax =
       this.conf.getInt(HConstants.COMPACTION_KV_MAX, HConstants.COMPACTION_KV_MAX_DEFAULT);
     this.compactionCompression = (this.store.getFamily() == null) ?
-        Compression.Algorithm.NONE : this.store.getFamily().getCompactionCompressionType();
+        Compression.Algorithm.NONE : this.store.getFamily().getCompactionCompression();
     this.keepSeqIdPeriod = Math.max(this.conf.getInt(HConstants.KEEP_SEQID_PERIOD,
       HConstants.MIN_KEEP_SEQID_PERIOD), HConstants.MIN_KEEP_SEQID_PERIOD);
   }

StripeCompactor.java

@@ -119,7 +119,7 @@ public class StripeCompactor extends Compactor {
     final boolean needMvcc = fd.maxMVCCReadpoint > 0;
-    final Compression.Algorithm compression = store.getFamily().getCompactionCompressionType();
+    final Compression.Algorithm compression = store.getFamily().getCompactionCompression();
     StripeMultiFileWriter.WriterFactory factory = new StripeMultiFileWriter.WriterFactory() {
       @Override
       public Writer createWriter() throws IOException {

TestHFileOutputFormat.java

@@ -859,7 +859,7 @@ public class TestHFileOutputFormat {
           "(reader: " + reader + ")",
           hcd.getBloomFilterType(), BloomType.valueOf(Bytes.toString(bloomFilter)));
         assertEquals("Incorrect compression used for column family " + familyStr +
-          "(reader: " + reader + ")", hcd.getCompressionType(), reader.getFileContext().getCompression());
+          "(reader: " + reader + ")", hcd.getCompression(), reader.getFileContext().getCompression());
       }
     } finally {
       dir.getFileSystem(conf).delete(dir, true);

TestHFileOutputFormat2.java

@@ -862,7 +862,7 @@ public class TestHFileOutputFormat2 {
           "(reader: " + reader + ")",
           hcd.getBloomFilterType(), BloomType.valueOf(Bytes.toString(bloomFilter)));
         assertEquals("Incorrect compression used for column family " + familyStr +
-          "(reader: " + reader + ")", hcd.getCompressionType(), reader.getFileContext().getCompression());
+          "(reader: " + reader + ")", hcd.getCompression(), reader.getFileContext().getCompression());
       }
     } finally {
       dir.getFileSystem(conf).delete(dir, true);

TestStore.java

@@ -268,7 +268,7 @@ public class TestStore {
     init(name.getMethodName(), conf, hcd);
 
     // Test createWriterInTmp()
-    StoreFile.Writer writer = store.createWriterInTmp(4, hcd.getCompressionType(), false, true, false);
+    StoreFile.Writer writer = store.createWriterInTmp(4, hcd.getCompression(), false, true, false);
     Path path = writer.getPath();
     writer.append(new KeyValue(row, family, qf1, Bytes.toBytes(1)));
     writer.append(new KeyValue(row, family, qf2, Bytes.toBytes(2)));

ThriftUtilities.java

@@ -89,7 +89,7 @@ public class ThriftUtilities {
     ColumnDescriptor col = new ColumnDescriptor();
     col.name = ByteBuffer.wrap(Bytes.add(in.getName(), KeyValue.COLUMN_FAMILY_DELIM_ARRAY));
     col.maxVersions = in.getMaxVersions();
-    col.compression = in.getCompressionType().toString();
+    col.compression = in.getCompression().toString();
     col.inMemory = in.isInMemory();
     col.blockCacheEnabled = in.isBlockCacheEnabled();
     col.bloomFilterType = in.getBloomFilterType().toString();
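The Thrift ColumnDescriptor carries compression as a plain string, so a consumer turns it back into the enum with valueOf. A small sketch (hypothetical consumer code, not part of ThriftUtilities); the defensive toUpperCase mirrors what HColumnDescriptor does when parsing stored values:

import org.apache.hadoop.hbase.io.compress.Compression;

public class ThriftCompressionRoundTrip {
  public static void main(String[] args) {
    // What ThriftUtilities stores: the enum's string form, e.g. "SNAPPY".
    String wire = Compression.Algorithm.SNAPPY.toString();
    // Parse it back, upper-casing in case a lowercase name was stored.
    Compression.Algorithm parsed = Compression.Algorithm.valueOf(wire.toUpperCase());
    System.out.println(parsed == Compression.Algorithm.SNAPPY);  // true
  }
}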