HBASE-13864 HColumnDescriptor should parse the output from master and from describe for TTL (Ashu Pachauri)

tedyu 2015-08-01 02:15:02 -07:00
parent f504e4b4ed
commit 4b6598e394
4 changed files with 128 additions and 1 deletion


@@ -27,6 +27,7 @@ import java.util.Set;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.exceptions.HBaseException;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
@@ -716,6 +717,15 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
    return setValue(TTL, Integer.toString(timeToLive));
  }

  /**
   * @param timeToLive Time to live of cell contents, in human readable format
   * @see org.apache.hadoop.hbase.util.PrettyPrinter#format(String, Unit)
   * @return this (for chained invocation)
   */
  public HColumnDescriptor setTimeToLive(String timeToLive) throws HBaseException {
    return setValue(TTL, PrettyPrinter.valueOf(timeToLive, Unit.TIME_INTERVAL));
  }

  /**
   * @return The minimum number of versions to keep.
   */

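For context, a minimal usage sketch of the new String overload (the wrapper class and the family name "cf" are made up for illustration; the expected value follows the test below, not anything beyond this patch):

    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.exceptions.HBaseException;

    public class TtlStringExample {
      public static void main(String[] args) throws HBaseException {
        // "cf" is an arbitrary example family name.
        HColumnDescriptor family = new HColumnDescriptor("cf");
        // Human readable TTL, parsed via PrettyPrinter.valueOf(..., Unit.TIME_INTERVAL).
        family.setTimeToLive("1 HOUR 10 minutes 1 second");
        // Prints 4201 (3600 + 600 + 1 seconds).
        System.out.println(family.getTimeToLive());
      }
    }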

@@ -21,6 +21,7 @@ import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.exceptions.HBaseException;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
@@ -30,6 +31,7 @@ import org.apache.hadoop.hbase.util.PrettyPrinter;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.BuilderStyleTest;
import org.junit.Assert;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@@ -136,4 +138,38 @@ public class TestHColumnDescriptor {
    BuilderStyleTest.assertClassesAreBuilderStyle(HColumnDescriptor.class);
  }

  @Test
  public void testSetTimeToLive() throws HBaseException {
    String ttl;
    HColumnDescriptor desc = new HColumnDescriptor("foo");

    ttl = "50000";
    desc.setTimeToLive(ttl);
    Assert.assertEquals(50000, desc.getTimeToLive());

    ttl = "50000 seconds";
    desc.setTimeToLive(ttl);
    Assert.assertEquals(50000, desc.getTimeToLive());

    ttl = "";
    desc.setTimeToLive(ttl);
    Assert.assertEquals(0, desc.getTimeToLive());

    ttl = "FOREVER";
    desc.setTimeToLive(ttl);
    Assert.assertEquals(HConstants.FOREVER, desc.getTimeToLive());

    ttl = "1 HOUR 10 minutes 1 second";
    desc.setTimeToLive(ttl);
    Assert.assertEquals(4201, desc.getTimeToLive());

    ttl = "500 Days 23 HOURS";
    desc.setTimeToLive(ttl);
    Assert.assertEquals(43282800, desc.getTimeToLive());

    ttl = "43282800 SECONDS (500 Days 23 hours)";
    desc.setTimeToLive(ttl);
    Assert.assertEquals(43282800, desc.getTimeToLive());
  }
}

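As a quick arithmetic check on the expected constants above: "1 HOUR 10 minutes 1 second" is 3600 + 600 + 1 = 4201 seconds, and "500 Days 23 HOURS" is 500 * 86400 + 23 * 3600 = 43200000 + 82800 = 43282800 seconds, which is also the value spelled out in the "43282800 SECONDS (500 Days 23 hours)" case.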

@@ -19,12 +19,26 @@
package org.apache.hadoop.hbase.util;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.exceptions.HBaseException;
@InterfaceAudience.Private
public class PrettyPrinter {

  private static final Log LOG = LogFactory.getLog(PrettyPrinter.class);

  private static final String INTERVAL_REGEX = "((\\d+)\\s*SECONDS?\\s*\\()?\\s*" +
      "((\\d+)\\s*DAYS?)?\\s*((\\d+)\\s*HOURS?)?\\s*" +
      "((\\d+)\\s*MINUTES?)?\\s*((\\d+)\\s*SECONDS?)?\\s*\\)?";
  private static final Pattern INTERVAL_PATTERN = Pattern.compile(INTERVAL_REGEX,
      Pattern.CASE_INSENSITIVE);
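  // Illustration (not part of the patch): for the describe-style string
  // "43282800 SECONDS (500 Days 23 hours)", group 2 of INTERVAL_PATTERN captures "43282800",
  // group 4 captures "500", group 6 captures "23", and the minute/second groups (8 and 10) are null.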
  public enum Unit {
    TIME_INTERVAL,
    LONG,
@@ -52,6 +66,25 @@ public class PrettyPrinter {
    return human.toString();
  }

  /**
   * Convert a human readable string to its value.
   * @see org.apache.hadoop.hbase.util.PrettyPrinter#format(String, Unit)
   * @param pretty
   * @param unit
   * @return the value corresponding to the human readable string
   */
  public static String valueOf(final String pretty, final Unit unit) throws HBaseException {
    StringBuilder value = new StringBuilder();
    switch (unit) {
      case TIME_INTERVAL:
        value.append(humanReadableIntervalToSec(pretty));
        break;
      default:
        value.append(pretty);
    }
    return value.toString();
  }
  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="ICAST_INTEGER_MULTIPLY_CAST_TO_LONG",
      justification="Will not overflow")
  private static String humanReadableTTL(final long interval){
@@ -107,4 +140,52 @@
    return sb.toString();
  }

  /**
   * Convert a human readable time interval to seconds. Examples of the human readable
   * time intervals are: 50 DAYS 1 HOUR 30 MINUTES, 25000 SECONDS, etc.
   * The units of time specified can be in uppercase as well as lowercase. Also, if a
   * single number is specified without any time unit, it is assumed to be in seconds.
   * @param humanReadableInterval
   * @return value in seconds
   */
  private static long humanReadableIntervalToSec(final String humanReadableInterval)
      throws HBaseException {
    if (humanReadableInterval == null || humanReadableInterval.equalsIgnoreCase("FOREVER")) {
      return HConstants.FOREVER;
    }
    try {
      return Long.parseLong(humanReadableInterval);
    } catch (NumberFormatException ex) {
      LOG.debug("Given interval value is not a number, parsing for human readable format");
    }
    String days = null;
    String hours = null;
    String minutes = null;
    String seconds = null;
    String expectedTtl = null;
    long ttl;
    Matcher matcher = PrettyPrinter.INTERVAL_PATTERN.matcher(humanReadableInterval);
    if (matcher.matches()) {
      expectedTtl = matcher.group(2);
      days = matcher.group(4);
      hours = matcher.group(6);
      minutes = matcher.group(8);
      seconds = matcher.group(10);
    }
    ttl = 0;
    ttl += days != null ? Long.parseLong(days) * HConstants.DAY_IN_SECONDS : 0;
    ttl += hours != null ? Long.parseLong(hours) * HConstants.HOUR_IN_SECONDS : 0;
    ttl += minutes != null ? Long.parseLong(minutes) * HConstants.MINUTE_IN_SECONDS : 0;
    ttl += seconds != null ? Long.parseLong(seconds) : 0;
    if (expectedTtl != null && Long.parseLong(expectedTtl) != ttl) {
      throw new HBaseException("Malformed TTL string: TTL values in seconds and human readable " +
          "format do not match");
    }
    return ttl;
  }
}

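For reference, a small sketch (not part of the commit; PrettyPrinter is an @InterfaceAudience.Private utility, so this only illustrates behaviour) of what the new valueOf helper returns for inputs exercised by the test above:

    import org.apache.hadoop.hbase.exceptions.HBaseException;
    import org.apache.hadoop.hbase.util.PrettyPrinter;
    import org.apache.hadoop.hbase.util.PrettyPrinter.Unit;

    public class ValueOfSketch {
      public static void main(String[] args) throws HBaseException {
        // TIME_INTERVAL input is converted to a number of seconds, returned as a String.
        System.out.println(PrettyPrinter.valueOf("500 Days 23 HOURS", Unit.TIME_INTERVAL)); // "43282800"
        // "FOREVER" (or null) maps to HConstants.FOREVER.
        System.out.println(PrettyPrinter.valueOf("FOREVER", Unit.TIME_INTERVAL));
        // Any other Unit passes the string through unchanged.
        System.out.println(PrettyPrinter.valueOf("128", Unit.LONG)); // "128"
      }
    }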

@@ -750,7 +750,7 @@ module Hbase
family.setScope(JInteger.valueOf(arg.delete(org.apache.hadoop.hbase.HColumnDescriptor::REPLICATION_SCOPE))) if arg.include?(org.apache.hadoop.hbase.HColumnDescriptor::REPLICATION_SCOPE)
family.setCacheDataOnWrite(JBoolean.valueOf(arg.delete(org.apache.hadoop.hbase.HColumnDescriptor::CACHE_DATA_ON_WRITE))) if arg.include?(org.apache.hadoop.hbase.HColumnDescriptor::CACHE_DATA_ON_WRITE)
family.setInMemory(JBoolean.valueOf(arg.delete(org.apache.hadoop.hbase.HColumnDescriptor::IN_MEMORY))) if arg.include?(org.apache.hadoop.hbase.HColumnDescriptor::IN_MEMORY)
- family.setTimeToLive(JInteger.valueOf(arg.delete(org.apache.hadoop.hbase.HColumnDescriptor::TTL))) if arg.include?(org.apache.hadoop.hbase.HColumnDescriptor::TTL)
+ family.setTimeToLive(arg.delete(org.apache.hadoop.hbase.HColumnDescriptor::TTL)) if arg.include?(org.apache.hadoop.hbase.HColumnDescriptor::TTL)
family.setDataBlockEncoding(org.apache.hadoop.hbase.io.encoding.DataBlockEncoding.valueOf(arg.delete(org.apache.hadoop.hbase.HColumnDescriptor::DATA_BLOCK_ENCODING))) if arg.include?(org.apache.hadoop.hbase.HColumnDescriptor::DATA_BLOCK_ENCODING)
family.setBlocksize(JInteger.valueOf(arg.delete(org.apache.hadoop.hbase.HColumnDescriptor::BLOCKSIZE))) if arg.include?(org.apache.hadoop.hbase.HColumnDescriptor::BLOCKSIZE)
family.setMaxVersions(JInteger.valueOf(arg.delete(org.apache.hadoop.hbase.HColumnDescriptor::VERSIONS))) if arg.include?(org.apache.hadoop.hbase.HColumnDescriptor::VERSIONS)
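Net effect on the shell: the TTL argument is now handed to the new String overload instead of being forced through JInteger, so (with made-up table and family names) an alter argument such as TTL => '5 DAYS', or a value copied verbatim from describe output like TTL => '43282800 SECONDS (500 Days 23 hours)', is accepted in addition to a plain number of seconds.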