HBASE-14366 NPE in case visibility expression is not present in labels table during importtsv run. (Bhupendra)

commit 90f0644d9a
parent 14819ce853
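Background on the failure, illustrated with a minimal standalone Java sketch (not part of this commit; the class and label names are hypothetical): getLabelOrdinal() in DefaultVisibilityExpressionResolver returned the result of a Map<String, Integer> lookup directly, so a visibility expression referencing a label absent from the hbase:labels table produced a null that failed on auto-unboxing. The patch null-checks the lookup and falls back to the new shared constant VisibilityConstants.NON_EXIST_LABEL_ORDINAL.

import java.util.HashMap;
import java.util.Map;

// Standalone sketch (not part of this commit; names are illustrative) of the bug
// this patch fixes and of the null-check pattern it introduces.
public class LabelOrdinalSketch {

  // Mirrors the VisibilityConstants.NON_EXIST_LABEL_ORDINAL constant added by the patch.
  static final int NON_EXIST_LABEL_ORDINAL = 0;

  // Stand-in for the label -> ordinal map loaded from the labels table.
  static final Map<String, Integer> labels = new HashMap<>();
  static {
    labels.put("private", 1);
  }

  // Pre-patch shape: unboxing the null returned for an unknown label throws an NPE.
  static int getLabelOrdinalBefore(String label) {
    return labels.get(label);
  }

  // Post-patch shape: unknown labels resolve to the invalid-label ordinal (0).
  static int getLabelOrdinalAfter(String label) {
    Integer ordinal = labels.get(label);
    if (ordinal != null) {
      return ordinal.intValue();
    }
    return NON_EXIST_LABEL_ORDINAL;
  }

  public static void main(String[] args) {
    System.out.println(getLabelOrdinalAfter("private"));   // 1
    System.out.println(getLabelOrdinalAfter("invalid"));   // 0
    try {
      getLabelOrdinalBefore("invalid");                    // NullPointerException
    } catch (NullPointerException expected) {
      System.out.println("pre-patch lookup throws NPE for unknown labels");
    }
  }
}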
@@ -58,4 +58,6 @@ public final class VisibilityConstants {
   public static final String OPEN_PARAN = "(";
   public static final String CLOSED_PARAN = ")";
 
+  /** Label ordinal value for invalid labels */
+  public static final int NON_EXIST_LABEL_ORDINAL = 0;
 }
@@ -39,6 +39,7 @@ import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.security.visibility.Authorizations;
+import org.apache.hadoop.hbase.security.visibility.VisibilityConstants;
 import org.apache.hadoop.hbase.security.visibility.VisibilityLabelOrdinalProvider;
 import org.apache.hadoop.hbase.security.visibility.VisibilityUtils;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -124,7 +125,12 @@ public class DefaultVisibilityExpressionResolver implements VisibilityExpression
     VisibilityLabelOrdinalProvider provider = new VisibilityLabelOrdinalProvider() {
       @Override
       public int getLabelOrdinal(String label) {
-        return labels.get(label);
+        Integer ordinal = null;
+        ordinal = labels.get(label);
+        if (ordinal != null) {
+          return ordinal.intValue();
+        }
+        return VisibilityConstants.NON_EXIST_LABEL_ORDINAL;
       }
 
       @Override
@@ -24,15 +24,16 @@ import java.util.List;
 import java.util.Set;
 import java.util.TreeSet;
 
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.Tag;
 import org.apache.hadoop.hbase.TagType;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.security.visibility.InvalidLabelException;
 import org.apache.hadoop.hbase.util.Base64;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.Text;
@@ -184,21 +185,15 @@ public class TextSortReducer extends
             kvs.add(kv);
             curSize += kv.heapSize();
           }
-        } catch (ImportTsv.TsvParser.BadTsvLineException badLine) {
+        } catch (ImportTsv.TsvParser.BadTsvLineException | IllegalArgumentException
+            | InvalidLabelException badLine) {
           if (skipBadLines) {
             System.err.println("Bad line." + badLine.getMessage());
             incrementBadLineCount(1);
             continue;
           }
           throw new IOException(badLine);
-        } catch (IllegalArgumentException e) {
-          if (skipBadLines) {
-            System.err.println("Bad line." + e.getMessage());
-            incrementBadLineCount(1);
-            continue;
-          }
-          throw new IOException(e);
-        }
+        }
       }
       context.setStatus("Read " + kvs.size() + " entries of " + kvs.getClass()
           + "(" + StringUtils.humanReadableInt(curSize) + ")");
@@ -21,17 +21,18 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.Tag;
 import org.apache.hadoop.hbase.TagType;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.mapreduce.ImportTsv.TsvParser.BadTsvLineException;
 import org.apache.hadoop.hbase.security.visibility.CellVisibility;
+import org.apache.hadoop.hbase.security.visibility.InvalidLabelException;
 import org.apache.hadoop.hbase.util.Base64;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.LongWritable;
@@ -165,7 +166,8 @@ extends Mapper<LongWritable, Text, ImmutableBytesWritable, Put>
         populatePut(lineBytes, parsed, put, i);
       }
       context.write(rowKey, put);
-    } catch (ImportTsv.TsvParser.BadTsvLineException|IllegalArgumentException badLine) {
+    } catch (ImportTsv.TsvParser.BadTsvLineException | IllegalArgumentException
+        | InvalidLabelException badLine) {
       if (logBadLines) {
         System.err.println(value);
       }
@@ -49,7 +49,6 @@ import org.apache.zookeeper.KeeperException;
 public class VisibilityLabelsCache implements VisibilityLabelOrdinalProvider {
 
   private static final Log LOG = LogFactory.getLog(VisibilityLabelsCache.class);
-  private static final int NON_EXIST_LABEL_ORDINAL = 0;
   private static final List<String> EMPTY_LIST = Collections.emptyList();
   private static final Set<Integer> EMPTY_SET = Collections.emptySet();
   private static VisibilityLabelsCache instance;
@@ -175,7 +174,7 @@ public class VisibilityLabelsCache implements VisibilityLabelOrdinalProvider {
       return ordinal.intValue();
     }
     // 0 denotes not available
-    return NON_EXIST_LABEL_ORDINAL;
+    return VisibilityConstants.NON_EXIST_LABEL_ORDINAL;
   }
 
   /**
@@ -41,7 +41,6 @@ import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
@@ -53,6 +52,9 @@ import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.io.hfile.CacheConfig;
+import org.apache.hadoop.hbase.io.hfile.HFile;
+import org.apache.hadoop.hbase.io.hfile.HFileScanner;
 import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.security.visibility.Authorizations;
@@ -63,6 +65,7 @@ import org.apache.hadoop.hbase.security.visibility.VisibilityClient;
 import org.apache.hadoop.hbase.security.visibility.VisibilityConstants;
 import org.apache.hadoop.hbase.security.visibility.VisibilityController;
 import org.apache.hadoop.hbase.security.visibility.VisibilityUtils;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.mapred.Utils.OutputFileUtils.OutputFilesFilter;
 import org.apache.hadoop.util.Tool;
@@ -273,6 +276,51 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {
     util.deleteTable(tableName);
   }
 
+  @Test
+  public void testBulkOutputWithInvalidLabels() throws Exception {
+    TableName tableName = TableName.valueOf("test-" + UUID.randomUUID());
+    Path hfiles = new Path(util.getDataTestDirOnTestFS(tableName.getNameAsString()), "hfiles");
+    // Prepare the arguments required for the test.
+    String[] args =
+        new String[] { "-D" + ImportTsv.BULK_OUTPUT_CONF_KEY + "=" + hfiles.toString(),
+            "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B,HBASE_CELL_VISIBILITY",
+            "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b", tableName.getNameAsString() };
+
+    // 2 Data rows, one with valid label and one with invalid label
+    String data =
+        "KEY\u001bVALUE1\u001bVALUE2\u001bprivate\nKEY1\u001bVALUE1\u001bVALUE2\u001binvalid\n";
+    util.createTable(tableName, FAMILY);
+    doMROnTableTest(util, FAMILY, data, args, 1, 2);
+    util.deleteTable(tableName);
+  }
+
+  @Test
+  public void testBulkOutputWithTsvImporterTextMapperWithInvalidLabels() throws Exception {
+    TableName tableName = TableName.valueOf("test-" + UUID.randomUUID());
+    Path hfiles = new Path(util.getDataTestDirOnTestFS(tableName.getNameAsString()), "hfiles");
+    // Prepare the arguments required for the test.
+    String[] args =
+        new String[] {
+            "-D" + ImportTsv.MAPPER_CONF_KEY
+                + "=org.apache.hadoop.hbase.mapreduce.TsvImporterTextMapper",
+            "-D" + ImportTsv.BULK_OUTPUT_CONF_KEY + "=" + hfiles.toString(),
+            "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B,HBASE_CELL_VISIBILITY",
+            "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b", tableName.getNameAsString() };
+
+    // 2 Data rows, one with valid label and one with invalid label
+    String data =
+        "KEY\u001bVALUE1\u001bVALUE2\u001bprivate\nKEY1\u001bVALUE1\u001bVALUE2\u001binvalid\n";
+    util.createTable(tableName, FAMILY);
+    doMROnTableTest(util, FAMILY, data, args, 1, 2);
+    util.deleteTable(tableName);
+  }
+
+  protected static Tool doMROnTableTest(HBaseTestingUtility util, String family, String data,
+      String[] args, int valueMultiplier) throws Exception {
+    return doMROnTableTest(util, family, data, args, valueMultiplier, -1);
+  }
+
   /**
    * Run an ImportTsv job and perform basic validation on the results. Returns
    * the ImportTsv <code>Tool</code> instance so that other tests can inspect it
@@ -281,10 +329,12 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {
    *
    * @param args
    *          Any arguments to pass BEFORE inputFile path is appended.
+   * @param expectedKVCount Expected KV count. pass -1 to skip the kvcount check
+   *
    * @return The Tool instance used to run the test.
    */
   protected static Tool doMROnTableTest(HBaseTestingUtility util, String family, String data,
-      String[] args, int valueMultiplier) throws Exception {
+      String[] args, int valueMultiplier, int expectedKVCount) throws Exception {
     TableName table = TableName.valueOf(args[args.length - 1]);
     Configuration conf = new Configuration(util.getConfiguration());
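A note on the expectedKVCount value of 2 that both new tests pass (this is an interpretation of the test data, not something stated in the patch): each input row carries two value columns, FAM:A and FAM:B, and the row whose visibility label cannot be resolved is expected to be dropped through the InvalidLabelException handling added above, so only the "private" row should contribute KeyValues to the bulk-load output. A trivial Java sketch of that arithmetic, with hypothetical variable names:

// Sketch only: why doMROnTableTest(util, FAMILY, data, args, 1, 2) expects 2 KVs.
int valueColumnsPerRow = 2;        // FAM:A and FAM:B from importtsv.columns
int rowsWithResolvableLabel = 1;   // "private" resolves; "invalid" is skipped as a bad line
int expectedKVCount = valueColumnsPerRow * rowsWithResolvableLabel;  // = 2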
@@ -327,7 +377,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {
     }
     LOG.debug("validating the table " + createdHFiles);
     if (createdHFiles)
-      validateHFiles(fs, outputPath, family);
+      validateHFiles(fs, outputPath, family, expectedKVCount);
     else
       validateTable(conf, table, family, valueMultiplier);
@@ -341,14 +391,15 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {
   /**
    * Confirm ImportTsv via HFiles on fs.
    */
-  private static void validateHFiles(FileSystem fs, String outputPath, String family)
-      throws IOException {
+  private static void validateHFiles(FileSystem fs, String outputPath, String family,
+      int expectedKVCount) throws IOException {
 
     // validate number and content of output columns
     LOG.debug("Validating HFiles.");
     Set<String> configFamilies = new HashSet<String>();
     configFamilies.add(family);
     Set<String> foundFamilies = new HashSet<String>();
+    int actualKVCount = 0;
     for (FileStatus cfStatus : fs.listStatus(new Path(outputPath), new OutputFilesFilter())) {
       LOG.debug("The output path has files");
       String[] elements = cfStatus.getPath().toString().split(Path.SEPARATOR);
@@ -360,8 +411,37 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {
       for (FileStatus hfile : fs.listStatus(cfStatus.getPath())) {
         assertTrue(String.format("HFile %s appears to contain no data.", hfile.getPath()),
           hfile.getLen() > 0);
+        if (expectedKVCount > -1) {
+          actualKVCount += getKVCountFromHfile(fs, hfile.getPath());
+        }
       }
     }
+    if (expectedKVCount > -1) {
+      assertTrue(String.format(
+        "KV count in output hfile=<%d> doesn't match with expected KV count=<%d>", actualKVCount,
+        expectedKVCount), actualKVCount == expectedKVCount);
+    }
+  }
+
+  /**
+   * Method returns the total KVs in given hfile
+   * @param fs File System
+   * @param p HFile path
+   * @return KV count in the given hfile
+   * @throws IOException
+   */
+  private static int getKVCountFromHfile(FileSystem fs, Path p) throws IOException {
+    Configuration conf = util.getConfiguration();
+    HFile.Reader reader = HFile.createReader(fs, p, new CacheConfig(conf), conf);
+    reader.loadFileInfo();
+    HFileScanner scanner = reader.getScanner(false, false);
+    scanner.seekTo();
+    int count = 0;
+    do {
+      count++;
+    } while (scanner.next());
+    reader.close();
+    return count;
   }
 
   /**