HBASE-19472 Remove ArrayUtil Class

Signed-off-by: Chia-Ping Tsai <chia7712@gmail.com>
BELUGA BEHR authored 2017-12-13 19:01:08 +08:00, committed by Chia-Ping Tsai
parent 677c1f2c63
commit 536187446d
7 changed files with 23 additions and 156 deletions
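In short, the commit deletes the HBase-private org.apache.hadoop.hbase.util.ArrayUtils helper class and points the surviving call sites at Apache Commons Lang's org.apache.commons.lang3.ArrayUtils instead. The key equivalence is that commons-lang3 getLength(), like the removed length() helpers, is null-safe. A minimal standalone sketch of that behavior (a demo class of my own, not code from this commit):

import org.apache.commons.lang3.ArrayUtils;

public class NullSafeLengthDemo {
  public static void main(String[] args) {
    byte[] row = null;
    // The removed org.apache.hadoop.hbase.util.ArrayUtils.length(row) returned 0 for null;
    // commons-lang3 ArrayUtils.getLength(row) does the same, so null arrays stay safe.
    System.out.println(ArrayUtils.getLength(row));                  // prints 0
    System.out.println(ArrayUtils.getLength(new byte[] {1, 2, 3})); // prints 3
  }
}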

View File

@@ -33,6 +33,7 @@ import java.util.stream.Collectors;
import java.util.TreeMap;
import java.util.TreeSet;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -51,7 +52,6 @@ import org.apache.hadoop.hbase.backup.BackupInfo.BackupState;
import org.apache.hadoop.hbase.backup.BackupRestoreConstants;
import org.apache.hadoop.hbase.backup.BackupType;
import org.apache.hadoop.hbase.backup.util.BackupUtils;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Delete;
@@ -62,12 +62,12 @@ import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.SnapshotDescription;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.util.ArrayUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
/**
* This class provides API to access backup system table<br>

View File

@@ -20,12 +20,11 @@ package org.apache.hadoop.hbase.client;
import java.util.Arrays;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.util.ArrayUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;

View File

@@ -17,8 +17,8 @@
*/
package org.apache.hadoop.hbase;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.util.ArrayUtils;
@InterfaceAudience.Private
public abstract class ExtendedCellBuilderImpl implements ExtendedCellBuilder {
@@ -49,7 +49,7 @@ public abstract class ExtendedCellBuilderImpl implements ExtendedCellBuilder {
@Override
public ExtendedCellBuilder setRow(final byte[] row) {
return setRow(row, 0, ArrayUtils.length(row));
return setRow(row, 0, ArrayUtils.getLength(row));
}
@Override
@@ -62,7 +62,7 @@ public abstract class ExtendedCellBuilderImpl implements ExtendedCellBuilder {
@Override
public ExtendedCellBuilder setFamily(final byte[] family) {
return setFamily(family, 0, ArrayUtils.length(family));
return setFamily(family, 0, ArrayUtils.getLength(family));
}
@Override
@@ -75,7 +75,7 @@ public abstract class ExtendedCellBuilderImpl implements ExtendedCellBuilder {
@Override
public ExtendedCellBuilder setQualifier(final byte[] qualifier) {
return setQualifier(qualifier, 0, ArrayUtils.length(qualifier));
return setQualifier(qualifier, 0, ArrayUtils.getLength(qualifier));
}
@Override
@@ -106,7 +106,7 @@ public abstract class ExtendedCellBuilderImpl implements ExtendedCellBuilder {
@Override
public ExtendedCellBuilder setValue(final byte[] value) {
return setValue(value, 0, ArrayUtils.length(value));
return setValue(value, 0, ArrayUtils.getLength(value));
}
@Override
@@ -119,7 +119,7 @@ public abstract class ExtendedCellBuilderImpl implements ExtendedCellBuilder {
@Override
public ExtendedCellBuilder setTags(final byte[] tags) {
return setTags(tags, 0, ArrayUtils.length(tags));
return setTags(tags, 0, ArrayUtils.getLength(tags));
}
@Override

View File

@@ -18,7 +18,7 @@
package org.apache.hadoop.hbase;
import org.apache.hadoop.hbase.util.ArrayUtils;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.yetus.audience.InterfaceAudience;
@@ -61,12 +61,12 @@ public class IndividualBytesFieldCell implements ExtendedCell {
public IndividualBytesFieldCell(byte[] row, byte[] family, byte[] qualifier,
long timestamp, KeyValue.Type type, long seqId, byte[] value, byte[] tags) {
this(row, 0, ArrayUtils.length(row),
family, 0, ArrayUtils.length(family),
qualifier, 0, ArrayUtils.length(qualifier),
this(row, 0, ArrayUtils.getLength(row),
family, 0, ArrayUtils.getLength(family),
qualifier, 0, ArrayUtils.getLength(qualifier),
timestamp, type, seqId,
value, 0, ArrayUtils.length(value),
tags, 0, ArrayUtils.length(tags));
value, 0, ArrayUtils.getLength(value),
tags, 0, ArrayUtils.getLength(tags));
}
public IndividualBytesFieldCell(byte[] row, int rOffset, int rLength,
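A side note on the constructor above: the removed helper class needed a separate length() overload per array type (byte[], long[], Object[]), whereas commons-lang3 exposes a single getLength(Object) that handles any array reflectively and still returns 0 for null. A small illustration (standalone demo with names of my own, not part of the commit):

import org.apache.commons.lang3.ArrayUtils;

public class GetLengthOverloadDemo {
  public static void main(String[] args) {
    byte[] value = {1, 2, 3};
    long[] timestamps = null;
    String[] names = {"row", "family"};
    // One Object-typed method covers primitive and object arrays alike,
    // returning 0 when the reference is null.
    System.out.println(ArrayUtils.getLength(value));      // 3
    System.out.println(ArrayUtils.getLength(timestamps)); // 0
    System.out.println(ArrayUtils.getLength(names));      // 2
  }
}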

View File

@@ -1,133 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.util;
import java.util.ArrayList;
import java.util.Arrays;
import org.apache.yetus.audience.InterfaceAudience;
/**
* A set of array utility functions that return reasonable values in cases where an array is
* allocated or if it is null
*/
@InterfaceAudience.Private
public class ArrayUtils {
public static int length(byte[] a) {
if (a == null) {
return 0;
}
return a.length;
}
public static int length(long[] a) {
if (a == null) {
return 0;
}
return a.length;
}
public static int length(Object[] a) {
if (a == null) {
return 0;
}
return a.length;
}
public static boolean isEmpty(byte[] a) {
if (a == null) {
return true;
}
if (a.length == 0) {
return true;
}
return false;
}
public static boolean isEmpty(long[] a) {
if (a == null) {
return true;
}
if (a.length == 0) {
return true;
}
return false;
}
public static boolean isEmpty(Object[] a) {
if (a == null) {
return true;
}
if (a.length == 0) {
return true;
}
return false;
}
public static long getFirst(long[] a) {
return a[0];
}
public static long getLast(long[] a) {
return a[a.length - 1];
}
public static int getTotalLengthOfArrays(Iterable<byte[]> arrays) {
if (arrays == null) {
return 0;
}
int length = 0;
for (byte[] bytes : arrays) {
length += length(bytes);
}
return length;
}
public static ArrayList<Long> toList(long[] array){
int length = length(array);
ArrayList<Long> list = new ArrayList<>(length);
for(int i=0; i < length; ++i){
list.add(array[i]);
}
return list;
}
public static byte[] growIfNecessary(byte[] array, int minLength, int numAdditionalBytes) {
if(array.length >= minLength){
return array;
}
return Arrays.copyOf(array, minLength + numAdditionalBytes);
}
public static int[] growIfNecessary(int[] array, int minLength, int numAdditionalInts) {
if(array.length >= minLength){
return array;
}
return Arrays.copyOf(array, minLength + numAdditionalInts);
}
public static long[] growIfNecessary(long[] array, int minLength, int numAdditionalLongs) {
if(array.length >= minLength){
return array;
}
return Arrays.copyOf(array, minLength + numAdditionalLongs);
}
}
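The class above is deleted outright rather than migrated wholesale; the call sites visible in this diff only ever used the null-safe length(). For any downstream code that relied on the other helpers, plausible stand-ins from commons-lang3 and the JDK would be roughly the following (my mapping, not something this commit introduces):

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import org.apache.commons.lang3.ArrayUtils;

public class ArrayUtilsReplacements {
  public static void main(String[] args) {
    long[] values = {4L, 8L, 15L};
    // length(...)  -> ArrayUtils.getLength(...), null-safe for any array type.
    int len = ArrayUtils.getLength(values);
    // isEmpty(...) -> ArrayUtils.isEmpty(...), overloads exist for byte[], long[], Object[].
    boolean empty = ArrayUtils.isEmpty(values);
    // toList(long[]) -> box the primitives through a stream.
    List<Long> asList = Arrays.stream(values).boxed().collect(Collectors.toList());
    // getFirst/getLast and growIfNecessary have no direct commons-lang3 twin;
    // plain indexing and Arrays.copyOf cover them.
    long first = values[0];
    long last = values[values.length - 1];
    long[] grown = values.length >= 5 ? values : Arrays.copyOf(values, 5 + 2);
    System.out.println(len + " " + empty + " " + asList + " " + first + " " + last + " " + grown.length);
  }
}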

View File

@@ -27,6 +27,8 @@ import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
@@ -60,7 +62,6 @@ import org.apache.hadoop.hbase.regionserver.RegionScanner;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.security.token.FsDelegationToken;
import org.apache.hadoop.hbase.util.ArrayUtils;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Triple;
@@ -107,7 +108,7 @@ public class Export extends ExportProtos.ExportService implements RegionCoproces
static Map<byte[], Response> run(final Configuration conf, final String[] args) throws Throwable {
String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
if (!ExportUtils.isValidArguements(args)) {
ExportUtils.usage("Wrong number of arguments: " + ArrayUtils.length(otherArgs));
ExportUtils.usage("Wrong number of arguments: " + ArrayUtils.getLength(otherArgs));
return null;
}
Triple<TableName, Scan, Path> arguments = ExportUtils.getArgumentsFromCommandLine(conf, otherArgs);

View File

@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.mapreduce;
import java.io.IOException;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
@@ -29,13 +29,13 @@ import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.ArrayUtils;
import org.apache.hadoop.hbase.util.Triple;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.yetus.audience.InterfaceAudience;
/**
* Export an HBase table.
@@ -78,7 +78,7 @@ public class Export extends Configured implements Tool {
@Override
public int run(String[] args) throws Exception {
if (!ExportUtils.isValidArguements(args)) {
ExportUtils.usage("Wrong number of arguments: " + ArrayUtils.length(args));
ExportUtils.usage("Wrong number of arguments: " + ArrayUtils.getLength(args));
System.err.println(" -D " + JOB_NAME_CONF_KEY
+ "=jobName - use the specified mapreduce job name for the export");
System.err.println("For MR performance consider the following properties:");