HBASE-11118 non environment variable solution for "IllegalAccessError: class com.google.protobuf.ZeroCopyLiteralByteString cannot access its superclass com.google.protobuf.LiteralByteString". -Addendum

anoopsjohn 2014-07-12 22:02:26 +05:30
parent 5a408af65f
commit f5e13c7460
4 changed files with 7 additions and 10 deletions
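
For context: every hunk below replaces a direct HBaseZeroCopyByteString.wrap(...) call with ByteStringer.wrap(...). The diff does not include ByteStringer itself, but the commit title implies the helper decides at runtime whether the zero-copy wrapper can be linked, falling back to a plain copying ByteString instead of requiring an environment variable workaround. The following is a minimal sketch of that delegation pattern, assuming a one-time probe for the IllegalAccessError; the class name and layout are illustrative, not the actual org.apache.hadoop.hbase.util.ByteStringer source.

import com.google.protobuf.ByteString;
import com.google.protobuf.HBaseZeroCopyByteString;

// Illustrative sketch only -- not the actual HBase ByteStringer implementation.
public final class ByteStringerSketch {
  // Decided once: can the zero-copy subclass actually be linked in this classloader setup?
  private static final boolean USE_ZERO_COPY;

  static {
    boolean zeroCopy = true;
    try {
      // Probe: fails with IllegalAccessError when protobuf's package-private
      // LiteralByteString superclass was loaded by a different classloader.
      HBaseZeroCopyByteString.wrap(new byte[0]);
    } catch (IllegalAccessError e) {
      zeroCopy = false;
    }
    USE_ZERO_COPY = zeroCopy;
  }

  private ByteStringerSketch() {}

  public static ByteString wrap(final byte[] array) {
    return USE_ZERO_COPY
        ? HBaseZeroCopyByteString.wrap(array) // zero-copy view over the caller's array
        : ByteString.copyFrom(array);         // safe fallback: copies the bytes
  }
}

Probing once in a static initializer keeps the per-call cost to a single boolean check, which matters for hot paths like the RPC and snapshot code touched below.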

ProtobufUtil.java

@@ -137,7 +137,6 @@ import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.protobuf.ByteString;
-import com.google.protobuf.HBaseZeroCopyByteString;
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.Message;
import com.google.protobuf.Parser;
@@ -2496,7 +2495,7 @@ public final class ProtobufUtil {
for (Path outputPath : outputPaths) {
builder.addCompactionOutput(outputPath.getName());
}
-builder.setRegionName(HBaseZeroCopyByteString.wrap(info.getRegionName()));
+builder.setRegionName(ByteStringer.wrap(info.getRegionName()));
return builder.build();
}

RSRpcServices.java

@@ -152,6 +152,7 @@ import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
import org.apache.hadoop.hbase.regionserver.wal.HLogSplitter;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
+import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Counter;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@@ -161,7 +162,6 @@ import org.apache.hadoop.net.DNS;
import org.apache.zookeeper.KeeperException;
import com.google.protobuf.ByteString;
-import com.google.protobuf.HBaseZeroCopyByteString;
import com.google.protobuf.Message;
import com.google.protobuf.RpcController;
import com.google.protobuf.ServiceException;
@@ -1443,7 +1443,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
RollWALWriterResponse.Builder builder = RollWALWriterResponse.newBuilder();
if (regionsToFlush != null) {
for (byte[] region: regionsToFlush) {
-builder.addRegionToFlush(HBaseZeroCopyByteString.wrap(region));
+builder.addRegionToFlush(ByteStringer.wrap(region));
}
}
return builder.build();

SnapshotManifestV1.java

@@ -40,11 +40,10 @@ import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
import org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
+import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils;
-import com.google.protobuf.HBaseZeroCopyByteString;
/**
* DO NOT USE DIRECTLY. USE {@link SnapshotManifest}.
*
@@ -185,7 +184,7 @@ public class SnapshotManifestV1 {
// 2.1. build the snapshot reference for the store
SnapshotRegionManifest.FamilyFiles.Builder family =
SnapshotRegionManifest.FamilyFiles.newBuilder();
-family.setFamilyName(HBaseZeroCopyByteString.wrap(Bytes.toBytes(familyName)));
+family.setFamilyName(ByteStringer.wrap(Bytes.toBytes(familyName)));
if (LOG.isDebugEnabled()) {
LOG.debug("Adding snapshot references for " + storeFiles + " hfiles");

SnapshotManifestV2.java

@@ -41,10 +41,9 @@ import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
+import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.FSUtils;
-import com.google.protobuf.HBaseZeroCopyByteString;
/**
* DO NOT USE DIRECTLY. USE {@link SnapshotManifest}.
*
@@ -93,7 +92,7 @@ public class SnapshotManifestV2 {
final SnapshotRegionManifest.Builder region, final byte[] familyName) {
SnapshotRegionManifest.FamilyFiles.Builder family =
SnapshotRegionManifest.FamilyFiles.newBuilder();
-family.setFamilyName(HBaseZeroCopyByteString.wrap(familyName));
+family.setFamilyName(ByteStringer.wrap(familyName));
return family;
}