From 813cd30de5b6d56eb6e5c316088f479ada0b174d Mon Sep 17 00:00:00 2001 From: Michael Stack Date: Fri, 30 Nov 2012 06:27:21 +0000 Subject: [PATCH] HBASE-7232 Remove HbaseMapWritable git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1415507 13f79535-47bb-0310-9956-ffa450edef68 --- .../hbase/protobuf/generated/HFileProtos.java | 647 ++++++++++++++++++ hbase-protocol/src/main/protobuf/HFile.proto | 29 + .../hadoop/hbase/io/HbaseMapWritable.java | 222 ------ .../hadoop/hbase/io/HbaseObjectWritable.java | 2 +- .../hbase/io/hfile/AbstractHFileReader.java | 1 - .../hbase/io/hfile/AbstractHFileWriter.java | 8 +- .../apache/hadoop/hbase/io/hfile/HFile.java | 206 +++++- .../hadoop/hbase/io/hfile/HFileBlock.java | 21 - .../hbase/io/hfile/HFileBlockIndex.java | 2 +- .../hadoop/hbase/io/hfile/HFileReaderV1.java | 2 +- .../hadoop/hbase/io/hfile/HFileReaderV2.java | 6 +- .../hadoop/hbase/io/hfile/HFileWriterV1.java | 2 - .../hadoop/hbase/io/hfile/HFileWriterV2.java | 5 +- .../hbase/io/hfile/InlineBlockWriter.java | 2 +- .../hbase/mapred/TableMapReduceUtil.java | 2 - .../hbase/mapreduce/TableMapReduceUtil.java | 1 - .../hadoop/hbase/protobuf/ProtobufUtil.java | 2 +- .../hbase/util/CompoundBloomFilterWriter.java | 5 +- .../hadoop/hbase/TestSerialization.java | 14 - .../hbase/io/TestHbaseObjectWritable.java | 2 +- .../hbase/io/hfile/TestHFileReaderV1.java | 14 +- .../hbase/io/hfile/TestHFileWriterV2.java | 28 +- 22 files changed, 906 insertions(+), 317 deletions(-) create mode 100644 hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HFileProtos.java create mode 100644 hbase-protocol/src/main/protobuf/HFile.proto delete mode 100644 hbase-server/src/main/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HFileProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HFileProtos.java new file mode 100644 index 00000000000..b5bd3596acb --- /dev/null +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HFileProtos.java @@ -0,0 +1,647 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: HFile.proto + +package org.apache.hadoop.hbase.protobuf.generated; + +public final class HFileProtos { + private HFileProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface FileInfoProtoOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .BytesBytesPair mapEntry = 1; + java.util.List + getMapEntryList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getMapEntry(int index); + int getMapEntryCount(); + java.util.List + getMapEntryOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getMapEntryOrBuilder( + int index); + } + public static final class FileInfoProto extends + com.google.protobuf.GeneratedMessage + implements FileInfoProtoOrBuilder { + // Use FileInfoProto.newBuilder() to construct. 
+ private FileInfoProto(Builder builder) { + super(builder); + } + private FileInfoProto(boolean noInit) {} + + private static final FileInfoProto defaultInstance; + public static FileInfoProto getDefaultInstance() { + return defaultInstance; + } + + public FileInfoProto getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_FileInfoProto_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_FileInfoProto_fieldAccessorTable; + } + + // repeated .BytesBytesPair mapEntry = 1; + public static final int MAPENTRY_FIELD_NUMBER = 1; + private java.util.List mapEntry_; + public java.util.List getMapEntryList() { + return mapEntry_; + } + public java.util.List + getMapEntryOrBuilderList() { + return mapEntry_; + } + public int getMapEntryCount() { + return mapEntry_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getMapEntry(int index) { + return mapEntry_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getMapEntryOrBuilder( + int index) { + return mapEntry_.get(index); + } + + private void initFields() { + mapEntry_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getMapEntryCount(); i++) { + if (!getMapEntry(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < mapEntry_.size(); i++) { + output.writeMessage(1, mapEntry_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < mapEntry_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, mapEntry_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto other = (org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto) obj; + + boolean result = true; + result = result && getMapEntryList() + .equals(other.getMapEntryList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getMapEntryCount() > 0) { + hash = (37 * hash) + MAPENTRY_FIELD_NUMBER; + hash = (53 * hash) + 
getMapEntryList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static 
final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProtoOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_FileInfoProto_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_FileInfoProto_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getMapEntryFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (mapEntryBuilder_ == null) { + mapEntry_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + mapEntryBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto build() { + org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto result = new org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto(this); + int from_bitField0_ = bitField0_; + if (mapEntryBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + mapEntry_ = java.util.Collections.unmodifiableList(mapEntry_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.mapEntry_ = mapEntry_; + } else { + result.mapEntry_ = mapEntryBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto other) { + if (other == 
org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto.getDefaultInstance()) return this; + if (mapEntryBuilder_ == null) { + if (!other.mapEntry_.isEmpty()) { + if (mapEntry_.isEmpty()) { + mapEntry_ = other.mapEntry_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureMapEntryIsMutable(); + mapEntry_.addAll(other.mapEntry_); + } + onChanged(); + } + } else { + if (!other.mapEntry_.isEmpty()) { + if (mapEntryBuilder_.isEmpty()) { + mapEntryBuilder_.dispose(); + mapEntryBuilder_ = null; + mapEntry_ = other.mapEntry_; + bitField0_ = (bitField0_ & ~0x00000001); + mapEntryBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getMapEntryFieldBuilder() : null; + } else { + mapEntryBuilder_.addAllMessages(other.mapEntry_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getMapEntryCount(); i++) { + if (!getMapEntry(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addMapEntry(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // repeated .BytesBytesPair mapEntry = 1; + private java.util.List mapEntry_ = + java.util.Collections.emptyList(); + private void ensureMapEntryIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + mapEntry_ = new java.util.ArrayList(mapEntry_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> mapEntryBuilder_; + + public java.util.List getMapEntryList() { + if (mapEntryBuilder_ == null) { + return java.util.Collections.unmodifiableList(mapEntry_); + } else { + return mapEntryBuilder_.getMessageList(); + } + } + public int getMapEntryCount() { + if (mapEntryBuilder_ == null) { + return mapEntry_.size(); + } else { + return mapEntryBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getMapEntry(int index) { + if (mapEntryBuilder_ == null) { + return mapEntry_.get(index); + } else { + return mapEntryBuilder_.getMessage(index); + } + } + public Builder setMapEntry( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) { + if (mapEntryBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureMapEntryIsMutable(); + mapEntry_.set(index, value); + onChanged(); + } else { + mapEntryBuilder_.setMessage(index, 
value); + } + return this; + } + public Builder setMapEntry( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { + if (mapEntryBuilder_ == null) { + ensureMapEntryIsMutable(); + mapEntry_.set(index, builderForValue.build()); + onChanged(); + } else { + mapEntryBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addMapEntry(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) { + if (mapEntryBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureMapEntryIsMutable(); + mapEntry_.add(value); + onChanged(); + } else { + mapEntryBuilder_.addMessage(value); + } + return this; + } + public Builder addMapEntry( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) { + if (mapEntryBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureMapEntryIsMutable(); + mapEntry_.add(index, value); + onChanged(); + } else { + mapEntryBuilder_.addMessage(index, value); + } + return this; + } + public Builder addMapEntry( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { + if (mapEntryBuilder_ == null) { + ensureMapEntryIsMutable(); + mapEntry_.add(builderForValue.build()); + onChanged(); + } else { + mapEntryBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addMapEntry( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { + if (mapEntryBuilder_ == null) { + ensureMapEntryIsMutable(); + mapEntry_.add(index, builderForValue.build()); + onChanged(); + } else { + mapEntryBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllMapEntry( + java.lang.Iterable values) { + if (mapEntryBuilder_ == null) { + ensureMapEntryIsMutable(); + super.addAll(values, mapEntry_); + onChanged(); + } else { + mapEntryBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearMapEntry() { + if (mapEntryBuilder_ == null) { + mapEntry_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + mapEntryBuilder_.clear(); + } + return this; + } + public Builder removeMapEntry(int index) { + if (mapEntryBuilder_ == null) { + ensureMapEntryIsMutable(); + mapEntry_.remove(index); + onChanged(); + } else { + mapEntryBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder getMapEntryBuilder( + int index) { + return getMapEntryFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getMapEntryOrBuilder( + int index) { + if (mapEntryBuilder_ == null) { + return mapEntry_.get(index); } else { + return mapEntryBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getMapEntryOrBuilderList() { + if (mapEntryBuilder_ != null) { + return mapEntryBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(mapEntry_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addMapEntryBuilder() { + return getMapEntryFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addMapEntryBuilder( + 
int index) { + return getMapEntryFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance()); + } + public java.util.List + getMapEntryBuilderList() { + return getMapEntryFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> + getMapEntryFieldBuilder() { + if (mapEntryBuilder_ == null) { + mapEntryBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>( + mapEntry_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + mapEntry_ = null; + } + return mapEntryBuilder_; + } + + // @@protoc_insertion_point(builder_scope:FileInfoProto) + } + + static { + defaultInstance = new FileInfoProto(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:FileInfoProto) + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_FileInfoProto_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_FileInfoProto_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\013HFile.proto\032\013hbase.proto\"2\n\rFileInfoPr" + + "oto\022!\n\010mapEntry\030\001 \003(\0132\017.BytesBytesPairBA" + + "\n*org.apache.hadoop.hbase.protobuf.gener" + + "atedB\013HFileProtosH\001\210\001\001\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_FileInfoProto_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_FileInfoProto_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_FileInfoProto_descriptor, + new java.lang.String[] { "MapEntry", }, + org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto.class, + org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto.Builder.class); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(), + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/hbase-protocol/src/main/protobuf/HFile.proto b/hbase-protocol/src/main/protobuf/HFile.proto new file mode 100644 index 00000000000..8e9e6fa61a1 --- /dev/null +++ b/hbase-protocol/src/main/protobuf/HFile.proto @@ -0,0 +1,29 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +option java_package = "org.apache.hadoop.hbase.protobuf.generated"; +option java_outer_classname = "HFileProtos"; +option java_generic_services = true; +option java_generate_equals_and_hash = true; +option optimize_for = SPEED; + +import "hbase.proto"; + +// Map of name/values +message FileInfoProto { + repeated BytesBytesPair mapEntry = 1; +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java deleted file mode 100644 index e72d39740aa..00000000000 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java +++ /dev/null @@ -1,222 +0,0 @@ -/** - * - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hbase.io; - -import java.io.DataInput; -import java.io.DataOutput; -import java.io.IOException; -import java.util.Collection; -import java.util.Comparator; -import java.util.Map; -import java.util.Set; -import java.util.SortedMap; -import java.util.TreeMap; -import java.util.concurrent.atomic.AtomicReference; - -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.conf.Configurable; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.io.Writable; -import org.apache.hadoop.util.ReflectionUtils; - -/** - * A Writable Map. - * Like {@link org.apache.hadoop.io.MapWritable} but dumb. It will fail - * if passed a value type that it has not already been told about. Its been - * primed with hbase Writables and byte []. Keys are always byte arrays. - * - * @param key TODO: Parameter K is never used, could be removed. - * @param value Expects a Writable or byte []. 
- */ -@InterfaceAudience.Private -public class HbaseMapWritable -implements SortedMap, Configurable, Writable, CodeToClassAndBack{ - private AtomicReference conf = null; - protected SortedMap instance = null; - - /** - * The default contructor where a TreeMap is used - **/ - public HbaseMapWritable(){ - this (new TreeMap(Bytes.BYTES_COMPARATOR)); - } - - /** - * Contructor where another SortedMap can be used - * - * @param map the SortedMap to be used - */ - public HbaseMapWritable(SortedMap map){ - conf = new AtomicReference(); - instance = map; - } - - - /** @return the conf */ - public Configuration getConf() { - return conf.get(); - } - - /** @param conf the conf to set */ - public void setConf(Configuration conf) { - this.conf.set(conf); - } - - public void clear() { - instance.clear(); - } - - public boolean containsKey(Object key) { - return instance.containsKey(key); - } - - public boolean containsValue(Object value) { - return instance.containsValue(value); - } - - public Set> entrySet() { - return instance.entrySet(); - } - - public V get(Object key) { - return instance.get(key); - } - - public boolean isEmpty() { - return instance.isEmpty(); - } - - public Set keySet() { - return instance.keySet(); - } - - public int size() { - return instance.size(); - } - - public Collection values() { - return instance.values(); - } - - public void putAll(Map m) { - this.instance.putAll(m); - } - - public V remove(Object key) { - return this.instance.remove(key); - } - - public V put(byte [] key, V value) { - return this.instance.put(key, value); - } - - public Comparator comparator() { - return this.instance.comparator(); - } - - public byte[] firstKey() { - return this.instance.firstKey(); - } - - public SortedMap headMap(byte[] toKey) { - return this.instance.headMap(toKey); - } - - public byte[] lastKey() { - return this.instance.lastKey(); - } - - public SortedMap subMap(byte[] fromKey, byte[] toKey) { - return this.instance.subMap(fromKey, toKey); - } - - public SortedMap tailMap(byte[] fromKey) { - return this.instance.tailMap(fromKey); - } - - // Writable - - /** @return the Class class for the specified id */ - @SuppressWarnings("boxing") - protected Class getClass(byte id) { - return CODE_TO_CLASS.get(id); - } - - /** @return the id for the specified Class */ - @SuppressWarnings("boxing") - protected byte getId(Class clazz) { - Byte b = CLASS_TO_CODE.get(clazz); - if (b == null) { - throw new NullPointerException("Nothing for : " + clazz); - } - return b; - } - - /** - * @see java.lang.Object#toString() - */ - @Override - public String toString() { - return this.instance.toString(); - } - - public void write(DataOutput out) throws IOException { - // Write out the number of entries in the map - out.writeInt(this.instance.size()); - // Then write out each key/value pair - for (Map.Entry e: instance.entrySet()) { - Bytes.writeByteArray(out, e.getKey()); - Byte id = getId(e.getValue().getClass()); - out.writeByte(id); - Object value = e.getValue(); - if (value instanceof byte []) { - Bytes.writeByteArray(out, (byte [])value); - } else { - ((Writable)value).write(out); - } - } - } - - @SuppressWarnings("unchecked") - public void readFields(DataInput in) throws IOException { - // First clear the map. Otherwise we will just accumulate - // entries every time this method is called. 
- this.instance.clear(); - // Read the number of entries in the map - int entries = in.readInt(); - // Then read each key/value pair - for (int i = 0; i < entries; i++) { - byte [] key = Bytes.readByteArray(in); - byte id = in.readByte(); - Class clazz = getClass(id); - V value = null; - if (clazz.equals(byte [].class)) { - byte [] bytes = Bytes.readByteArray(in); - value = (V)bytes; - } else { - Writable w = (Writable)ReflectionUtils. - newInstance(clazz, getConf()); - w.readFields(in); - value = (V)w; - } - this.instance.put(key, value); - } - } -} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java index 110869a6ad5..2c71139b247 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java @@ -167,7 +167,7 @@ public class HbaseObjectWritable implements Writable, WritableWithSize, Configur addToMap(Text.class, code++); addToMap(Writable.class, code++); addToMap(Writable [].class, code++); - addToMap(HbaseMapWritable.class, code++); + code++; // Removed addToMap(NullInstance.class, code++); // Hbase types diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java index e96a70f1281..54caffd82e4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java @@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.io.hfile; import java.io.IOException; import java.nio.ByteBuffer; -import java.util.concurrent.atomic.AtomicLong; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.fs.FSDataInputStream; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java index 000e11a9b0e..8632328eb51 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hbase.io.hfile; -import java.io.DataOutput; +import java.io.DataOutputStream; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; @@ -134,7 +134,7 @@ public abstract class AbstractHFileWriter implements HFile.Writer { */ protected void finishFileInfo() throws IOException { if (lastKeyBuffer != null) { - // Make a copy. The copy is stuffed into HMapWritable. Needs a clean + // Make a copy. The copy is stuffed into our fileinfo map. Needs a clean // byte buffer. Won't take a tuple. 
fileInfo.append(FileInfo.LASTKEY, Arrays.copyOfRange(lastKeyBuffer, lastKeyOffset, lastKeyOffset + lastKeyLength), false); @@ -175,8 +175,8 @@ public abstract class AbstractHFileWriter implements HFile.Writer { * @param out the data output to write the file info to * @throws IOException */ - protected final void writeFileInfo(FixedFileTrailer trailer, DataOutput out) - throws IOException { + protected final void writeFileInfo(FixedFileTrailer trailer, DataOutputStream out) + throws IOException { trailer.setFileInfoOffset(outputStream.getPos()); finishFileInfo(); fileInfo.write(out); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java index 376dc23ce13..013733731b1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java @@ -18,14 +18,22 @@ */ package org.apache.hadoop.hbase.io.hfile; +import java.io.ByteArrayInputStream; import java.io.Closeable; import java.io.DataInput; +import java.io.DataInputStream; +import java.io.DataOutputStream; import java.io.IOException; +import java.io.SequenceInputStream; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Collection; +import java.util.Comparator; import java.util.List; import java.util.Map; +import java.util.Set; +import java.util.SortedMap; +import java.util.TreeMap; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.BlockingQueue; import java.util.concurrent.atomic.AtomicInteger; @@ -46,9 +54,12 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.KeyComparator; import org.apache.hadoop.hbase.fs.HFileSystem; -import org.apache.hadoop.hbase.io.HbaseMapWritable; import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos; +import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair; +import org.apache.hadoop.hbase.protobuf.generated.HFileProtos; import org.apache.hadoop.hbase.util.BloomFilterWriter; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ChecksumType; @@ -58,6 +69,7 @@ import org.apache.hadoop.io.Writable; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; +import com.google.protobuf.ByteString; /** * File format for hbase. @@ -145,7 +157,8 @@ public class HFile { /** Minimum supported HFile format version */ public static final int MIN_FORMAT_VERSION = 1; - /** Maximum supported HFile format version */ + /** Maximum supported HFile format version + */ public static final int MAX_FORMAT_VERSION = 2; /** Default compression name: none. */ @@ -282,14 +295,19 @@ public class HFile { /** @return the path to this {@link HFile} */ Path getPath(); - void appendMetaBlock(String bloomFilterMetaKey, Writable metaWriter); - /** * Adds an inline block writer such as a multi-level block index writer or * a compound Bloom filter writer. */ void addInlineBlockWriter(InlineBlockWriter bloomWriter); + // The below three methods take Writables. We'd like to undo Writables but undoing the below would be pretty + // painful. 
Could take a byte [] or a Message but we want to be backward compatible around hfiles so would need + // to map between Message and Writable or byte [] and current Writable serialization. This would be a bit of work + // to little gain. Thats my thinking at moment. St.Ack 20121129 + + void appendMetaBlock(String bloomFilterMetaKey, Writable metaWriter); + /** * Store general Bloom filter in the file. This does not deal with Bloom filter * internals but is necessary, since Bloom filters are stored differently @@ -642,19 +660,21 @@ public class HFile { DataBlockEncoding.NONE, null); } - /* + /** * Metadata for this file. Conjured by the writer. Read in by the reader. */ - static class FileInfo extends HbaseMapWritable { + static class FileInfo implements SortedMap { static final String RESERVED_PREFIX = "hfile."; static final byte[] RESERVED_PREFIX_BYTES = Bytes.toBytes(RESERVED_PREFIX); static final byte [] LASTKEY = Bytes.toBytes(RESERVED_PREFIX + "LASTKEY"); - static final byte [] AVG_KEY_LEN = - Bytes.toBytes(RESERVED_PREFIX + "AVG_KEY_LEN"); - static final byte [] AVG_VALUE_LEN = - Bytes.toBytes(RESERVED_PREFIX + "AVG_VALUE_LEN"); - static final byte [] COMPARATOR = - Bytes.toBytes(RESERVED_PREFIX + "COMPARATOR"); + static final byte [] AVG_KEY_LEN = Bytes.toBytes(RESERVED_PREFIX + "AVG_KEY_LEN"); + static final byte [] AVG_VALUE_LEN = Bytes.toBytes(RESERVED_PREFIX + "AVG_VALUE_LEN"); + static final byte [] COMPARATOR = Bytes.toBytes(RESERVED_PREFIX + "COMPARATOR"); + private final SortedMap map = new TreeMap(Bytes.BYTES_COMPARATOR); + + public FileInfo() { + super(); + } /** * Append the given key/value pair to the file info, optionally checking the @@ -680,6 +700,167 @@ public class HFile { return this; } + public void clear() { + this.map.clear(); + } + + public Comparator comparator() { + return map.comparator(); + } + + public boolean containsKey(Object key) { + return map.containsKey(key); + } + + public boolean containsValue(Object value) { + return map.containsValue(value); + } + + public Set> entrySet() { + return map.entrySet(); + } + + public boolean equals(Object o) { + return map.equals(o); + } + + public byte[] firstKey() { + return map.firstKey(); + } + + public byte[] get(Object key) { + return map.get(key); + } + + public int hashCode() { + return map.hashCode(); + } + + public SortedMap headMap(byte[] toKey) { + return this.map.headMap(toKey); + } + + public boolean isEmpty() { + return map.isEmpty(); + } + + public Set keySet() { + return map.keySet(); + } + + public byte[] lastKey() { + return map.lastKey(); + } + + public byte[] put(byte[] key, byte[] value) { + return this.map.put(key, value); + } + + public void putAll(Map m) { + this.map.putAll(m); + } + + public byte[] remove(Object key) { + return this.map.remove(key); + } + + public int size() { + return map.size(); + } + + public SortedMap subMap(byte[] fromKey, byte[] toKey) { + return this.map.subMap(fromKey, toKey); + } + + public SortedMap tailMap(byte[] fromKey) { + return this.map.tailMap(fromKey); + } + + public Collection values() { + return map.values(); + } + + /** + * Write out this instance on the passed in out stream. + * We write it as a protobuf. 
+ * @param out + * @throws IOException + * @see {@link #read(DataInputStream)} + */ + void write(final DataOutputStream out) throws IOException { + HFileProtos.FileInfoProto.Builder builder = HFileProtos.FileInfoProto.newBuilder(); + for (Map.Entry e: this.map.entrySet()) { + HBaseProtos.BytesBytesPair.Builder bbpBuilder = HBaseProtos.BytesBytesPair.newBuilder(); + bbpBuilder.setFirst(ByteString.copyFrom(e.getKey())); + bbpBuilder.setSecond(ByteString.copyFrom(e.getValue())); + builder.addMapEntry(bbpBuilder.build()); + } + out.write(ProtobufUtil.PB_MAGIC); + builder.build().writeDelimitedTo(out); + } + + /** + * Populate this instance with what we find on the passed in in stream. + * Can deserialize protobuf of old Writables format. + * @param in + * @throws IOException + * @see {@link #write(DataOutputStream)} + */ + void read(final DataInputStream in) throws IOException { + // This code is tested over in TestHFileReaderV1 where we read an old hfile w/ this new code. + int pblen = ProtobufUtil.lengthOfPBMagic(); + byte [] pbuf = new byte[pblen]; + if (in.markSupported()) in.mark(pblen); + int read = in.read(pbuf); + if (read != pblen) throw new IOException("read=" + read + ", wanted=" + pblen); + if (ProtobufUtil.isPBMagicPrefix(pbuf)) { + parsePB(HFileProtos.FileInfoProto.parseDelimitedFrom(in)); + } else { + if (in.markSupported()) { + in.reset(); + parseWritable(in); + } else { + // We cannot use BufferedInputStream, it consumes more than we read from the underlying IS + ByteArrayInputStream bais = new ByteArrayInputStream(pbuf); + SequenceInputStream sis = new SequenceInputStream(bais, in); // Concatenate input streams + // TODO: Am I leaking anything here wrapping the passed in stream? We are not calling close on the wrapped + // streams but they should be let go after we leave this context? I see that we keep a reference to the + // passed in inputstream but since we no longer have a reference to this after we leave, we should be ok. + parseWritable(new DataInputStream(sis)); + } + } + } + + /** Now parse the old Writable format. It was a list of Map entries. Each map entry was a key and a value of + * a byte []. The old map format had a byte before each entry that held a code which was short for the key or + * value type. We know it was a byte [] so in below we just read and dump it. + * @throws IOException + */ + void parseWritable(final DataInputStream in) throws IOException { + // First clear the map. Otherwise we will just accumulate entries every time this method is called. + this.map.clear(); + // Read the number of entries in the map + int entries = in.readInt(); + // Then read each key/value pair + for (int i = 0; i < entries; i++) { + byte [] key = Bytes.readByteArray(in); + // We used to read a byte that encoded the class type. Read and ignore it because it is always byte [] in hfile + in.readByte(); + byte [] value = Bytes.readByteArray(in); + this.map.put(key, value); + } + } + + /** + * Fill our map with content of the pb we read off disk + * @param fip protobuf message to read + */ + void parsePB(final HFileProtos.FileInfoProto fip) { + this.map.clear(); + for (BytesBytesPair pair: fip.getMapEntryList()) { + this.map.put(pair.getFirst().toByteArray(), pair.getSecond().toByteArray()); + } + } } /** Return true if the given file info key is reserved for internal use. 
*/ @@ -761,5 +942,4 @@ public class HFile { + MAX_FORMAT_VERSION + ")"); } } - } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java index 8a665271cc7..97459b40209 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java @@ -46,9 +46,7 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ChecksumType; import org.apache.hadoop.hbase.util.ClassSize; import org.apache.hadoop.hbase.util.CompoundBloomFilter; -import org.apache.hadoop.hbase.util.Writables; import org.apache.hadoop.io.IOUtils; -import org.apache.hadoop.io.Writable; import com.google.common.base.Preconditions; @@ -370,20 +368,6 @@ public class HFileBlock implements Cacheable { return dupBuf; } - /** - * Deserializes fields of the given writable using the data portion of this - * block. Does not check that all the block data has been read. - */ - void readInto(Writable w) throws IOException { - Preconditions.checkNotNull(w); - - if (Writables.getWritable(buf.array(), buf.arrayOffset() + headerSize(), - buf.limit() - headerSize(), w) == null) { - throw new IOException("Failed to deserialize block " + this + " into a " - + w.getClass().getSimpleName()); - } - } - private void sanityCheckAssertion(long valueFromBuf, long valueFromField, String fieldName) throws IOException { if (valueFromBuf != valueFromField) { @@ -634,9 +618,6 @@ public class HFileBlock implements Cacheable { /** Writer state. Used to ensure the correct usage protocol. */ private State state = State.INIT; - /** Compression algorithm for all blocks this instance writes. */ - private final Compression.Algorithm compressAlgo; - /** Data block encoder used for data blocks */ private final HFileDataBlockEncoder dataBlockEncoder; @@ -720,8 +701,6 @@ public class HFileBlock implements Cacheable { public Writer(Compression.Algorithm compressionAlgorithm, HFileDataBlockEncoder dataBlockEncoder, boolean includesMemstoreTS, ChecksumType checksumType, int bytesPerChecksum) { - compressAlgo = compressionAlgorithm == null ? Compression.Algorithm.NONE : - compressionAlgorithm; this.dataBlockEncoder = dataBlockEncoder != null ? dataBlockEncoder : NoOpDataBlockEncoder.INSTANCE; defaultBlockEncodingCtx = diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java index c7cf8742e64..ab2ef657f77 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java @@ -1127,7 +1127,7 @@ public class HFileBlockIndex { * or null. */ @Override - public boolean cacheOnWrite() { + public boolean getCacheOnWrite() { return blockCache != null; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV1.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV1.java index 629b3461ac6..125dcdc8845 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV1.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV1.java @@ -94,7 +94,7 @@ public class HFileReaderV1 extends AbstractHFileReader { // Read in the fileinfo and get what we need from it. 
istream.seek(trailer.getFileInfoOffset()); fileInfo = new FileInfo(); - fileInfo.readFields(istream); + fileInfo.read(istream); lastKey = fileInfo.get(FileInfo.LASTKEY); avgKeyLen = Bytes.toInt(fileInfo.get(FileInfo.AVG_KEY_LEN)); avgValueLen = Bytes.toInt(fileInfo.get(FileInfo.AVG_VALUE_LEN)); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java index 6eecd4265c5..b9a79f96d9b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java @@ -77,7 +77,9 @@ public class HFileReaderV2 extends AbstractHFileReader { static final int MIN_MINOR_VERSION = 0; /** Maximum minor version supported by this HFile format */ - static final int MAX_MINOR_VERSION = 1; + // We went to version 2 when we moved to pb'ing the fileinfo trailer on the file. This version can read Writables + // version 1 too. + static final int MAX_MINOR_VERSION = 2; /** * Opens a HFile. You must load the index before you can use it by calling @@ -135,7 +137,7 @@ public class HFileReaderV2 extends AbstractHFileReader { // File info fileInfo = new FileInfo(); - fileInfo.readFields(blockIter.nextBlockWithBlockType(BlockType.FILE_INFO).getByteStream()); + fileInfo.read(blockIter.nextBlockWithBlockType(BlockType.FILE_INFO).getByteStream()); lastKey = fileInfo.get(FileInfo.LASTKEY); avgKeyLen = Bytes.toInt(fileInfo.get(FileInfo.AVG_KEY_LEN)); avgValueLen = Bytes.toInt(fileInfo.get(FileInfo.AVG_VALUE_LEN)); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV1.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV1.java index d272255bc21..c61be26f8b6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV1.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV1.java @@ -81,7 +81,6 @@ public class HFileWriterV1 extends AbstractHFileWriter { // Additional byte array output stream used to fill block cache private ByteArrayOutputStream baos; private DataOutputStream baosDos; - private int blockNumber = 0; static class WriterFactoryV1 extends HFile.WriterFactory { WriterFactoryV1(Configuration conf, CacheConfig cacheConf) { @@ -161,7 +160,6 @@ public class HFileWriterV1 extends AbstractHFileWriter { block.getBlockType()), block); baosDos.close(); } - blockNumber++; } /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java index 2d7b0d8a074..79a1e8742d8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java @@ -189,7 +189,7 @@ public class HFileWriterV2 extends AbstractHFileWriter { for (InlineBlockWriter ibw : inlineBlockWriters) { while (ibw.shouldWriteBlock(closing)) { long offset = outputStream.getPos(); - boolean cacheThisBlock = ibw.cacheOnWrite(); + boolean cacheThisBlock = ibw.getCacheOnWrite(); ibw.writeInlineBlock(fsBlockWriter.startWriting( ibw.getInlineBlockType())); fsBlockWriter.writeHeaderAndData(outputStream); @@ -463,5 +463,4 @@ public class HFileWriterV2 extends AbstractHFileWriter { } }); } - -} +} \ No newline at end of file diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/InlineBlockWriter.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/InlineBlockWriter.java index 0c0cef9646d..d0c52d0d9ea 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/InlineBlockWriter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/InlineBlockWriter.java @@ -69,5 +69,5 @@ public interface InlineBlockWriter { /** * @return true if inline blocks produced by this writer should be cached */ - boolean cacheOnWrite(); + boolean getCacheOnWrite(); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java index 3c4527785d6..3daf068ea62 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java @@ -27,8 +27,6 @@ import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.mapreduce.MutationSerialization; import org.apache.hadoop.hbase.mapreduce.ResultSerialization; import org.apache.hadoop.hbase.security.User; -import org.apache.hadoop.io.Writable; -import org.apache.hadoop.io.WritableComparable; import org.apache.hadoop.mapred.FileInputFormat; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.InputFormat; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java index fb2fe4e55b5..29c0fb2c0d5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java @@ -32,7 +32,6 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseConfiguration; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java index 41fc4feba9f..8dfa59dd022 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java @@ -168,7 +168,7 @@ public final class ProtobufUtil { * For example, all znode content is protobuf messages with the below magic * for preamble. 
*/ - static final byte [] PB_MAGIC = new byte [] {'P', 'B', 'U', 'F'}; + public static final byte [] PB_MAGIC = new byte [] {'P', 'B', 'U', 'F'}; private static final String PB_MAGIC_STR = Bytes.toString(PB_MAGIC); /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompoundBloomFilterWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompoundBloomFilterWriter.java index 4ca41ff4746..9f214ee2c28 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompoundBloomFilterWriter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompoundBloomFilterWriter.java @@ -271,8 +271,7 @@ public class CompoundBloomFilterWriter extends CompoundBloomFilterBase } @Override - public boolean cacheOnWrite() { + public boolean getCacheOnWrite() { return cacheOnWrite; } - -} +} \ No newline at end of file diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestSerialization.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestSerialization.java index 71e5d623c08..54942405f90 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestSerialization.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestSerialization.java @@ -36,7 +36,6 @@ import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.filter.PrefixFilter; import org.apache.hadoop.hbase.filter.RowFilter; -import org.apache.hadoop.hbase.io.HbaseMapWritable; import org.apache.hadoop.hbase.io.TimeRange; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; @@ -78,19 +77,6 @@ public class TestSerialization { assertTrue(KeyValue.COMPARATOR.compare(original, newone) == 0); } - @SuppressWarnings("unchecked") - @Test public void testHbaseMapWritable() throws Exception { - HbaseMapWritable hmw = - new HbaseMapWritable(); - hmw.put("key".getBytes(), "value".getBytes()); - byte [] bytes = Writables.getBytes(hmw); - hmw = (HbaseMapWritable) - Writables.getWritable(bytes, new HbaseMapWritable()); - assertTrue(hmw.size() == 1); - assertTrue(Bytes.equals("value".getBytes(), hmw.get("key".getBytes()))); - } - - @Test public void testTableDescriptor() throws Exception { final String name = "testTableDescriptor"; HTableDescriptor htd = createTableDescriptor(name); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java index a58510b20e2..cf77f43c6a2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java @@ -475,7 +475,7 @@ public class TestHbaseObjectWritable extends TestCase { assertEquals(13,HbaseObjectWritable.getClassCode(Text.class).intValue()); assertEquals(14,HbaseObjectWritable.getClassCode(Writable.class).intValue()); assertEquals(15,HbaseObjectWritable.getClassCode(Writable [].class).intValue()); - assertEquals(16,HbaseObjectWritable.getClassCode(HbaseMapWritable.class).intValue()); + // 16 Removed // 17 is NullInstance which isn't visible from here // Hbase types diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileReaderV1.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileReaderV1.java index a36fb546bb7..336426b7341 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileReaderV1.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileReaderV1.java @@ -19,9 +19,11 @@ package org.apache.hadoop.hbase.io.hfile; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + import java.io.IOException; import java.net.URL; -import java.util.Map; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; @@ -30,13 +32,10 @@ import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.util.Bytes; - -import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; - -import static org.junit.Assert.*; +import org.mortbay.log.Log; @Category(SmallTests.class) public class TestHFileReaderV1 { @@ -90,7 +89,4 @@ public class TestHFileReaderV1 { } reader.close(); } - - -} - +} \ No newline at end of file diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java index 310f66274e9..d94af7ca61c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java @@ -19,7 +19,9 @@ package org.apache.hadoop.hbase.io.hfile; -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; import java.io.ByteArrayInputStream; import java.io.DataInputStream; @@ -35,11 +37,14 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.*; +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.io.compress.Compression.Algorithm; import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.Writables; import org.apache.hadoop.io.RawComparator; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.WritableUtils; @@ -70,14 +75,12 @@ public class TestHFileWriterV2 { @Test public void testHFileFormatV2() throws IOException { - Path hfilePath = new Path(TEST_UTIL.getDataTestDir(), - "testHFileFormatV2"); + Path hfilePath = new Path(TEST_UTIL.getDataTestDir(), "testHFileFormatV2"); final Compression.Algorithm compressAlgo = Compression.Algorithm.GZ; final int entryCount = 10000; writeDataAndReadFromHFile(hfilePath, compressAlgo, entryCount, false); } - - + @Test public void testMidKeyInHFile() throws IOException{ Path hfilePath = new Path(TEST_UTIL.getDataTestDir(), @@ -98,9 +101,6 @@ public class TestHFileWriterV2 { .withComparator(KeyValue.KEY_COMPARATOR) .create(); - long totalKeyLength = 0; - long totalValueLength = 0; - Random rand = new Random(9713312); // Just a fixed seed. 
List keys = new ArrayList(); @@ -113,9 +113,6 @@ public class TestHFileWriterV2 { byte[] valueBytes = randomValue(rand); writer.append(keyBytes, valueBytes); - totalKeyLength += keyBytes.length; - totalValueLength += valueBytes.length; - keys.add(keyBytes); values.add(valueBytes); } @@ -172,7 +169,7 @@ public class TestHFileWriterV2 { trailer.getMetaIndexCount()); // File info FileInfo fileInfo = new FileInfo(); - fileInfo.readFields(blockIter.nextBlockWithBlockType(BlockType.FILE_INFO).getByteStream()); + fileInfo.read(blockIter.nextBlockWithBlockType(BlockType.FILE_INFO).getByteStream()); byte [] keyValueFormatVersion = fileInfo.get( HFileWriterV2.KEY_VALUE_VERSION); boolean includeMemstoreTS = keyValueFormatVersion != null && @@ -233,7 +230,10 @@ public class TestHFileWriterV2 { HFileBlock block = blockReader.readBlockData(curBlockPos, -1, -1, false); assertEquals(BlockType.META, block.getBlockType()); Text t = new Text(); - block.readInto(t); + ByteBuffer buf = block.getBufferWithoutHeader(); + if (Writables.getWritable(buf.array(), buf.arrayOffset(), buf.limit(), t) == null) { + throw new IOException("Failed to deserialize block " + this + " into a " + t.getClass().getSimpleName()); + } Text expectedText = (metaCounter == 0 ? new Text("Paris") : metaCounter == 1 ? new Text( "Moscow") : new Text("Washington, D.C."));
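
Illustrative sketch (not applied by this patch): a standalone round-trip through the pb-based fileinfo format that the FileInfo.write()/read() changes above introduce -- the PB_MAGIC preamble followed by a length-delimited FileInfoProto of BytesBytesPair entries. The class name FileInfoPBExample and the toy keys/values are invented for the example; the ProtobufUtil, HFileProtos and HBaseProtos calls are the ones the patch itself uses. The magic preamble is the piece that lets FileInfo.read() keep accepting hfiles written in the old Writables layout.

// Sketch only: mirrors the wire format of HFile.FileInfo.write()/read() in this patch.
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.util.Map;
import java.util.TreeMap;

import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair;
import org.apache.hadoop.hbase.protobuf.generated.HFileProtos;
import org.apache.hadoop.hbase.util.Bytes;

import com.google.protobuf.ByteString;

public class FileInfoPBExample {   // hypothetical class, not part of the patch
  public static void main(String[] args) throws IOException {
    // A toy fileinfo map, keyed the same way FileInfo is (byte [] -> byte []).
    Map<byte[], byte[]> map = new TreeMap<byte[], byte[]>(Bytes.BYTES_COMPARATOR);
    map.put(Bytes.toBytes("hfile.LASTKEY"), Bytes.toBytes("rowZ"));
    map.put(Bytes.toBytes("hfile.AVG_KEY_LEN"), Bytes.toBytes(4));

    // Write side: PB_MAGIC preamble, then a delimited FileInfoProto of BytesBytesPair entries.
    HFileProtos.FileInfoProto.Builder builder = HFileProtos.FileInfoProto.newBuilder();
    for (Map.Entry<byte[], byte[]> e : map.entrySet()) {
      builder.addMapEntry(BytesBytesPair.newBuilder()
          .setFirst(ByteString.copyFrom(e.getKey()))
          .setSecond(ByteString.copyFrom(e.getValue()))
          .build());
    }
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    out.write(ProtobufUtil.PB_MAGIC);
    builder.build().writeDelimitedTo(out);

    // Read side: peek at the preamble to tell pb apart from the pre-pb Writables format,
    // then parse the delimited message back into key/value pairs.
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(out.toByteArray()));
    byte[] pbuf = new byte[ProtobufUtil.lengthOfPBMagic()];
    in.readFully(pbuf);
    if (!ProtobufUtil.isPBMagicPrefix(pbuf)) {
      throw new IOException("Not pb-serialized fileinfo; a reader would fall back to the Writable parse");
    }
    HFileProtos.FileInfoProto proto = HFileProtos.FileInfoProto.parseDelimitedFrom(in);
    for (BytesBytesPair pair : proto.getMapEntryList()) {
      System.out.println(Bytes.toString(pair.getFirst().toByteArray()) + " = "
          + pair.getSecond().toByteArray().length + " bytes");
    }
  }
}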