HBASE-15789 PB related changes to work with offheap

Adds means of patching our shaded protobuf. Does it using
the Anoop patch attached to HBASE-15789 that adds ByteInput
to protobuf. This patch gets applied after protobuf has been
downloaded, relocated, and then unpacked over src/main/java.

Also fixes a few small build WARNINGs because of duplicate
mentions of dependencies.
This commit is contained in:
Michael Stack 2016-10-14 17:59:04 -07:00
parent 76e7c05474
commit fb15e3d4f5
7 changed files with 1627 additions and 12 deletions

View File

@ -161,10 +161,6 @@
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-server</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-thrift</artifactId>
</dependency>
<!-- To dump tools in hbase-procedure into cached_classpath.txt. -->
<dependency>
<groupId>org.apache.hbase</groupId>

View File

@ -16,6 +16,10 @@ protobuf Message class is at
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message
rather than at com.google.protobuf.Message.
Finally, this module also includes patches applied on top of
protobuf to add functionality not yet in protobuf that we
need now.
Below we describe how to generate the java files for this
module. Run this step any time you change the proto files
in this module or if you change the protobuf version. If you
@ -44,5 +48,9 @@ profile. When finished, the content of
src/main/java/org/apache/hadoop/hbase/shaded will have
been updated. Check in the changes.
If you have patches for protobuf, add them to the
src/main/patches directory. They will be applied after
protobuf is shaded and unbundled into src/main/java.
See the pom.xml under the generate-shaded-classes profile
for more info on how this step works.

View File

@ -191,7 +191,7 @@
<!--When the compile for this profile runs, make sure it makes jars that
can be related back to this shading profile. Give them a shading prefix.
-->
<jar.finalName>${profile.id}.${artifactId}-${project.version}</jar.finalName>
<jar.finalName>${profile.id}.${project.artifactId}-${project.version}</jar.finalName>
</properties>
<build>
<plugins>
@ -337,9 +337,32 @@
</execution>
</executions>
</plugin>
<!--Apply the patches under src/main/patches to the relocated
protobuf sources using the maven-patch-plugin.
-->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-patch-plugin</artifactId>
<version>1.2</version>
<configuration>
<!--Patches are made at top-level-->
<targetDirectory>${basedir}/..</targetDirectory>
<skipApplication>false</skipApplication>
</configuration>
<executions>
<execution>
<id>patch</id>
<configuration>
<strip>1</strip>
<patchDirectory>src/main/patches</patchDirectory>
<patchTrackingFile>${project.build.directory}/patches-applied.txt</patchTrackingFile>
<naturalOrderProcessing>true</naturalOrderProcessing>
</configuration>
<phase>package</phase>
<goals>
<!--This should run after the above unpack phase-->
<goal>apply</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>

View File

@ -0,0 +1,81 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package org.apache.hadoop.hbase.shaded.com.google.protobuf;
import java.io.IOException;
import java.nio.ByteBuffer;
/**
 * An input for raw bytes. This is similar to an InputStream but it is offset addressable: every
 * read takes an offset relative to the start of this ByteInput rather than advancing an
 * internal cursor.
 */
@ExperimentalApi
public abstract class ByteInput {

  /**
   * Reads a single byte from the given offset.
   * @param offset The offset from where the byte is to be read
   * @return The byte of data at the given offset
   */
  public abstract byte read(int offset);

  /**
   * Reads bytes of data from the given offset into an array of bytes.
   * @param offset The src offset within this ByteInput from where data is to be read.
   * @param out Destination byte array to read data into.
   * @return The number of bytes read from this ByteInput
   * @throws IOException if the underlying read fails
   */
  public int read(int offset, byte[] out) throws IOException {
    // Convenience overload: fill the whole destination array.
    return read(offset, out, 0, out.length);
  }

  /**
   * Reads up to <code>len</code> bytes of data from the given offset into an array of bytes.
   * @param offset The src offset within this ByteInput from where data is to be read.
   * @param out Destination byte array to read data into.
   * @param outOffset Offset within the out byte[] where data is to be read into.
   * @param len The number of bytes to read.
   * @return The number of bytes read from this ByteInput
   */
  public abstract int read(int offset, byte[] out, int outOffset, int len);

  /**
   * Reads bytes of data from the given offset into the given {@link ByteBuffer}.
   * @param offset The src offset within this ByteInput from where data is to be read.
   * @param out Destination {@link ByteBuffer} to read data into.
   * @return The number of bytes read from this ByteInput
   */
  public abstract int read(int offset, ByteBuffer out);

  /**
   * @return Total number of bytes in this ByteInput.
   */
  public abstract int size();
}

View File

@ -0,0 +1,249 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package org.apache.hadoop.hbase.shaded.com.google.protobuf;
import java.io.IOException;
import java.io.InputStream;
import java.io.InvalidObjectException;
import java.io.ObjectInputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
/**
 * A {@link ByteString} that wraps around a {@link ByteInput}. The wrapped region is
 * [offset, offset + length) within the ByteInput; all indices exposed by this class are
 * relative to that region.
 */
final class ByteInputByteString extends ByteString.LeafByteString {
  private final ByteInput buffer;
  private final int offset, length;

  ByteInputByteString(ByteInput buffer, int offset, int length) {
    if (buffer == null) {
      throw new NullPointerException("buffer");
    }
    this.buffer = buffer;
    this.offset = offset;
    this.length = length;
  }

  // =================================================================
  // Serializable

  /**
   * Magic method that lets us override serialization behavior.
   * Serializes as a heap-backed ByteString copy since the ByteInput itself is not Serializable.
   */
  private Object writeReplace() {
    return ByteString.wrap(toByteArray());
  }

  /**
   * Magic method that lets us override deserialization behavior.
   */
  private void readObject(@SuppressWarnings("unused") ObjectInputStream in) throws IOException {
    throw new InvalidObjectException("ByteInputByteString instances are not to be serialized directly");
  }

  // =================================================================

  @Override
  public byte byteAt(int index) {
    return buffer.read(getAbsoluteOffset(index));
  }

  /** Translates an index relative to this ByteString into an absolute ByteInput offset. */
  private int getAbsoluteOffset(int relativeOffset) {
    return this.offset + relativeOffset;
  }

  @Override
  public int size() {
    return length;
  }

  @Override
  public ByteString substring(int beginIndex, int endIndex) {
    // endIndex is exclusive per the ByteString contract, so endIndex == size() (and the empty
    // substring where beginIndex == endIndex == size()) must be accepted. The previous check
    // used `>= size()` and wrongly rejected substring(0, size()), which in turn broke
    // equalsRange() for full-length comparisons.
    if (beginIndex < 0 || endIndex < beginIndex || endIndex > size()) {
      throw new IllegalArgumentException(
          String.format("Invalid indices [%d, %d]", beginIndex, endIndex));
    }
    return new ByteInputByteString(this.buffer, getAbsoluteOffset(beginIndex), endIndex - beginIndex);
  }

  @Override
  protected void copyToInternal(
      byte[] target, int sourceOffset, int targetOffset, int numberToCopy) {
    this.buffer.read(getAbsoluteOffset(sourceOffset), target, targetOffset, numberToCopy);
  }

  @Override
  public void copyTo(ByteBuffer target) {
    this.buffer.read(this.offset, target);
  }

  @Override
  public void writeTo(OutputStream out) throws IOException {
    // TODO: copies the whole region onto the heap; could stream in chunks instead.
    out.write(toByteArray());
  }

  @Override
  boolean equalsRange(ByteString other, int offset, int length) {
    return substring(0, length).equals(other.substring(offset, offset + length));
  }

  @Override
  void writeToInternal(OutputStream out, int sourceOffset, int numberToWrite) throws IOException {
    // Reuse ByteBufferWriter's cached scratch buffer to avoid a fresh allocation per write.
    byte[] buf = ByteBufferWriter.getOrCreateBuffer(numberToWrite);
    this.buffer.read(getAbsoluteOffset(sourceOffset), buf, 0, numberToWrite);
    out.write(buf, 0, numberToWrite);
  }

  @Override
  void writeTo(ByteOutput output) throws IOException {
    output.writeLazy(toByteArray(), 0, length);
  }

  @Override
  public ByteBuffer asReadOnlyByteBuffer() {
    return ByteBuffer.wrap(toByteArray()).asReadOnlyBuffer();
  }

  @Override
  public List<ByteBuffer> asReadOnlyByteBufferList() {
    return Collections.singletonList(asReadOnlyByteBuffer());
  }

  @Override
  protected String toStringInternal(Charset charset) {
    byte[] bytes = toByteArray();
    return new String(bytes, 0, bytes.length, charset);
  }

  @Override
  public boolean isValidUtf8() {
    return Utf8.isValidUtf8(buffer, offset, offset + length);
  }

  @Override
  protected int partialIsValidUtf8(int state, int offset, int length) {
    int off = getAbsoluteOffset(offset);
    return Utf8.partialIsValidUtf8(state, buffer, off, off + length);
  }

  @Override
  public boolean equals(Object other) {
    if (other == this) {
      return true;
    }
    if (!(other instanceof ByteString)) {
      return false;
    }
    ByteString otherString = ((ByteString) other);
    if (size() != otherString.size()) {
      return false;
    }
    if (size() == 0) {
      return true;
    }
    if (other instanceof RopeByteString) {
      // RopeByteString knows how to compare against a leaf piecewise; delegate to it.
      return other.equals(this);
    }
    return Arrays.equals(this.toByteArray(), otherString.toByteArray());
  }

  @Override
  protected int partialHash(int h, int offset, int length) {
    offset = getAbsoluteOffset(offset);
    int end = offset + length;
    for (int i = offset; i < end; i++) {
      h = h * 31 + buffer.read(i);
    }
    return h;
  }

  @Override
  public InputStream newInput() {
    return new InputStream() {
      private final ByteInput buf = buffer;
      private int pos = offset;
      private int limit = pos + length;
      private int mark = pos;

      @Override
      public void mark(int readlimit) {
        // Per the InputStream contract, mark() records the CURRENT position so a later
        // reset() returns here; readlimit is only a read-ahead hint (unbounded for us).
        // The previous code stored readlimit itself, making reset() seek to a
        // caller-supplied count instead of the marked position.
        this.mark = this.pos;
      }

      @Override
      public boolean markSupported() {
        return true;
      }

      @Override
      public void reset() throws IOException {
        this.pos = this.mark;
      }

      @Override
      public int available() throws IOException {
        return this.limit - this.pos;
      }

      @Override
      public int read() throws IOException {
        if (available() <= 0) {
          return -1;
        }
        return this.buf.read(pos++) & 0xFF;
      }

      @Override
      public int read(byte[] bytes, int off, int len) throws IOException {
        int remain = available();
        if (remain <= 0) {
          return -1;
        }
        len = Math.min(len, remain);
        buf.read(pos, bytes, off, len);
        pos += len;
        return len;
      }
    };
  }

  @Override
  public CodedInputStream newCodedInput() {
    // We trust CodedInputStream not to modify the bytes, or to give anyone
    // else access to them.
    return CodedInputStream.newInstance(buffer, offset, length, true);
  }
}

File diff suppressed because it is too large Load Diff

View File

@ -393,10 +393,6 @@
<artifactId>hbase-resource-bundle</artifactId>
<version>${project.version}</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
</dependency>
<dependency>
<groupId>commons-codec</groupId>