From a8ef734dddb656de6210218065e67791686b7bd0 Mon Sep 17 00:00:00 2001
From: Michael Stack
Date: Thu, 28 Dec 2017 13:04:41 -0800
Subject: [PATCH] HBASE-19651 Remove LimitInputStream

Signed-off-by: Beluga Behr
---
 .../hbase/shaded/protobuf/ProtobufUtil.java   |   4 +-
 .../hadoop/hbase/io/LimitInputStream.java     | 105 ------------------
 .../regionserver/wal/ProtobufLogReader.java   |  10 +-
 3 files changed, 7 insertions(+), 112 deletions(-)
 delete mode 100644 hbase-common/src/main/java/org/apache/hadoop/hbase/io/LimitInputStream.java

diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
index 7a5efb118c8..cc30e53dd9f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
@@ -38,6 +38,7 @@ import java.util.function.Function;
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;
 
+import org.apache.commons.io.input.BoundedInputStream;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.ByteBufferExtendedCell;
@@ -87,7 +88,6 @@ import org.apache.hadoop.hbase.client.security.SecurityCapability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.filter.ByteArrayComparable;
 import org.apache.hadoop.hbase.filter.Filter;
-import org.apache.hadoop.hbase.io.LimitInputStream;
 import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.hadoop.hbase.protobuf.ProtobufMagic;
 import org.apache.hadoop.hbase.protobuf.ProtobufMessageConverter;
@@ -2626,7 +2626,7 @@ public final class ProtobufUtil {
     final int firstByte = in.read();
     if (firstByte != -1) {
       final int size = CodedInputStream.readRawVarint32(firstByte, in);
-      final InputStream limitedInput = new LimitInputStream(in, size);
+      final InputStream limitedInput = new BoundedInputStream(in, size);
       final CodedInputStream codedInput = CodedInputStream.newInstance(limitedInput);
       codedInput.setSizeLimit(size);
       builder.mergeFrom(codedInput);
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/LimitInputStream.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/LimitInputStream.java
deleted file mode 100644
index 6eb710a13eb..00000000000
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/LimitInputStream.java
+++ /dev/null
@@ -1,105 +0,0 @@
-/*
- * Copyright (C) 2007 The Guava Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-package org.apache.hadoop.hbase.io;
-
-import static org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkArgument;
-import static org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkNotNull;
-
-import java.io.FilterInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-
-import org.apache.yetus.audience.InterfaceAudience;
-
-/**
- * Copied from guava source code v15 (LimitedInputStream)
- * Guava deprecated LimitInputStream in v14 and removed it in v15. Copying this class here
- * allows to be compatible with guava 11 to 15+.
- */
-@InterfaceAudience.Private
-public final class LimitInputStream extends FilterInputStream {
-  private long left;
-  private long mark = -1;
-
-  public LimitInputStream(InputStream in, long limit) {
-    super(in);
-    checkNotNull(in);
-    checkArgument(limit >= 0, "limit must be non-negative");
-    left = limit;
-  }
-
-  @Override
-  public int available() throws IOException {
-    return (int) Math.min(in.available(), left);
-  }
-
-  // it's okay to mark even if mark isn't supported, as reset won't work
-  @Override
-  public synchronized void mark(int readLimit) {
-    in.mark(readLimit);
-    mark = left;
-  }
-
-  @Override
-  public int read() throws IOException {
-    if (left == 0) {
-      return -1;
-    }
-
-    int result = in.read();
-    if (result != -1) {
-      --left;
-    }
-    return result;
-  }
-
-  @Override
-  public int read(byte[] b, int off, int len) throws IOException {
-    if (left == 0) {
-      return -1;
-    }
-
-    len = (int) Math.min(len, left);
-    int result = in.read(b, off, len);
-    if (result != -1) {
-      left -= result;
-    }
-    return result;
-  }
-
-  @Override
-  public synchronized void reset() throws IOException {
-    if (!in.markSupported()) {
-      throw new IOException("Mark not supported");
-    }
-    if (mark == -1) {
-      throw new IOException("Mark not set");
-    }
-
-    in.reset();
-    left = mark;
-  }
-
-  @Override
-  public long skip(long n) throws IOException {
-    n = Math.min(n, left);
-    long skipped = in.skip(n);
-    left -= skipped;
-    return skipped;
-  }
-}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
index cba2c61959b..7babb55e437 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
@@ -26,15 +26,12 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
-import org.apache.yetus.audience.InterfaceAudience;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.commons.io.input.BoundedInputStream;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.hbase.codec.Codec;
-import org.apache.hadoop.hbase.io.LimitInputStream;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos;
@@ -43,6 +40,9 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.wal.WAL.Entry;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream;
 import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
@@ -350,7 +350,7 @@ public class ProtobufLogReader extends ReaderBase {
             "inputStream.available()= " + this.inputStream.available() + ", " +
             "entry size= " + size + " at offset = " + this.inputStream.getPos());
       }
-      ProtobufUtil.mergeFrom(builder, new LimitInputStream(this.inputStream, size),
+      ProtobufUtil.mergeFrom(builder, new BoundedInputStream(this.inputStream, size),
         (int)size);
     } catch (InvalidProtocolBufferException ipbe) {
       throw (EOFException) new EOFException("Invalid PB, EOF? Ignoring; originalPosition=" +
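
Note: commons-io's BoundedInputStream is a behavioral stand-in for the removed
Guava-derived LimitInputStream: it caps read(), read(byte[], int, int), skip(),
and available() at the configured limit, returning -1 at the cap while leaving
any bytes past it unread in the wrapped stream. A minimal, self-contained
sketch of that contract follows; it is not part of the patch, the class name
and sample data are illustrative, and it assumes commons-io on the classpath:

    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;

    import org.apache.commons.io.input.BoundedInputStream;

    public class BoundedInputStreamDemo {
      public static void main(String[] args) throws IOException {
        InputStream in =
          new ByteArrayInputStream("0123456789".getBytes(StandardCharsets.US_ASCII));
        // Cap the wrapper at 4 bytes, the way the call sites above cap a
        // protobuf message at its length prefix.
        InputStream limited = new BoundedInputStream(in, 4);
        byte[] buf = new byte[8];
        int n = limited.read(buf);   // 4: only "0123" is visible through the wrapper
        int eof = limited.read();    // -1: limit reached, not real end-of-stream
        int next = in.read();        // '4': the wrapped stream still has the rest
        System.out.println(n + " " + eof + " " + (char) next);
      }
    }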