diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 11b4979df58..fa46641f0b6 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -12,6 +12,9 @@ Release 2.0.1-alpha - UNRELEASED
     HDFS-3042. Automatic failover support for NameNode HA (todd)
     (see dedicated section below for breakdown of subtasks)
 
+    HADOOP-8135. Add ByteBufferReadable interface to FSDataInputStream. (Henry
+    Robinson via atm)
+
   IMPROVEMENTS
 
     HADOOP-8340. SNAPSHOT build versions should compare as less than their eventual
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ByteBufferReadable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ByteBufferReadable.java
new file mode 100644
index 00000000000..f47269c9c97
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ByteBufferReadable.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+/**
+ * Implementers of this interface provide a read API that writes to a
+ * ByteBuffer, not a byte[].
+ */
+public interface ByteBufferReadable {
+  /**
+   * Reads up to buf.remaining() bytes into buf. Callers should use
+   * buf.limit(..) to control the size of the desired read.
+   *
+   * After the call, buf.position() should be unchanged, and therefore any data
+   * can be immediately read from buf.
+   *
+   * Many implementations will throw {@link UnsupportedOperationException}, so
+   * callers that are not confident in support for this method from the
+   * underlying filesystem should be prepared to handle that exception.
+   *
+   * @param buf
+   *          the ByteBuffer to receive the results of the read operation
+   * @return the number of bytes available to read from buf
+   * @throws IOException if there is some error performing the read
+   */
+  public int read(ByteBuffer buf) throws IOException;
+}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataInputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataInputStream.java
index b93506c638d..3b14cc77e1f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataInputStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataInputStream.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.fs;
 
 import java.io.*;
+import java.nio.ByteBuffer;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -27,7 +28,7 @@ import org.apache.hadoop.classification.InterfaceStability;
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class FSDataInputStream extends DataInputStream
-  implements Seekable, PositionedReadable, Closeable {
+  implements Seekable, PositionedReadable, Closeable, ByteBufferReadable {
 
   public FSDataInputStream(InputStream in)
     throws IOException {
@@ -116,4 +117,12 @@ public class FSDataInputStream extends DataInputStream
   public InputStream getWrappedStream() {
     return in;
   }
+
+  public int read(ByteBuffer buf) throws IOException {
+    if (in instanceof ByteBufferReadable) {
+      return ((ByteBufferReadable)in).read(buf);
+    }
+
+    throw new UnsupportedOperationException("Byte-buffer read unsupported by input stream");
+  }
 }
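Usage sketch (not part of the patch): the following illustrates how a caller might exercise the new FSDataInputStream.read(ByteBuffer) method and fall back to the existing byte[] read path when the wrapped stream does not implement ByteBufferReadable, as the javadoc above advises. The file path, buffer size, and class name are illustrative assumptions, not anything taken from the patch.

import java.io.IOException;
import java.nio.ByteBuffer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ByteBufferReadExample {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);

    // Per the interface contract, the read is bounded by buf.remaining().
    ByteBuffer buf = ByteBuffer.allocate(8192);

    // Hypothetical input file, used only for illustration.
    FSDataInputStream in = fs.open(new Path("/tmp/example.dat"));
    try {
      try {
        // Throws UnsupportedOperationException if the wrapped stream is not
        // ByteBufferReadable (see FSDataInputStream.read(ByteBuffer) above).
        int n = in.read(buf);
        System.out.println("ByteBuffer read returned " + n + " bytes");
      } catch (UnsupportedOperationException e) {
        // Fall back to the classic byte[] read path.
        byte[] arr = new byte[buf.remaining()];
        int n = in.read(arr, 0, arr.length);
        System.out.println("byte[] read returned " + n + " bytes");
      }
    } finally {
      in.close();
    }
  }
}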