HADOOP-6169. Removing deprecated method calls in TFile. (hong tang via mahadev)
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@800199 13f79535-47bb-0310-9956-ffa450edef68
parent e5ea0ce4a6
commit 7be6fc4e5d
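Note: the deprecated BytesWritable accessors removed throughout this patch map directly onto their replacements. A standalone sketch of the mapping (illustrative only, not part of the commit; the class name is made up):

import org.apache.hadoop.io.BytesWritable;

// Standalone sketch: the deprecated BytesWritable accessors and the
// replacements used throughout the hunks below.
public class BytesWritableMigration {
  public static void main(String[] args) {
    BytesWritable bw = new BytesWritable(new byte[] { 1, 2, 3 });

    // Deprecated:                 Replacement:
    //   byte[] buf = bw.get();      byte[] buf = bw.getBytes();
    //   int len    = bw.getSize();  int len    = bw.getLength();
    byte[] buf = bw.getBytes();
    int len = bw.getLength();

    // The backing array may be larger than the logical length, so loops
    // should be bounded by getLength() rather than buf.length.
    System.out.println(len + " of " + buf.length + " bytes are valid");
  }
}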
@@ -487,7 +487,9 @@ Trunk (unreleased changes)
     HADOOP-6160. Fix releaseaudit target to run on specific directories.
     (gkesavan)
 
+    HADOOP-6169. Removing deprecated method calls in TFile. (hong tang via mahadev)
+
   OPTIMIZATIONS
 
     HADOOP-5595. NameNode does not need to run a replicator to choose a
@@ -125,7 +125,7 @@ final class BCFile {
       fsOutputBuffer.setCapacity(TFile.getFSOutputBufferSize(conf));
 
       this.fsBufferedOutput =
-          new SimpleBufferedOutputStream(this.fsOut, fsOutputBuffer.get());
+          new SimpleBufferedOutputStream(this.fsOut, fsOutputBuffer.getBytes());
       this.compressor = compressAlgo.getCompressor();
 
       try {
@@ -116,12 +116,12 @@ class BoundedRangeFileInputStream extends InputStream {
   }
 
   @Override
-  public void mark(int readlimit) {
+  public synchronized void mark(int readlimit) {
     mark = pos;
   }
 
   @Override
-  public void reset() throws IOException {
+  public synchronized void reset() throws IOException {
     if (mark < 0) throw new IOException("Resetting to invalid mark");
     pos = mark;
   }
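Note: the synchronized modifier added to mark and reset presumably matches java.io.InputStream, which declares both methods synchronized; dropping the modifier in an override weakens the locking contract and is commonly flagged by static analysis. A standalone sketch of the mark/reset contract callers rely on (illustrative only, not from the patch):

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;

// Standalone sketch: the mark/reset contract implemented by
// BoundedRangeFileInputStream. InputStream declares both methods
// synchronized, hence the matching modifier in the overrides above.
public class MarkResetDemo {
  public static void main(String[] args) throws IOException {
    InputStream in = new ByteArrayInputStream(new byte[] { 1, 2, 3, 4 });
    in.mark(16);                 // remember the current position
    int first = in.read();       // consume one byte past the mark
    in.reset();                  // rewind to the marked position
    System.out.println(in.read() == first);  // true: the byte is re-read
  }
}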
@@ -34,7 +34,7 @@ public final class ByteArray implements RawComparable {
    * @param other
    */
   public ByteArray(BytesWritable other) {
-    this(other.get(), 0, other.getSize());
+    this(other.getBytes(), 0, other.getLength());
   }
 
   /**
@@ -16,6 +16,7 @@
  */
 package org.apache.hadoop.io.file.tfile;
 
+import java.io.Serializable;
 import java.util.Comparator;
 
 import org.apache.hadoop.io.RawComparator;
@@ -72,7 +73,7 @@ class CompareUtils {
     }
   }
 
-  public static final class ScalarComparator implements Comparator<Scalar> {
+  public static final class ScalarComparator implements Comparator<Scalar>, Serializable {
     @Override
     public int compare(Scalar o1, Scalar o2) {
       long diff = o1.magnitude() - o2.magnitude();
@@ -83,7 +84,7 @@ class CompareUtils {
   }
 
   public static final class MemcmpRawComparator implements
-      RawComparator<Object> {
+      RawComparator<Object>, Serializable {
     @Override
     public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
       return WritableComparator.compareBytes(b1, s1, l1, b2, s2, l2);
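Note: both comparators above gain Serializable, likely to satisfy audit tools such as FindBugs, which warn when a Comparator is not serializable (a TreeMap or TreeSet is only serializable if its comparator is). A standalone sketch of the pattern (illustrative only, the comparator below is made up):

import java.io.Serializable;
import java.util.Comparator;
import java.util.TreeMap;

// Standalone sketch: a Comparator that also implements Serializable,
// mirroring the change applied to ScalarComparator and MemcmpRawComparator.
public class LengthComparator implements Comparator<String>, Serializable {
  private static final long serialVersionUID = 1L;

  @Override
  public int compare(String a, String b) {
    return Integer.compare(a.length(), b.length());
  }

  public static void main(String[] args) {
    // A TreeMap ordered by this comparator stays serializable because the
    // comparator itself is serializable.
    TreeMap<String, Integer> map = new TreeMap<>(new LengthComparator());
    map.put("tfile", 1);
    map.put("io", 2);
    System.out.println(map.firstKey()); // "io" (shortest key first)
  }
}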
@@ -325,10 +325,7 @@ public class TFile {
           outIndex.close();
         }
 
-        if (writerBCF != null) {
-          writerBCF.close();
-          writerBCF = null;
-        }
+        writerBCF.close();
       }
     } finally {
       IOUtils.cleanup(LOG, blkAppender, writerBCF);
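Note: the simplified close path above leans on the finally block, where IOUtils.cleanup closes blkAppender and writerBCF best-effort. A rough standalone sketch of that idiom (this is NOT the org.apache.hadoop.io.IOUtils implementation, only the shape of it):

import java.io.Closeable;
import java.io.IOException;
import java.io.StringReader;

// Rough sketch of the cleanup idiom: close each resource best-effort,
// tolerating null entries and swallowing close failures.
public final class CleanupSketch {
  static void cleanup(Closeable... resources) {
    for (Closeable c : resources) {
      if (c == null) {
        continue;                  // resource was never opened
      }
      try {
        c.close();
      } catch (IOException e) {
        // swallow: cleanup must not mask the primary exception, if any
      }
    }
  }

  public static void main(String[] args) {
    // Null entries are skipped and close failures are not rethrown.
    cleanup(null, new StringReader("done"));
    System.out.println("cleanup completed without NPE");
  }
}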
@@ -1583,8 +1580,8 @@ public class TFile {
      */
     public int getKey(BytesWritable key) throws IOException {
       key.setSize(getKeyLength());
-      getKey(key.get());
-      return key.getSize();
+      getKey(key.getBytes());
+      return key.getLength();
     }
 
     /**
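Note: getKey sizes the writable first and then fills its backing array in place. A standalone sketch of that resize-then-fill pattern on BytesWritable (illustrative only, the class name is made up):

import org.apache.hadoop.io.BytesWritable;

// Standalone sketch: grow a BytesWritable with setSize, write into the
// backing array from getBytes, and report the logical size with getLength.
public class BytesWritableFillDemo {
  public static void main(String[] args) {
    BytesWritable key = new BytesWritable();
    byte[] source = { 10, 20, 30 };
    key.setSize(source.length);                        // logical length = 3
    System.arraycopy(source, 0, key.getBytes(), 0, source.length);
    System.out.println(key.getLength());               // 3
    System.out.println(key.getBytes().length >= key.getLength()); // true
  }
}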
@@ -1603,10 +1600,10 @@ public class TFile {
         int remain;
         while ((remain = valueBufferInputStream.getRemain()) > 0) {
           value.setSize(size + remain);
-          dis.readFully(value.get(), size, remain);
+          dis.readFully(value.getBytes(), size, remain);
           size += remain;
         }
-        return value.getSize();
+        return value.getLength();
       } finally {
         dis.close();
       }
@@ -1645,8 +1642,8 @@ public class TFile {
         while ((chunkSize = valueBufferInputStream.getRemain()) > 0) {
           chunkSize = Math.min(chunkSize, MAX_VAL_TRANSFER_BUF_SIZE);
           valTransferBuffer.setSize(chunkSize);
-          dis.readFully(valTransferBuffer.get(), 0, chunkSize);
-          out.write(valTransferBuffer.get(), 0, chunkSize);
+          dis.readFully(valTransferBuffer.getBytes(), 0, chunkSize);
+          out.write(valTransferBuffer.getBytes(), 0, chunkSize);
           size += chunkSize;
         }
         return size;
@@ -353,6 +353,7 @@ public final class Utils {
     /**
      * Return a string representation of the version.
      */
+    @Override
     public String toString() {
       return new StringBuilder("v").append(major).append(".").append(minor)
           .toString();
@@ -42,9 +42,9 @@ public class TestTFile extends TestCase {
       System.getProperty("test.build.data", "/tmp/tfile-test");
   private FileSystem fs;
   private Configuration conf;
-  private final int minBlockSize = 512;
-  private final int largeVal = 3 * 1024 * 1024;
-  private static String localFormatter = "%010d";
+  private static final int minBlockSize = 512;
+  private static final int largeVal = 3 * 1024 * 1024;
+  private static final String localFormatter = "%010d";
 
   @Override
   public void setUp() throws IOException {
@@ -18,6 +18,7 @@
 package org.apache.hadoop.io.file.tfile;
 
 import java.io.IOException;
+import java.io.Serializable;
 
 import org.apache.hadoop.io.RawComparator;
 import org.apache.hadoop.io.WritableComparator;
@@ -42,7 +43,7 @@ public class TestTFileJClassComparatorByteArrays extends TestTFileByteArrays {
   }
 }
 
-class MyComparator implements RawComparator<byte[]> {
+class MyComparator implements RawComparator<byte[]>, Serializable {
 
   @Override
   public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
@@ -140,7 +140,7 @@ public class TestVLong extends TestCase {
     for (int i = 0; i < data.length; ++i) {
       int shift = rng.nextInt(Long.SIZE) + 1;
       long mask = (1L << shift) - 1;
-      long a = rng.nextInt() << 32;
+      long a = ((long) rng.nextInt()) << 32;
       long b = ((long) rng.nextInt()) & 0xffffffff;
       data[i] = (a + b) & mask;
     }
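Note: the TestVLong change fixes a classic Java shift pitfall: for an int operand the shift distance is masked with & 31, so rng.nextInt() << 32 leaves the value unchanged instead of moving it into the high 32 bits; casting to long first gives the intended 64-bit shift. A standalone sketch (illustrative only, not from the test):

import java.util.Random;

// Standalone sketch: shifting an int by 32 is a no-op because the distance
// is masked with & 31; widen to long first to populate the high word.
public class IntShiftPitfall {
  public static void main(String[] args) {
    int x = new Random(42).nextInt();
    long wrong = x << 32;            // int shift: 32 & 31 == 0, value unchanged
    long right = ((long) x) << 32;   // long shift: x lands in the high 32 bits
    System.out.println(wrong == x);                            // true
    System.out.println((right >>> 32) == (x & 0xffffffffL));   // true
  }
}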