HADOOP-6169. Removing deprecated method calls in TFile. (hong tang via mahadev)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@800199 13f79535-47bb-0310-9956-ffa450edef68
Mahadev Konar 2009-08-03 05:06:59 +00:00
parent e5ea0ce4a6
commit 7be6fc4e5d
10 changed files with 24 additions and 22 deletions


@@ -487,7 +487,9 @@ Trunk (unreleased changes)
     HADOOP-6160. Fix releaseaudit target to run on specific directories.
     (gkesavan)
+    HADOOP-6169. Removing deprecated method calls in TFile. (hong tang
+    via mahadev)
 
   OPTIMIZATIONS
 
     HADOOP-5595. NameNode does not need to run a replicator to choose a


@@ -125,7 +125,7 @@ final class BCFile {
       fsOutputBuffer.setCapacity(TFile.getFSOutputBufferSize(conf));
       this.fsBufferedOutput =
-          new SimpleBufferedOutputStream(this.fsOut, fsOutputBuffer.get());
+          new SimpleBufferedOutputStream(this.fsOut, fsOutputBuffer.getBytes());
       this.compressor = compressAlgo.getCompressor();
       try {


@@ -116,12 +116,12 @@ class BoundedRangeFileInputStream extends InputStream {
   }
 
   @Override
-  public void mark(int readlimit) {
+  public synchronized void mark(int readlimit) {
     mark = pos;
   }
 
   @Override
-  public void reset() throws IOException {
+  public synchronized void reset() throws IOException {
     if (mark < 0) throw new IOException("Resetting to invalid mark");
     pos = mark;
   }
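Why synchronized is added here: java.io.InputStream declares both mark(int) and reset() as synchronized, so an override that drops the modifier silently weakens the base class's locking contract and is flagged by common static-analysis tools. A minimal sketch of the pattern; the PosStream class below is hypothetical and only mirrors the two overrides above:

    import java.io.IOException;
    import java.io.InputStream;

    // Hypothetical position-tracking stream, for illustration only.
    class PosStream extends InputStream {
      private long pos = 0;
      private long mark = -1;

      @Override
      public boolean markSupported() { return true; }

      // Keep the base class's synchronized modifier on both overrides.
      @Override
      public synchronized void mark(int readlimit) { mark = pos; }

      @Override
      public synchronized void reset() throws IOException {
        if (mark < 0) throw new IOException("Resetting to invalid mark");
        pos = mark;
      }

      @Override
      public int read() { ++pos; return 0; } // pretend an endless stream of zeros
    }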


@@ -34,7 +34,7 @@ public final class ByteArray implements RawComparable {
    * @param other
    */
   public ByteArray(BytesWritable other) {
-    this(other.get(), 0, other.getSize());
+    this(other.getBytes(), 0, other.getLength());
   }
 
   /**
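This is the recurring substitution in the commit: BytesWritable.get() and getSize() were deprecated in favor of getBytes() and getLength(), and the same swap appears again in TFile below. The replacements are drop-in, with the same caveat as the old methods: getBytes() returns the backing array, which may be longer than the logical length, so every access must be bounded by getLength(). A small self-contained sketch (the class name is illustrative):

    import org.apache.hadoop.io.BytesWritable;

    public class BytesWritableAccessors {
      public static void main(String[] args) {
        BytesWritable bw = new BytesWritable(new byte[] { 1, 2, 3 });
        byte[] buf = bw.getBytes(); // replaces deprecated get(); returns the backing array
        int len = bw.getLength();   // replaces deprecated getSize(); logical length, 3 here
        // The backing array may be longer than len, so iterate to len, not buf.length.
        for (int i = 0; i < len; ++i) {
          System.out.println(buf[i]);
        }
      }
    }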


@@ -16,6 +16,7 @@
  */
 package org.apache.hadoop.io.file.tfile;
 
+import java.io.Serializable;
 import java.util.Comparator;
 
 import org.apache.hadoop.io.RawComparator;
@@ -72,7 +73,7 @@ class CompareUtils {
     }
   }
 
-  public static final class ScalarComparator implements Comparator<Scalar> {
+  public static final class ScalarComparator implements Comparator<Scalar>, Serializable {
     @Override
     public int compare(Scalar o1, Scalar o2) {
       long diff = o1.magnitude() - o2.magnitude();
@@ -83,7 +84,7 @@ class CompareUtils {
   }
 
   public static final class MemcmpRawComparator implements
-      RawComparator<Object> {
+      RawComparator<Object>, Serializable {
     @Override
     public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
       return WritableComparator.compareBytes(b1, s1, l1, b2, s2, l2);
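Tagging the comparators Serializable addresses a standard pitfall: serializable collections such as TreeMap serialize their comparator along with their contents, so a non-serializable comparator makes the whole collection throw NotSerializableException (FindBugs reports this as SE_COMPARATOR_SHOULD_BE_SERIALIZABLE). Both comparators here are stateless, so the marker interface costs nothing. A sketch of the failure mode, using an illustrative comparator rather than the ones in this file:

    import java.io.ByteArrayOutputStream;
    import java.io.ObjectOutputStream;
    import java.io.Serializable;
    import java.util.Comparator;
    import java.util.TreeMap;

    public class ComparatorSerialization {
      // Dropping "Serializable" below would make writeObject() fail with
      // java.io.NotSerializableException when the map is serialized.
      static final class ByLength implements Comparator<String>, Serializable {
        @Override
        public int compare(String a, String b) { return a.length() - b.length(); }
      }

      public static void main(String[] args) throws Exception {
        TreeMap<String, Integer> map = new TreeMap<String, Integer>(new ByLength());
        map.put("tfile", 1);
        new ObjectOutputStream(new ByteArrayOutputStream()).writeObject(map); // succeeds
      }
    }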


@@ -325,10 +325,7 @@ public class TFile {
           outIndex.close();
         }
 
-        if (writerBCF != null) {
-          writerBCF.close();
-          writerBCF = null;
-        }
+        writerBCF.close();
       }
     } finally {
       IOUtils.cleanup(LOG, blkAppender, writerBCF);
@@ -1583,8 +1580,8 @@ public class TFile {
      */
     public int getKey(BytesWritable key) throws IOException {
       key.setSize(getKeyLength());
-      getKey(key.get());
-      return key.getSize();
+      getKey(key.getBytes());
+      return key.getLength();
     }
 
     /**
@@ -1603,10 +1600,10 @@ public class TFile {
         int remain;
         while ((remain = valueBufferInputStream.getRemain()) > 0) {
           value.setSize(size + remain);
-          dis.readFully(value.get(), size, remain);
+          dis.readFully(value.getBytes(), size, remain);
           size += remain;
         }
-        return value.getSize();
+        return value.getLength();
       } finally {
         dis.close();
       }
@@ -1645,8 +1642,8 @@ public class TFile {
         while ((chunkSize = valueBufferInputStream.getRemain()) > 0) {
           chunkSize = Math.min(chunkSize, MAX_VAL_TRANSFER_BUF_SIZE);
           valTransferBuffer.setSize(chunkSize);
-          dis.readFully(valTransferBuffer.get(), 0, chunkSize);
-          out.write(valTransferBuffer.get(), 0, chunkSize);
+          dis.readFully(valTransferBuffer.getBytes(), 0, chunkSize);
+          out.write(valTransferBuffer.getBytes(), 0, chunkSize);
           size += chunkSize;
         }
         return size;
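A note on the first hunk's simplification: IOUtils.cleanup(Log, Closeable...), already running in the finally block, skips null arguments and logs rather than rethrows any failure from close(). That is presumably why the null guard and the re-null of writerBCF could be dropped; the writer's close() is assumed idempotent here, so a second best-effort close from cleanup() is harmless. A sketch of the close-then-cleanup pattern (the closeAll helper is illustrative, not part of TFile):

    import java.io.Closeable;
    import java.io.IOException;
    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;
    import org.apache.hadoop.io.IOUtils;

    public class CloseThenCleanup {
      private static final Log LOG = LogFactory.getLog(CloseThenCleanup.class);

      // Close the primary stream eagerly so its failure propagates, then
      // let IOUtils.cleanup() backstop the rest: it ignores nulls and
      // logs, rather than rethrows, any IOException from close().
      static void closeAll(Closeable primary, Closeable... rest) throws IOException {
        try {
          primary.close();
        } finally {
          IOUtils.cleanup(LOG, rest);
        }
      }
    }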


@@ -353,6 +353,7 @@ public final class Utils {
     /**
      * Return a string representation of the version.
      */
+    @Override
     public String toString() {
       return new StringBuilder("v").append(major).append(".").append(minor)
           .toString();


@@ -42,9 +42,9 @@ public class TestTFile extends TestCase {
       System.getProperty("test.build.data", "/tmp/tfile-test");
   private FileSystem fs;
   private Configuration conf;
-  private final int minBlockSize = 512;
-  private final int largeVal = 3 * 1024 * 1024;
-  private static String localFormatter = "%010d";
+  private static final int minBlockSize = 512;
+  private static final int largeVal = 3 * 1024 * 1024;
+  private static final String localFormatter = "%010d";
 
   @Override
   public void setUp() throws IOException {


@@ -18,6 +18,7 @@
 package org.apache.hadoop.io.file.tfile;
 
 import java.io.IOException;
+import java.io.Serializable;
 
 import org.apache.hadoop.io.RawComparator;
 import org.apache.hadoop.io.WritableComparator;
@@ -42,7 +43,7 @@ public class TestTFileJClassComparatorByteArrays extends TestTFileByteArrays {
   }
 }
 
-class MyComparator implements RawComparator<byte[]> {
+class MyComparator implements RawComparator<byte[]>, Serializable {
   @Override
   public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {


@@ -140,7 +140,7 @@ public class TestVLong extends TestCase {
     for (int i = 0; i < data.length; ++i) {
       int shift = rng.nextInt(Long.SIZE) + 1;
       long mask = (1L << shift) - 1;
-      long a = rng.nextInt() << 32;
+      long a = ((long) rng.nextInt()) << 32;
       long b = ((long) rng.nextInt()) & 0xffffffff;
       data[i] = (a + b) & mask;
     }
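The cast fixes a classic Java shift pitfall: for an int left operand the shift distance is masked to five bits, so rng.nextInt() << 32 shifts by zero and the intended "high half" stays in the low 32 bits. Widening to long first gives a genuine 64-bit shift. A worked example:

    public class ShiftWidth {
      public static void main(String[] args) {
        int x = 0x12345678;
        long wrong = x << 32;           // int shift: 32 & 31 == 0, so wrong == x
        long right = ((long) x) << 32;  // long shift: x lands in the high 32 bits
        System.out.println(Long.toHexString(wrong)); // 12345678
        System.out.println(Long.toHexString(right)); // 1234567800000000
      }
    }

(The neighboring mask has a related subtlety: 0xffffffff is an int literal equal to -1, which widens to all sixty-four one bits, so "& 0xffffffff" on a long is a no-op; masking to the low 32 bits would require the long literal 0xffffffffL.)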