From ad3c964f421de5c3bd86d7032859753e1d589252 Mon Sep 17 00:00:00 2001
From: Michael Stack
Date: Fri, 31 Oct 2008 18:16:23 +0000
Subject: [PATCH] HBASE-969 Won't when storefile > 2G.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@709519 13f79535-47bb-0310-9956-ffa450edef68
---
 CHANGES.txt                                                 | 1 +
 src/java/org/apache/hadoop/hbase/io/BlockFSInputStream.java | 2 +-
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/CHANGES.txt b/CHANGES.txt
index b6d5a71fc0f..183d684d5e4 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -56,6 +56,7 @@ Release 0.19.0 - Unreleased
    HBASE-973   [doc] In getting started, make it clear that hbase needs to
                create its directory in hdfs
    HBASE-963   Fix the retries in HTable.flushCommit
+   HBASE-969   Won't when storefile > 2G.
 
   IMPROVEMENTS
    HBASE-901   Add a limit to key length, check key and value length on client side
diff --git a/src/java/org/apache/hadoop/hbase/io/BlockFSInputStream.java b/src/java/org/apache/hadoop/hbase/io/BlockFSInputStream.java
index 1f765151ef4..5684fb1cd56 100644
--- a/src/java/org/apache/hadoop/hbase/io/BlockFSInputStream.java
+++ b/src/java/org/apache/hadoop/hbase/io/BlockFSInputStream.java
@@ -151,7 +151,7 @@ public class BlockFSInputStream extends FSInputStream {
   }
 
   private synchronized void blockSeekTo(long target) throws IOException {
-    int targetBlock = (int) (target / blockSize);
+    long targetBlock = target/blockSize;
     long targetBlockStart = targetBlock * blockSize;
     long targetBlockEnd = Math.min(targetBlockStart + blockSize, fileLength) - 1;
     long blockLength = targetBlockEnd - targetBlockStart + 1;
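
Note on the failure mode (illustrative only, not part of the patch): with targetBlock declared as int, the follow-on targetBlock * blockSize multiplication can be carried out in 32-bit arithmetic and wrap once the product passes Integer.MAX_VALUE (about 2 GB), which is why seeks into store files larger than 2 GB broke. The sketch below reproduces that shape; the class name, BLOCK_SIZE, and the 3 GB target offset are assumed demo values, and it assumes the block size is a 32-bit int, which is the condition under which the old form overflows.

// Minimal, self-contained demo (assumed names/values, not HBase code).
public class BlockSeekOverflowDemo {

  // Assumption for illustration: the stream's block size is a 32-bit int.
  private static final int BLOCK_SIZE = 64 * 1024;              // 64 KB
  private static final long TARGET = 3L * 1024 * 1024 * 1024;   // seek to the 3 GB offset

  public static void main(String[] args) {
    // Old shape: the block index is an int, so index * BLOCK_SIZE is evaluated
    // in 32-bit arithmetic and wraps past Integer.MAX_VALUE before being widened.
    int intBlock = (int) (TARGET / BLOCK_SIZE);
    long brokenStart = intBlock * BLOCK_SIZE;    // wraps to a negative offset

    // New shape: a long index forces the multiplication into 64-bit arithmetic.
    long longBlock = TARGET / BLOCK_SIZE;
    long correctStart = longBlock * BLOCK_SIZE;  // 3221225472, as expected

    System.out.println("int-based block start:  " + brokenStart);   // -1073741824
    System.out.println("long-based block start: " + correctStart);  // 3221225472
  }
}

If that assumption about the block size holds, widening targetBlock to long (as the one-line change above does) pushes the whole offset computation into 64-bit arithmetic, since targetBlockStart, targetBlockEnd, and blockLength are already declared long in the patched method.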