From 381a151d14e5fe540f9ac9967115255a85d07b3c Mon Sep 17 00:00:00 2001
From: Ramkrishna
Date: Thu, 6 Jul 2017 11:20:00 +0530
Subject: [PATCH] HBASE-18002 Investigate why bucket cache filling up in file mode in an exisiting file is slower (Ram)

---
 .../hadoop/hbase/io/hfile/bucket/BucketCache.java |  7 ++++---
 .../hbase/io/hfile/bucket/FileIOEngine.java       | 15 ++++++++++++++-
 .../hbase/io/hfile/bucket/TestFileIOEngine.java   |  2 +-
 3 files changed, 19 insertions(+), 5 deletions(-)

diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
index 3c27f14eff1..489c80578a5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
@@ -227,7 +227,7 @@ public class BucketCache implements BlockCache, HeapSize {
   public BucketCache(String ioEngineName, long capacity, int blockSize, int[] bucketSizes,
       int writerThreadNum, int writerQLen, String persistencePath, int ioErrorsTolerationDuration)
       throws FileNotFoundException, IOException {
-    this.ioEngine = getIOEngineFromName(ioEngineName, capacity);
+    this.ioEngine = getIOEngineFromName(ioEngineName, capacity, persistencePath);
     this.writerThreads = new WriterThread[writerThreadNum];
     long blockNumCapacity = capacity / blockSize;
     if (blockNumCapacity >= Integer.MAX_VALUE) {
@@ -309,10 +309,11 @@ public class BucketCache implements BlockCache, HeapSize {
    * Get the IOEngine from the IO engine name
    * @param ioEngineName
    * @param capacity
+   * @param persistencePath
    * @return the IOEngine
    * @throws IOException
    */
-  private IOEngine getIOEngineFromName(String ioEngineName, long capacity)
+  private IOEngine getIOEngineFromName(String ioEngineName, long capacity, String persistencePath)
       throws IOException {
     if (ioEngineName.startsWith("file:") || ioEngineName.startsWith("files:")) {
       // In order to make the usage simple, we only need the prefix 'files:' in
@@ -320,7 +321,7 @@ public class BucketCache implements BlockCache, HeapSize {
       // the compatibility
       String[] filePaths = ioEngineName.substring(ioEngineName.indexOf(":") + 1)
           .split(FileIOEngine.FILE_DELIMITER);
-      return new FileIOEngine(capacity, filePaths);
+      return new FileIOEngine(capacity, persistencePath != null, filePaths);
     } else if (ioEngineName.startsWith("offheap")) {
       return new ByteBufferIOEngine(capacity, true);
     } else if (ioEngineName.startsWith("heap")) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
index 7586d5748b1..a847bfe126d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
@@ -52,11 +52,24 @@ public class FileIOEngine implements IOEngine {
   private FileReadAccessor readAccessor = new FileReadAccessor();
   private FileWriteAccessor writeAccessor = new FileWriteAccessor();
 
-  public FileIOEngine(long capacity, String... filePaths) throws IOException {
+  public FileIOEngine(long capacity, boolean maintainPersistence, String... filePaths)
+      throws IOException {
     this.sizePerFile = capacity / filePaths.length;
     this.capacity = this.sizePerFile * filePaths.length;
     this.filePaths = filePaths;
     this.fileChannels = new FileChannel[filePaths.length];
+    if (!maintainPersistence) {
+      for (String filePath : filePaths) {
+        File file = new File(filePath);
+        if (file.exists()) {
+          if (LOG.isDebugEnabled()) {
+            LOG.debug("File " + filePath + " already exists. Deleting!!");
+          }
+          file.delete();
+          // If deletion fails still we can manage with the writes
+        }
+      }
+    }
     this.rafs = new RandomAccessFile[filePaths.length];
     for (int i = 0; i < filePaths.length; i++) {
       String filePath = filePaths[i];
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
index d1f3dfe3b67..d13022d006e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
@@ -58,7 +58,7 @@ public class TestFileIOEngine {
       boundaryStopPositions.add(sizePerFile * i + 1);
     }
     boundaryStopPositions.add(sizePerFile * filePaths.length - 1);
-    FileIOEngine fileIOEngine = new FileIOEngine(totalCapacity, filePaths);
+    FileIOEngine fileIOEngine = new FileIOEngine(totalCapacity, false, filePaths);
     try {
       for (int i = 0; i < 500; i++) {
         int len = (int) Math.floor(Math.random() * 100);
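
Note (not part of the patch): the change keys the new maintainPersistence flag off whether a persistencePath was configured, and when persistence is not requested it deletes any stale backing file before opening it, so the engine starts from a fresh file. Below is a minimal standalone sketch of that pre-delete-then-open pattern using only standard java.io/java.nio calls; the class name and file path are illustrative and this is not HBase API.

// Illustrative sketch only. Delete a stale cache file when the cache does not
// need to survive restarts, then open and pre-size a fresh backing file.
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.channels.FileChannel;

public class FreshFileExample {
  // Opens the backing file, optionally discarding a stale copy first.
  static FileChannel openBackingFile(String path, long sizeBytes,
      boolean maintainPersistence) throws IOException {
    File file = new File(path);
    if (!maintainPersistence && file.exists()) {
      // Best effort, mirroring the patch: a failed delete is tolerated because
      // subsequent writes simply overwrite the old contents.
      file.delete();
    }
    RandomAccessFile raf = new RandomAccessFile(file, "rw");
    raf.setLength(sizeBytes); // pre-size the cache file
    return raf.getChannel();  // closing the channel also closes the file
  }

  public static void main(String[] args) throws IOException {
    // Hypothetical path; in HBase the paths come from the 'file:' ioengine config.
    try (FileChannel ch = openBackingFile("/tmp/bucket.cache", 64L * 1024 * 1024, false)) {
      System.out.println("Backing file opened, size=" + ch.size());
    }
  }
}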