diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/HFileArchiver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/HFileArchiver.java
index ed787e42d0f..c23974c047c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/HFileArchiver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/HFileArchiver.java
@@ -204,7 +204,7 @@ public class HFileArchiver {
* Archive the given files and resolve any conflicts with existing files via appending the time
* archiving started (so all conflicts in the same group have the same timestamp appended).
*
- * If any of the passed files to archive are directories, archives the all files under that
+ * If any of the passed files to archive are directories, archives all the files under that
* directory. Archive directory structure for children is the base archive directory name + the
- * parent directory and is built recursively is passed files are directories themselves.
+ * parent directory and is built recursively if passed files are directories themselves.
* @param fs {@link FileSystem} on which to archive the files
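A note for reviewers on the behaviour this javadoc describes: directories passed in are archived recursively with their structure preserved under the base archive directory, and name conflicts are resolved by appending the time archiving started, so every conflict in one run gets the same suffix. Below is a minimal sketch of those two rules, written against java.nio.file rather than Hadoop's FileSystem; the class and method names are illustrative only, not HFileArchiver's API.

// Illustration only: recursive descent into directories plus conflict resolution by
// appending the archive start time. Not the HFileArchiver implementation.
import java.io.IOException;
import java.nio.file.*;

public class ArchiveSketch {
  /** Move 'source' under 'archiveDir', recursing into directories and keeping their structure. */
  static void archive(Path source, Path archiveDir, long archiveStartTime) throws IOException {
    if (Files.isDirectory(source)) {
      // Children are archived under: base archive dir + the parent directory name.
      Path childArchiveDir = archiveDir.resolve(source.getFileName());
      Files.createDirectories(childArchiveDir);
      try (DirectoryStream<Path> children = Files.newDirectoryStream(source)) {
        for (Path child : children) {
          archive(child, childArchiveDir, archiveStartTime);
        }
      }
      return;
    }
    Path target = archiveDir.resolve(source.getFileName());
    if (Files.exists(target)) {
      // Conflict: append the time archiving started, so all conflicts in the
      // same run carry the same timestamp suffix.
      target = archiveDir.resolve(source.getFileName() + "." + archiveStartTime);
    }
    Files.createDirectories(archiveDir);
    Files.move(source, target, StandardCopyOption.ATOMIC_MOVE);
  }
}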
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/HFileArchiveManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/HFileArchiveManager.java
index dcbc9dfedf3..4b0e3b543b7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/HFileArchiveManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/HFileArchiveManager.java
@@ -47,7 +47,7 @@ class HFileArchiveManager {
public HFileArchiveManager(HConnection connection, Configuration conf)
throws ZooKeeperConnectionException, IOException {
- this.zooKeeper = new ZooKeeperWatcher(conf, "hfileArchiveManger-on-" + connection.toString(),
+ this.zooKeeper = new ZooKeeperWatcher(conf, "hfileArchiveManager-on-" + connection.toString(),
connection);
this.archiveZnode = ZKTableArchiveClient.getArchiveZNode(this.zooKeeper.getConfiguration(),
this.zooKeeper);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FileLink.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FileLink.java
index 409ec9dd0d2..bfa63fb285f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FileLink.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FileLink.java
@@ -39,7 +39,7 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.util.FSUtils;
/**
- * The FileLink is a sort of hardlink, that allows to access a file given a set of locations.
+ * The FileLink is a sort of hardlink that allows access to a file given a set of locations.
*
*
* The Problem:
*
@@ -49,7 +49,7 @@ import org.apache.hadoop.hbase.util.FSUtils;
*
* -
- * HBase store files in one location (e.g. table/region/family/) and when the file is not
- * needed anymore (e.g. compaction, region deletetion, ...) moves it to an archive directory.
+ * HBase stores files in one location (e.g. table/region/family/) and when the file is not
+ * needed anymore (e.g. compaction, region deletion, ...) moves it to an archive directory.
*
*
* If we want to create a reference to a file, we need to remember that it can be in its
@@ -83,7 +83,7 @@ import org.apache.hadoop.hbase.util.FSUtils;
* (Back-reference to the link in table-cloned)
*
*
- * /hbase/.archive/table/region-x/.links-file-k/region-z.table-cloned
+ * /hbase/.archive/table/region-x/.links-file-k/region-z.table-2nd-cloned
* (Back-reference to the link in table-2nd-cloned)
*
*
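For context on the hunks above: the javadoc is explaining why a link has to carry a set of candidate locations at all, since a store file may still sit in its original directory or may already have been moved to the archive. A hedged sketch of that resolution order follows, using java.nio.file; the real FileLink works on Hadoop's FileSystem and also has to cope with the file moving between the existence check and the open, which this sketch ignores.

// Illustration only: the "try each possible location" idea behind FileLink.
// Names below are hypothetical, not HBase's FileLink API.
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;

public class FileLinkSketch {
  /** Open the first location that still holds the file, e.g. original dir first, archive second. */
  static InputStream open(List<Path> locations) throws IOException {
    for (Path candidate : locations) {
      if (Files.exists(candidate)) {
        return Files.newInputStream(candidate);
      }
    }
    throw new IOException("File not found in any location: " + locations);
  }
}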
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
index 3163fa95a1b..2a49f832cd6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
@@ -73,7 +73,7 @@ public abstract class CleanerChore extends Chore
protected abstract boolean validate(Path file);
/**
- * Instanitate and initialize all the file cleaners set in the configuration
+ * Instantiate and initialize all the file cleaners set in the configuration
* @param confKey key to get the file cleaner classes from the configuration
*/
private void initCleanerChain(String confKey) {
@@ -129,7 +129,7 @@ public abstract class CleanerChore extends Chore
}
}
} catch (IOException e) {
- LOG.warn("Failed to get status of:" + oldFileDir);
+ LOG.warn("Failed to get status of: " + oldFileDir);
}
}
@@ -184,7 +184,7 @@ public abstract class CleanerChore extends Chore
LOG.warn("Found a wrongly formatted file: " + filePath.getName() + " deleting it.");
boolean success = this.fs.delete(filePath, true);
if(!success)
- LOG.warn("Attempted to delete:" + filePath
+ LOG.warn("Attempted to delete: " + filePath
+ ", but couldn't. Run cleaner chain and attempt to delete on next pass.");
return success;
@@ -205,7 +205,7 @@ public abstract class CleanerChore extends Chore
}
}
// delete this file if it passes all the cleaners
- LOG.debug("Removing:" + filePath + " from archive");
+ LOG.debug("Removing: " + filePath + " from archive");
boolean success = this.fs.delete(filePath, false);
if (!success) {
LOG.warn("Attempted to delete:" + filePath
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileLinkCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileLinkCleaner.java
index 4da1b9b6fe7..19b4f901ea9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileLinkCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileLinkCleaner.java
@@ -54,7 +54,7 @@ public class HFileLinkCleaner extends BaseHFileCleanerDelegate {
// HFile Link is always deletable
if (HFileLink.isHFileLink(filePath)) return true;
- // If the file is inside a link references directory, means that is a back ref link.
- // The back ref can be deleted only if the referenced file doesn't exists.
+ // If the file is inside a link references directory, it means that it is a back ref link.
+ // The back ref can be deleted only if the referenced file doesn't exist.
Path parentDir = filePath.getParent();
if (HFileLink.isBackReferencesDir(parentDir)) {
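The corrected comment describes the rule this cleaner enforces: a back-reference marker may be deleted only once the file it refers to is gone from every location it could still be resolved from. A small sketch of that check follows, with a simplified, hypothetical path layout in place of HFileLink's actual scheme.

// Illustration only: a back ref is deletable only when the referenced file is gone everywhere.
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;

public class BackRefSketch {
  /** The back ref is deletable only if the referenced file no longer exists in any location. */
  static boolean isBackRefDeletable(List<Path> referencedFileLocations) {
    for (Path location : referencedFileLocations) {
      if (Files.exists(location)) {
        return false; // someone could still resolve the link, keep the back reference
      }
    }
    return true;
  }
}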