diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java index d719796c481..5cd714ba5c7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java @@ -1165,6 +1165,17 @@ public abstract class FSUtils extends CommonFSUtils { } } + /** + * Called every so-often by storefile map builder getTableStoreFilePathMap to + * report progress. + */ + interface ProgressReporter { + /** + * @param status File or directory we are about to process. + */ + void progress(FileStatus status); + } + /** * Runs through the HBase rootdir/tablename and creates a reverse lookup map for * table StoreFile names to the full Path. @@ -1184,7 +1195,8 @@ public abstract class FSUtils extends CommonFSUtils { public static Map getTableStoreFilePathMap(Map map, final FileSystem fs, final Path hbaseRootDir, TableName tableName) throws IOException, InterruptedException { - return getTableStoreFilePathMap(map, fs, hbaseRootDir, tableName, null, null, null); + return getTableStoreFilePathMap(map, fs, hbaseRootDir, tableName, null, null, + (ProgressReporter)null); } /** @@ -1204,15 +1216,55 @@ public abstract class FSUtils extends CommonFSUtils { * @param tableName name of the table to scan. * @param sfFilter optional path filter to apply to store files * @param executor optional executor service to parallelize this operation - * @param errors ErrorReporter instance or null + * @param progressReporter Instance or null; gets called every time we move to new region of + * family dir and for each store file. + * @return Map keyed by StoreFile name with a value of the full Path. + * @throws IOException When scanning the directory fails. + * @deprecated Since 2.3.0. For removal in hbase4. Use ProgressReporter override instead. 
+ */ + @Deprecated + public static Map getTableStoreFilePathMap(Map resultMap, + final FileSystem fs, final Path hbaseRootDir, TableName tableName, final PathFilter sfFilter, + ExecutorService executor, final HbckErrorReporter progressReporter) + throws IOException, InterruptedException { + return getTableStoreFilePathMap(resultMap, fs, hbaseRootDir, tableName, sfFilter, executor, + new ProgressReporter() { + @Override + public void progress(FileStatus status) { + // status is not used in this implementation. + progressReporter.progress(); + } + }); + } + + /* + * Runs through the HBase rootdir/tablename and creates a reverse lookup map for + * table StoreFile names to the full Path. Note that because this method can be called + * on a 'live' HBase system that we will skip files that no longer exist by the time + * we traverse them and similarly the user of the result needs to consider that some + * entries in this map may not exist by the time this call completes. + *
+ * Example...
+ * Key = 3944417774205889744
+ * Value = hdfs://localhost:51169/user/userid/-ROOT-/70236052/info/3944417774205889744 + * + * @param resultMap map to add values. If null, this method will create and populate one + * to return + * @param fs The file system to use. + * @param hbaseRootDir The root directory to scan. + * @param tableName name of the table to scan. + * @param sfFilter optional path filter to apply to store files + * @param executor optional executor service to parallelize this operation + * @param progressReporter Instance or null; gets called every time we move to new region of + * family dir and for each store file. * @return Map keyed by StoreFile name with a value of the full Path. * @throws IOException When scanning the directory fails. * @throws InterruptedException */ public static Map getTableStoreFilePathMap(Map resultMap, final FileSystem fs, final Path hbaseRootDir, TableName tableName, final PathFilter sfFilter, - ExecutorService executor, final HbckErrorReporter errors) - throws IOException, InterruptedException { + ExecutorService executor, final ProgressReporter progressReporter) + throws IOException, InterruptedException { final Map finalResultMap = resultMap == null ? 
new ConcurrentHashMap<>(128, 0.75f, 32) : resultMap; @@ -1233,8 +1285,8 @@ public abstract class FSUtils extends CommonFSUtils { final List> futures = new ArrayList<>(regionDirs.size()); for (FileStatus regionDir : regionDirs) { - if (null != errors) { - errors.progress(); + if (null != progressReporter) { + progressReporter.progress(regionDir); } final Path dd = regionDir.getPath(); @@ -1257,8 +1309,8 @@ public abstract class FSUtils extends CommonFSUtils { return; } for (FileStatus familyDir : familyDirs) { - if (null != errors) { - errors.progress(); + if (null != progressReporter) { + progressReporter.progress(familyDir); } Path family = familyDir.getPath(); if (family.getName().equals(HConstants.RECOVERED_EDITS_DIR)) { @@ -1268,8 +1320,8 @@ public abstract class FSUtils extends CommonFSUtils { // put in map FileStatus[] familyStatus = fs.listStatus(family); for (FileStatus sfStatus : familyStatus) { - if (null != errors) { - errors.progress(); + if (null != progressReporter) { + progressReporter.progress(sfStatus); } Path sf = sfStatus.getPath(); if (sfFilter == null || sfFilter.accept(sf)) { @@ -1348,12 +1400,11 @@ public abstract class FSUtils extends CommonFSUtils { * @param hbaseRootDir The root directory to scan. * @return Map keyed by StoreFile name with a value of the full Path. * @throws IOException When scanning the directory fails. - * @throws InterruptedException */ - public static Map getTableStoreFilePathMap( - final FileSystem fs, final Path hbaseRootDir) + public static Map getTableStoreFilePathMap(final FileSystem fs, + final Path hbaseRootDir) throws IOException, InterruptedException { - return getTableStoreFilePathMap(fs, hbaseRootDir, null, null, null); + return getTableStoreFilePathMap(fs, hbaseRootDir, null, null, (ProgressReporter)null); } /** @@ -1368,14 +1419,49 @@ public abstract class FSUtils extends CommonFSUtils { * @param hbaseRootDir The root directory to scan. 
* @param sfFilter optional path filter to apply to store files * @param executor optional executor service to parallelize this operation - * @param errors ErrorReporter instance or null + * @param progressReporter Instance or null; gets called every time we move to new region of + * family dir and for each store file. + * @return Map keyed by StoreFile name with a value of the full Path. + * @throws IOException When scanning the directory fails. + * @deprecated Since 2.3.0. Will be removed in hbase4. Use {@link + * #getTableStoreFilePathMap(FileSystem, Path, PathFilter, ExecutorService, ProgressReporter)} + */ + @Deprecated + public static Map getTableStoreFilePathMap(final FileSystem fs, + final Path hbaseRootDir, PathFilter sfFilter, ExecutorService executor, + HbckErrorReporter progressReporter) + throws IOException, InterruptedException { + return getTableStoreFilePathMap(fs, hbaseRootDir, sfFilter, executor, + new ProgressReporter() { + @Override + public void progress(FileStatus status) { + // status is not used in this implementation. + progressReporter.progress(); + } + }); + } + + /** + * Runs through the HBase rootdir and creates a reverse lookup map for + * table StoreFile names to the full Path. + *
+ * Example...
+ * Key = 3944417774205889744
+ * Value = hdfs://localhost:51169/user/userid/-ROOT-/70236052/info/3944417774205889744 + * + * @param fs The file system to use. + * @param hbaseRootDir The root directory to scan. + * @param sfFilter optional path filter to apply to store files + * @param executor optional executor service to parallelize this operation + * @param progressReporter Instance or null; gets called every time we move to new region of + * family dir and for each store file. * @return Map keyed by StoreFile name with a value of the full Path. * @throws IOException When scanning the directory fails. * @throws InterruptedException */ public static Map getTableStoreFilePathMap( final FileSystem fs, final Path hbaseRootDir, PathFilter sfFilter, - ExecutorService executor, HbckErrorReporter errors) + ExecutorService executor, ProgressReporter progressReporter) throws IOException, InterruptedException { ConcurrentHashMap map = new ConcurrentHashMap<>(1024, 0.75f, 32); @@ -1385,7 +1471,7 @@ public abstract class FSUtils extends CommonFSUtils { // only include the directory paths to tables for (Path tableDir : FSUtils.getTableDirs(fs, hbaseRootDir)) { getTableStoreFilePathMap(map, fs, hbaseRootDir, - FSUtils.getTableName(tableDir), sfFilter, executor, errors); + FSUtils.getTableName(tableDir), sfFilter, executor, progressReporter); } return map; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java index 266b7dc823f..8a29a5a9d42 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java @@ -153,7 +153,9 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminServic * HBaseFsck (hbck) is a tool for checking and repairing region consistency and * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. 
It does not * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'. - * See hbck2 (HBASE-19121) for a hbck tool for hbase2. + * Even though it can 'read' state, given how so much has changed in how hbase1 and hbase2 operate, + * it will often misread. See hbck2 (HBASE-19121) for a hbck tool for hbase2. This class is + * deprecated. * *

* Region consistency checks verify that hbase:meta, region deployment on region @@ -196,7 +198,9 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminServic * If hbck is run from the command line, there are a handful of arguments that * can be used to limit the kinds of repairs hbck will do. See the code in * {@link #printUsageAndExit()} for more details. + * @deprecated For removal in hbase-4.0.0. Use HBCK2 instead. */ +@Deprecated @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS) @InterfaceStability.Evolving public class HBaseFsck extends Configured implements Closeable { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HbckErrorReporter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HbckErrorReporter.java index 500b690051b..52012dfa235 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HbckErrorReporter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HbckErrorReporter.java @@ -1,4 +1,4 @@ -/** +/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information @@ -20,12 +20,15 @@ package org.apache.hadoop.hbase.util; import java.util.ArrayList; import org.apache.yetus.audience.InterfaceAudience; -import org.apache.yetus.audience.InterfaceStability; +/** + * Used by {@link HBaseFsck} reporting system. + * @deprecated Since 2.3.0. To be removed in hbase4. Use HBCK2 instead. Remove when + * {@link HBaseFsck} is removed. + */ +@Deprecated @InterfaceAudience.Private -@InterfaceStability.Evolving public interface HbckErrorReporter { - enum ERROR_CODE { UNKNOWN, NO_META_REGION, NULL_META_REGION, NO_VERSION_FILE, NOT_IN_META_HDFS, NOT_IN_META, NOT_IN_META_OR_DEPLOYED, NOT_IN_HDFS_OR_DEPLOYED, NOT_IN_HDFS, SERVER_DOES_NOT_MATCH_META,