diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/TableInfoMissingException.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/TableInfoMissingException.java
new file mode 100644
index 00000000000..bc8bc7fc194
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/TableInfoMissingException.java
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+
+/**
+ *
+ * Failed to find .tableinfo file under table dir
+ *
+ */
+@InterfaceAudience.Private
+@SuppressWarnings("serial")
+public class TableInfoMissingException extends HBaseIOException {
+
+  public TableInfoMissingException() {
+    super();
+  }
+
+  public TableInfoMissingException( String message ) {
+    super(message);
+  }
+
+  public TableInfoMissingException( String message, Throwable t ) {
+    super(message, t);
+  }
+
+  public TableInfoMissingException( Throwable t ) {
+    super(t);
+  }
+
+}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
index 4a1dac50d88..3cfccb91edb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
@@ -43,6 +43,7 @@ import org.apache.hadoop.hbase.DeserializationException;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableDescriptors;
+import org.apache.hadoop.hbase.TableInfoMissingException;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 
 import com.google.common.primitives.Ints;
@@ -163,7 +164,18 @@ public class FSTableDescriptors implements TableDescriptors {
         return tdm.getTableDescriptor();
       }
     }
-    HTableDescriptor htd = getTableDescriptor(this.fs, this.rootdir, tablename);
+
+    HTableDescriptor htd = null;
+    try {
+      htd = getTableDescriptor(this.fs, this.rootdir, tablename);
+    } catch (NullPointerException e) {
+      LOG.debug("Exception during readTableDecriptor. Current table name = " +
+        tablename, e);
+    } catch (IOException ioe) {
+      LOG.debug("Exception during readTableDecriptor. Current table name = " +
+        tablename, ioe);
+    }
+
     if (htd == null) {
       LOG.warn("The following folder is in HBase's root directory and " +
         "doesn't contain a table descriptor, " +
@@ -258,7 +270,7 @@ public class FSTableDescriptors implements TableDescriptors {
    * @return The 'current' tableinfo file.
    * @throws IOException
    */
-  private static FileStatus getTableInfoPath(final FileSystem fs,
+  public static FileStatus getTableInfoPath(final FileSystem fs,
       final Path tabledir)
   throws IOException {
     FileStatus [] status = FSUtils.listStatus(fs, tabledir, new PathFilter() {
@@ -375,21 +387,25 @@ public class FSTableDescriptors implements TableDescriptors {
   public static HTableDescriptor getTableDescriptor(FileSystem fs,
       Path hbaseRootDir, byte[] tableName)
   throws IOException {
-    return getTableDescriptor(fs, hbaseRootDir, Bytes.toString(tableName));
+    HTableDescriptor htd = null;
+    try {
+      htd = getTableDescriptor(fs, hbaseRootDir, Bytes.toString(tableName));
+    } catch (NullPointerException e) {
+      LOG.debug("Exception during readTableDecriptor. Current table name = " +
+        Bytes.toString(tableName), e);
+    }
+    return htd;
   }
 
   static HTableDescriptor getTableDescriptor(FileSystem fs,
-      Path hbaseRootDir, String tableName) {
+      Path hbaseRootDir, String tableName) throws NullPointerException, IOException{
     HTableDescriptor htd = null;
-    try {
-      htd = getTableDescriptor(fs, FSUtils.getTablePath(hbaseRootDir, tableName));
-    } catch (NullPointerException e) {
-      LOG.debug("Exception during readTableDecriptor. Current table name = " +
-        tableName , e);
-    } catch (IOException ioe) {
-      LOG.debug("Exception during readTableDecriptor. Current table name = " +
-        tableName , ioe);
+    // ignore both -ROOT- and .META. tables
+    if (Bytes.compareTo(Bytes.toBytes(tableName), HConstants.ROOT_TABLE_NAME) == 0
+        || Bytes.compareTo(Bytes.toBytes(tableName), HConstants.META_TABLE_NAME) == 0) {
+      return null;
     }
+    htd = getTableDescriptor(fs, FSUtils.getTablePath(hbaseRootDir, tableName));
     return htd;
   }
 
@@ -397,7 +413,10 @@ public class FSTableDescriptors implements TableDescriptors {
   throws IOException, NullPointerException {
     if (tableDir == null) throw new NullPointerException();
     FileStatus status = getTableInfoPath(fs, tableDir);
-    if (status == null) return null;
+    if (status == null) {
+      throw new TableInfoMissingException("No .tableinfo file under " +
+        tableDir.toUri());
+    }
     int len = Ints.checkedCast(status.getLen());
     byte [] content = new byte[len];
     FSDataInputStream fsDataInputStream = fs.open(status.getPath());
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
index 71d5648da81..e3c51465342 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
@@ -712,8 +712,9 @@ public class HBaseFsck {
                 hbaseRoot, tableName);
             modTInfo.htds.add(htd);
           } catch (IOException ioe) {
-            LOG.error("Unable to read .tableinfo from " + hbaseRoot, ioe);
-            throw ioe;
+            LOG.warn("Unable to read .tableinfo from " + hbaseRoot, ioe);
+            errors.reportError(ERROR_CODE.NO_TABLEINFO_FILE,
+                "Unable to read .tableinfo from " + hbaseRoot);
           }
         }
         modTInfo.addRegionInfo(hbi);
@@ -2640,7 +2641,7 @@ public class HBaseFsck {
       MULTI_DEPLOYED, SHOULD_NOT_BE_DEPLOYED, MULTI_META_REGION, RS_CONNECT_FAILURE,
       FIRST_REGION_STARTKEY_NOT_EMPTY, LAST_REGION_ENDKEY_NOT_EMPTY, DUPE_STARTKEYS,
       HOLE_IN_REGION_CHAIN, OVERLAP_IN_REGION_CHAIN, REGION_CYCLE, DEGENERATE_REGION,
-      ORPHAN_HDFS_REGION, LINGERING_SPLIT_PARENT
+      ORPHAN_HDFS_REGION, LINGERING_SPLIT_PARENT, NO_TABLEINFO_FILE
     }
     public void clear();
     public void report(String message);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java
index 4a4e5ab56b8..10fa5bbc2a6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java
@@ -414,6 +414,27 @@ public class TestHBaseFsck {
     }
   }
 
+  @Test
+  public void testHbckMissingTableinfo() throws Exception {
+    String table = "tableInfo";
+    FileSystem fs = null;
+    Path tableinfo = null;
+    try {
+      setupTable(table);
+      Path hbaseTableDir = new Path(conf.get(HConstants.HBASE_DIR) + "/" + table );
+      fs = hbaseTableDir.getFileSystem(conf);
+      FileStatus status = FSTableDescriptors.getTableInfoPath(fs, hbaseTableDir);
+      tableinfo = status.getPath();
+      fs.rename(tableinfo, new Path("/.tableinfo"));
+
+      HBaseFsck hbck = doFsck(conf, false);
+      assertErrors(hbck, new ERROR_CODE[] { ERROR_CODE.NO_TABLEINFO_FILE });
+    } finally {
+      fs.rename(new Path("/.tableinfo"), tableinfo);
+      deleteTable(table);
+    }
+  }
+
   /**
    * This create and fixes a bad table with regions that have a duplicate
    * start key
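Reviewer note, not part of the patch: since getTableInfoPath(FileSystem, Path) becomes public here, the presence of a table's .tableinfo file can be probed directly, which is exactly what the new hbck test does before renaming the file away. Below is a minimal standalone sketch of such a probe; the class name TableInfoCheck and the command-line table-name argument are illustrative only, and it assumes an HBase build containing this patch is on the classpath.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.util.FSTableDescriptors;

public class TableInfoCheck {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // The table directory sits directly under the HBase root dir, as in the test above.
    Path tableDir = new Path(conf.get(HConstants.HBASE_DIR) + "/" + args[0]);
    FileSystem fs = tableDir.getFileSystem(conf);
    // getTableInfoPath returns the current .tableinfo FileStatus, or null when it is
    // missing; getTableDescriptor() now surfaces that case as TableInfoMissingException.
    FileStatus status = FSTableDescriptors.getTableInfoPath(fs, tableDir);
    if (status == null) {
      System.out.println("No .tableinfo file under " + tableDir.toUri());
    } else {
      System.out.println("Current tableinfo file: " + status.getPath());
    }
  }
}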