HBASE-13990 make maven site generation work with jdk8
* includes additional branch-1 specific fixes to javadocs
* includes rollback of 2 changes from HBASE-13898 that didn't apply to branch-1

Conflicts:
	hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
parent 1db4e8a24b
commit a58848507a
@@ -303,7 +303,7 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
   * The other attributes are defaulted.
   *
   * @param familyName Column family name. Must be 'printable' -- digit or
-  * letter -- and may not contain a <code>:<code>
+  * letter -- and may not contain a <code>:</code>
   */
  public HColumnDescriptor(final String familyName) {
    this(Bytes.toBytes(familyName));
@@ -314,7 +314,7 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
   * The other attributes are defaulted.
   *
   * @param familyName Column family name. Must be 'printable' -- digit or
-  * letter -- and may not contain a <code>:<code>
+  * letter -- and may not contain a <code>:</code>
   */
  public HColumnDescriptor(final byte [] familyName) {
    this (familyName == null || familyName.length <= 0?
@@ -345,7 +345,7 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
  /**
   * Constructor
   * @param familyName Column family name. Must be 'printable' -- digit or
-  * letter -- and may not contain a <code>:<code>
+  * letter -- and may not contain a <code>:</code>
   * @param maxVersions Maximum number of versions to keep
   * @param compression Compression type
   * @param inMemory If true, column data should be kept in an HRegionServer's
@@ -376,7 +376,7 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
  /**
   * Constructor
   * @param familyName Column family name. Must be 'printable' -- digit or
-  * letter -- and may not contain a <code>:<code>
+  * letter -- and may not contain a <code>:</code>
   * @param maxVersions Maximum number of versions to keep
   * @param compression Compression type
   * @param inMemory If true, column data should be kept in an HRegionServer's
@@ -413,7 +413,7 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
  /**
   * Constructor
   * @param familyName Column family name. Must be 'printable' -- digit or
-  * letter -- and may not contain a <code>:<code>
+  * letter -- and may not contain a <code>:</code>
   * @param minVersions Minimum number of versions to keep
   * @param maxVersions Maximum number of versions to keep
   * @param keepDeletedCells Whether to retain deleted cells until they expire
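The family-name rule documented in the hunks above is enforced at construction time. A minimal sketch (the table and family names here are made up, not taken from this commit):

    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.TableName;

    public class FamilyNameExample {
      public static void main(String[] args) {
        // A printable name without ':' is accepted.
        HTableDescriptor table = new HTableDescriptor(TableName.valueOf("example_table"));
        table.addFamily(new HColumnDescriptor("d"));

        // ':' separates the column family from the qualifier, so a family name
        // containing it is rejected with an IllegalArgumentException.
        try {
          new HColumnDescriptor("bad:name");
        } catch (IllegalArgumentException expected) {
          System.out.println("rejected: " + expected.getMessage());
        }
      }
    }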
@@ -249,7 +249,7 @@ public interface Admin extends Abortable, Closeable {
   * are repeated and if the split key has empty byte array.
   *
   * @param desc table descriptor for table
-  * @throws MasterNotRunningException if master is not running
+  * @throws org.apache.hadoop.hbase.MasterNotRunningException if master is not running
   * @throws org.apache.hadoop.hbase.TableExistsException if table already exists (If concurrent
   * threads, the table may have been created between test-for-existence and attempt-at-creation).
   * @throws IOException if a remote or network exception occurs
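The javadoc above belongs to the createTable variant that accepts split keys. A minimal sketch of creating a pre-split table, assuming an already-obtained Admin; the table name, family and split points are assumptions:

    import java.io.IOException;
    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.util.Bytes;

    public class PreSplitTableExample {
      // Creates a table pre-split into four regions; duplicate or empty split keys
      // would make createTable fail, as the javadoc above notes.
      static void createPreSplitTable(Admin admin) throws IOException {
        HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("example_table"));
        desc.addFamily(new HColumnDescriptor("d"));
        byte[][] splitKeys = { Bytes.toBytes("g"), Bytes.toBytes("n"), Bytes.toBytes("t") };
        admin.createTable(desc, splitKeys);
      }
    }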
@@ -109,7 +109,7 @@ public class HConnectionManager extends ConnectionFactory {
   * {@link HConnectionKey}.
   * @param conf configuration
   * @return HConnection object for <code>conf</code>
-  * @throws ZooKeeperConnectionException
+  * @throws org.apache.hadoop.hbase.ZooKeeperConnectionException
   */
  @Deprecated
  public static HConnection getConnection(final Configuration conf) throws IOException {
@@ -137,7 +137,7 @@ public class HConnectionManager extends ConnectionFactory {
   *
   * @param conf configuration
   * @return HConnection object for <code>conf</code>
-  * @throws ZooKeeperConnectionException
+  * @throws org.apache.hadoop.hbase.ZooKeeperConnectionException
   */
  @Deprecated
  public static HConnection createConnection(Configuration conf) throws IOException {
@@ -163,7 +163,7 @@ public class HConnectionManager extends ConnectionFactory {
   * @param conf configuration
   * @param pool the thread pool to use for batch operation in HTables used via this HConnection
   * @return HConnection object for <code>conf</code>
-  * @throws ZooKeeperConnectionException
+  * @throws org.apache.hadoop.hbase.ZooKeeperConnectionException
   */
  @Deprecated
  public static HConnection createConnection(Configuration conf, ExecutorService pool)
@@ -189,7 +189,7 @@ public class HConnectionManager extends ConnectionFactory {
   * @param conf configuration
   * @param user the user the connection is for
   * @return HConnection object for <code>conf</code>
-  * @throws ZooKeeperConnectionException
+  * @throws org.apache.hadoop.hbase.ZooKeeperConnectionException
   */
  @Deprecated
  public static HConnection createConnection(Configuration conf, User user)
@@ -216,7 +216,7 @@ public class HConnectionManager extends ConnectionFactory {
   * @param pool the thread pool to use for batch operation in HTables used via this HConnection
   * @param user the user the connection is for
   * @return HConnection object for <code>conf</code>
-  * @throws ZooKeeperConnectionException
+  * @throws org.apache.hadoop.hbase.ZooKeeperConnectionException
   */
  @Deprecated
  public static HConnection createConnection(Configuration conf, ExecutorService pool, User user)
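All of the HConnectionManager methods touched above are deprecated; ConnectionFactory is the supported entry point. A minimal replacement sketch (the table name is an assumption):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.client.Table;

    public class ConnectionFactoryExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // The caller owns the Connection and the Tables obtained from it; close both.
        try (Connection connection = ConnectionFactory.createConnection(conf);
             Table table = connection.getTable(TableName.valueOf("example_table"))) {
          // ... use the table ...
        }
      }
    }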
@@ -170,8 +170,6 @@ public class HTablePool implements Closeable {
 
  /**
   * Get a reference to the specified table from the pool.
-  * <p>
-  * <p/>
   *
   * @param tableName
   *          table name
@@ -189,7 +187,6 @@
 
  /**
   * Get a reference to the specified table from the pool.
-  * <p>
   *
   * Create a new one if one is not available.
   *
@@ -242,7 +242,7 @@ public class Result implements CellScannable, CellScanner {
   *
   * WARNING do not use, expensive. This does an arraycopy of the cell[]'s value.
   *
-  * Added to ease transition from 0.94 -> 0.96.
+  * Added to ease transition from 0.94 -&gt; 0.96.
   *
   * @deprecated as of 0.96, use {@link #rawCells()}
   * @return array of KeyValues, empty array if nothing in result.
@@ -272,7 +272,7 @@ public class Result implements CellScannable, CellScanner {
   *
   * WARNING do not use, expensive. This does an arraycopy of the cell[]'s value.
   *
-  * Added to ease transition from 0.94 -> 0.96.
+  * Added to ease transition from 0.94 -&gt; 0.96.
   *
   * @deprecated as of 0.96, use {@link #listCells()}
   * @return all sorted List of KeyValues; can be null if no cells in the result
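As the deprecation notes above say, rawCells() and listCells() replace the KeyValue-returning raw() and list(). A short sketch of reading a Result with the non-deprecated API:

    import java.util.List;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.util.Bytes;

    public class ResultCellsExample {
      // Walks the cells of a Result without the deprecated KeyValue copies.
      static void printCells(Result result) {
        for (Cell cell : result.rawCells()) {
          System.out.println(Bytes.toString(CellUtil.cloneQualifier(cell)) + " = "
              + Bytes.toString(CellUtil.cloneValue(cell)));
        }
        List<Cell> cells = result.listCells();  // may be null for an empty Result
        System.out.println("cells: " + (cells == null ? 0 : cells.size()));
      }
    }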
@@ -353,7 +353,6 @@ public class ReplicationAdmin implements Closeable {
   * Append the replicable table-cf config of the specified peer
   * @param id a short that identifies the cluster
   * @param tableCfs table-cfs config str
-  * @throws KeeperException
   */
  public void appendPeerTableCFs(String id, String tableCfs) throws ReplicationException {
    appendPeerTableCFs(id, parseTableCFsFromConfig(tableCfs));
@@ -363,7 +362,6 @@ public class ReplicationAdmin implements Closeable {
   * Append the replicable table-cf config of the specified peer
   * @param id a short that identifies the cluster
   * @param tableCfs A map from tableName to column family names
-  * @throws KeeperException
   */
  public void appendPeerTableCFs(String id, Map<TableName, ? extends Collection<String>> tableCfs)
      throws ReplicationException {
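A minimal sketch of calling the map-based appendPeerTableCFs shown above; the peer id, table name and column families are assumptions:

    import java.util.Arrays;
    import java.util.Collection;
    import java.util.HashMap;
    import java.util.Map;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.replication.ReplicationAdmin;

    public class AppendPeerTableCfsExample {
      // Asks peer "1" to also replicate two column families of one table.
      static void addTableToPeer(ReplicationAdmin replicationAdmin) throws Exception {
        Map<TableName, Collection<String>> tableCfs = new HashMap<TableName, Collection<String>>();
        tableCfs.put(TableName.valueOf("example_table"), Arrays.asList("cf1", "cf2"));
        replicationAdmin.appendPeerTableCFs("1", tableCfs);
      }
    }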
@@ -137,7 +137,7 @@ public abstract class Filter {
 
  /**
   * WARNING: please to not override this method. Instead override {@link #transformCell(Cell)}.
-  * This is for transition from 0.94 -> 0.96
+  * This is for transition from 0.94 -&gt; 0.96
   **/
  @Deprecated // use Cell transformCell(final Cell)
  abstract public KeyValue transform(final KeyValue currentKV) throws IOException;
@@ -86,7 +86,7 @@ public abstract class FilterBase extends Filter {
  /**
   * WARNING: please to not override this method. Instead override {@link #transformCell(Cell)}.
   *
-  * This is for transition from 0.94 -> 0.96
+  * This is for transition from 0.94 -&gt; 0.96
   */
  @Override
  @Deprecated
@@ -229,7 +229,7 @@ final public class FilterList extends Filter {
   *
   * When removing this, its body should be placed in transformCell.
   *
-  * This is for transition from 0.94 -> 0.96
+  * This is for transition from 0.94 -&gt; 0.96
   */
  @Deprecated
  @Override
@@ -130,7 +130,7 @@ final public class FilterWrapper extends Filter {
  /**
   * WARNING: please to not override this method. Instead override {@link #transformCell(Cell)}.
   *
-  * This is for transition from 0.94 -> 0.96
+  * This is for transition from 0.94 -&gt; 0.96
   */
  @Override
  @Deprecated
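The warnings above steer filter authors toward transformCell(Cell). A minimal sketch of a custom filter that follows that advice (a pass-through transform, assuming the branch-1 FilterBase signature):

    import java.io.IOException;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.filter.FilterBase;

    // Extends FilterBase and overrides transformCell(Cell) rather than the
    // deprecated KeyValue-based transform(), per the warnings above.
    public class PassThroughTransformFilter extends FilterBase {
      @Override
      public Cell transformCell(Cell cell) throws IOException {
        // A real filter would return a modified Cell here; this sketch passes it through.
        return cell;
      }
    }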
@@ -280,7 +280,7 @@ public final class ProtobufUtil {
   * @param bytes Bytes to check.
   * @param offset offset to start at
   * @param len length to use
-  * @return True if passed <code>bytes</code> has {@link ProtobufMagic#PB_MAGIC} for a prefix.
+  * @return True if passed <code>bytes</code> has {@link #PB_MAGIC} for a prefix.
   */
  public static boolean isPBMagicPrefix(final byte [] bytes, int offset, int len) {
    if (bytes == null || len < PB_MAGIC.length) return false;
@@ -2500,7 +2500,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
   * @return Created KeyValue OR if we find a length of zero, we will return null which
   * can be useful marking a stream as done.
   * @throws IOException
-  * @{@link Deprecated} Use {@link KeyValueUtil#iscreate(InputStream, boolean)}
+  * @deprecated Use {@link KeyValueUtil#iscreate(InputStream, boolean)}
   */
  @Deprecated
  public static KeyValue iscreate(final InputStream in) throws IOException {
@@ -555,7 +555,8 @@ public class KeyValueUtil {
 
  /**
   * Create a KeyValue reading from the raw InputStream. Named
-  * <code>iscreate</code> so doesn't clash with {@link #create(DataInput)}
+  * <code>iscreate</code> so doesn't clash with the <code>create(DataInput)</code> method
+  * added in 2.0
   *
   * @param in
   * @param withTags
@@ -144,19 +144,19 @@ public class PrefixTreeSeeker implements EncodedSeeker {
 
  /**
   * Seek forward only (should be called reseekToKeyInBlock?).
-  * <p/>
-  * If the exact key is found look at the seekBefore variable and:<br/>
-  * - if true: go to the previous key if it's true<br/>
+  * <p>
+  * If the exact key is found look at the seekBefore variable and:<br>
+  * - if true: go to the previous key if it's true<br>
   * - if false: stay on the exact key
-  * <p/>
+  * </p><p>
   * If the exact key is not found, then go to the previous key *if possible*, but remember to
   * leave the scanner in a valid state if possible.
-  * <p/>
+  * </p>
   * @param keyOnlyBytes KeyValue format of a Cell's key at which to position the seeker
   * @param offset offset into the keyOnlyBytes array
   * @param length number of bytes of the keyOnlyBytes array to use
   * @param forceBeforeOnExactMatch if an exact match is found and seekBefore=true, back up 1 Cell
-  * @return 0 if the seeker is on the exact key<br/>
+  * @return 0 if the seeker is on the exact key<br>
   *         1 if the seeker is not on the key for any reason, including seekBefore being true
   */
  @Override
@@ -34,7 +34,7 @@ import com.google.common.annotations.VisibleForTesting;
 
 /**
  * Coordinated operations for {@link SplitLogWorker} and
- * {@link org.apache.hadoop.hbase.regionserver.handler.HLogSplitterHandler} Important
+ * {@link org.apache.hadoop.hbase.regionserver.handler.WALSplitterHandler} Important
  * methods for SplitLogWorker: <BR>
  * {@link #isReady()} called from {@link SplitLogWorker#run()} to check whether the coordination is
  * ready to supply the tasks <BR>
@@ -81,8 +81,8 @@ public interface SplitTransactionCoordination {
   * @param std split transaction details
   * @param parent
   * @throws IOException If thrown, transaction failed. Call
-  * {@link org.apache.hadoop.hbase.regionserver.
-  * SplitTransaction#rollback(Server, RegionServerServices)}
+  * {@link org.apache.hadoop.hbase.regionserver.SplitTransaction#rollback(
+  * Server, RegionServerServices)}
   */
  void completeSplitTransaction(RegionServerServices services, Region first,
      Region second, SplitTransactionDetails std, Region parent) throws IOException;
@@ -55,8 +55,8 @@ import org.apache.hadoop.util.StringUtils;
  * single-level and multi-level block indexes.
  *
  * Examples of how to use the block index writer can be found in
- * {@link org.apache.hadoop.hbase.io.hfile.CompoundBloomFilterWriter} and
- * {@link HFileWriterWriterV2}. Examples of how to use the reader can be
+ * {@link org.apache.hadoop.hbase.util.CompoundBloomFilterWriter} and
+ * {@link HFileWriterV2}. Examples of how to use the reader can be
  * found in {@link HFileReaderV2} and
 * {@link org.apache.hadoop.hbase.io.hfile.TestHFileBlockIndex}.
  */
@@ -1300,7 +1300,8 @@ public class HMaster extends HRegionServer implements MasterServices, Server {
   * @return true if normalization step was performed successfully, false otherwise
   *   (specifically, if HMaster hasn't been initialized properly or normalization
   *   is globally disabled)
-  * @throws IOException, CoordinatedStateException
+  * @throws IOException
+  * @throws CoordinatedStateException
   */
  public boolean normalizeRegions() throws IOException, CoordinatedStateException {
    if (!this.initialized) {
@@ -36,13 +36,15 @@ import java.util.List;
  *
  * Logic in use:
  *
- *  - get all regions of a given table
- *  - get avg size S of each region (by total size of store files reported in RegionLoad)
- *  - If biggest region is bigger than S * 2, it is kindly requested to split,
+ * <ol>
+ * <li> get all regions of a given table
+ * <li> get avg size S of each region (by total size of store files reported in RegionLoad)
+ * <li> If biggest region is bigger than S * 2, it is kindly requested to split,
  *    and normalization stops
- *  - Otherwise, two smallest region R1 and its smallest neighbor R2 are kindly requested
- *    to merge, if R1 + R1 < S, and normalization stops
- *  - Otherwise, no action is performed
+ * <li> Otherwise, two smallest region R1 and its smallest neighbor R2 are kindly requested
+ *    to merge, if R1 + R1 < S, and normalization stops
+ * <li> Otherwise, no action is performed
+ * </ol>
  */
 @InterfaceAudience.Private
 public class SimpleRegionNormalizer implements RegionNormalizer {
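The split/merge thresholds described in that javadoc can be illustrated with plain numbers. A simplified sketch of the decision only (region adjacency and the actual RegionNormalizer types are ignored; the minimum-region-count guard is an assumption):

    import java.util.Collections;
    import java.util.List;

    public class NormalizationSketch {
      // regionSizes: per-region store file sizes (e.g. in MB), sorted ascending.
      static String decide(List<Double> regionSizes) {
        if (regionSizes.size() < 3) {
          return "none";                          // too few regions to normalize (assumed guard)
        }
        double total = 0;
        for (double size : regionSizes) {
          total += size;
        }
        double avg = total / regionSizes.size();  // "S" in the javadoc above
        double biggest = Collections.max(regionSizes);
        if (biggest > 2 * avg) {
          return "split the biggest region";
        }
        if (regionSizes.get(0) + regionSizes.get(1) < avg) {
          return "merge the two smallest regions";
        }
        return "none";
      }
    }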
@@ -261,7 +261,7 @@ public class ByteBloomFilter implements BloomFilter, BloomFilterWriter {
  }
 
  /**
-  * Determines & initializes bloom filter meta data from user config. Call
+  * Determines &amp; initializes bloom filter meta data from user config. Call
   * {@link #allocBloom()} to allocate bloom filter data.
   *
   * @param maxKeys Maximum expected number of keys that will be stored in this
@@ -40,7 +40,7 @@ import org.apache.zookeeper.KeeperException;
 import org.apache.zookeeper.KeeperException.NoNodeException;
 
 /**
- * Tool to migrate zookeeper data of older hbase versions(<0.95.0) to PB.
+ * Tool to migrate zookeeper data of older hbase versions(&lt;0.95.0) to PB.
 */
 public class ZKDataMigrator extends Configured implements Tool {
 
pom.xml (15 changed lines)
@@ -1158,7 +1158,7 @@
     <maven.antrun.version>1.6</maven.antrun.version>
     <jamon.plugin.version>2.3.4</jamon.plugin.version>
     <findbugs-annotations>1.3.9-1</findbugs-annotations>
-    <javadoc.version>2.9</javadoc.version>
+    <javadoc.version>2.10.3</javadoc.version>
     <!-- General Packaging -->
     <package.prefix>/usr</package.prefix>
     <package.conf.dir>/etc/hbase</package.conf.dir>
@@ -2413,6 +2413,19 @@
           <excludePackageNames>org.apache.hadoop.hbase.generated.master:org.apache.hadoop.hbase.protobuf.generated</excludePackageNames>
           <maxmemory>2048m</maxmemory>
           <notimestamp>true</notimestamp>
+          <!-- JDK8 javadoc requires test scope transitive dependencies due to our custom doclet -->
+          <additionalDependencies>
+            <additionalDependency>
+              <groupId>org.mockito</groupId>
+              <artifactId>mockito-all</artifactId>
+              <version>${mockito-all.version}</version>
+            </additionalDependency>
+            <additionalDependency>
+              <groupId>org.hamcrest</groupId>
+              <artifactId>hamcrest-core</artifactId>
+              <version>${hamcrest.version}</version>
+            </additionalDependency>
+          </additionalDependencies>
         </configuration>
         <reportSets>
           <reportSet>