diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
index 2098fff8008..8bbb00ebbb9 100644
--- a/dev-support/Jenkinsfile
+++ b/dev-support/Jenkinsfile
@@ -202,7 +202,9 @@ pipeline {
environment {
BASEDIR = "${env.WORKSPACE}/component"
TESTS = "${env.SHALLOW_CHECKS}"
- SET_JAVA_HOME = '/usr/lib/jvm/java-8'
+ SET_JAVA_HOME = "/usr/lib/jvm/java-11"
+ // Activates the Hadoop 3.0 profile in Maven runs.
+ HADOOP_PROFILE = '3.0'
OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_GENERAL}"
OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_GENERAL}"
ASF_NIGHTLIES_GENERAL_CHECK_BASE="${ASF_NIGHTLIES_BASE}/${OUTPUT_DIR_RELATIVE}"
diff --git a/dev-support/Jenkinsfile_GitHub b/dev-support/Jenkinsfile_GitHub
index c230f7a8e00..51300a965fc 100644
--- a/dev-support/Jenkinsfile_GitHub
+++ b/dev-support/Jenkinsfile_GitHub
@@ -78,7 +78,7 @@ pipeline {
environment {
// customized per parallel stage
PLUGINS = "${GENERAL_CHECK_PLUGINS}"
- SET_JAVA_HOME = '/usr/lib/jvm/java-8'
+ SET_JAVA_HOME = "/usr/lib/jvm/java-11"
WORKDIR_REL = "${WORKDIR_REL_GENERAL_CHECK}"
// identical for all parallel stages
WORKDIR = "${WORKSPACE}/${WORKDIR_REL}"
diff --git a/hbase-build-configuration/pom.xml b/hbase-build-configuration/pom.xml
index af39aa301eb..6edff1e3dfb 100644
--- a/hbase-build-configuration/pom.xml
+++ b/hbase-build-configuration/pom.xml
@@ -68,11 +68,6 @@
       <activation>
         <activeByDefault>false</activeByDefault>
       </activation>
-      <properties>
-        <!-- ... -->
-        <javac.version>9+181-r4173-1</javac.version>
-      </properties>
       <dependencies>
         <dependency>
           <groupId>com.google.errorprone</groupId>
@@ -86,12 +81,6 @@
-        <dependency>
-          <groupId>com.google.errorprone</groupId>
-          <artifactId>javac</artifactId>
-          <version>${javac.version}</version>
-          <scope>provided</scope>
-        </dependency>
@@ -100,17 +89,12 @@
           <groupId>org.apache.maven.plugins</groupId>
           <artifactId>maven-compiler-plugin</artifactId>
           <configuration>
-            <!-- ... -->
-            <source>${compileSource}</source>
-            <!-- ... -->
-            <fork>true</fork>
+            <release>${releaseTarget}</release>
             <showWarnings>true</showWarnings>
             <compilerArgs>
               <arg>-XDcompilePolicy=simple</arg>
-              <arg>-Xplugin:ErrorProne -XepDisableWarningsInGeneratedCode -Xep:FallThrough:OFF -Xep:MutablePublicArray:OFF -Xep:ClassNewInstance:ERROR -Xep:MissingDefault:ERROR</arg>
-              <arg>
-                -J-Xbootclasspath/p:${settings.localRepository}/com/google/errorprone/javac/${javac.version}/javac-${javac.version}.jar</arg>
+              <arg>-Xplugin:ErrorProne -XepDisableWarningsInGeneratedCode -Xep:FallThrough:OFF -Xep:MutablePublicArray:OFF -Xep:ClassNewInstance:ERROR -Xep:MissingDefault:ERROR -Xep:BanJNDI:WARN</arg>
             </compilerArgs>
           </configuration>
@@ -121,6 +105,25 @@
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-enforcer-plugin</artifactId>
+          <executions>
+            <execution>
+              <id>jdk11-required</id>
+              <goals>
+                <goal>enforce</goal>
+              </goals>
+              <configuration>
+                <rules>
+                  <requireJavaVersion>
+                    <version>[11,)</version>
+                  </requireJavaVersion>
+                </rules>
+              </configuration>
+            </execution>
+          </executions>
+        </plugin>
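
Note: the new -Xep:BanJNDI:WARN flag above enables Error Prone's BanJNDI check in warning mode, while the maven-enforcer-plugin execution makes the JDK 11 floor explicit, so builds on an older JDK fail fast with a clear message instead of dying inside the compiler plugin. BanJNDI flags code that performs JNDI lookups. A minimal sketch of the kind of call it reports (hypothetical class, not from the HBase code base):

    import javax.naming.InitialContext;
    import javax.naming.NamingException;

    class JndiExample {
      Object resolve(String name) throws NamingException {
        // BanJNDI reports this lookup; at WARN it logs instead of breaking the build.
        return new InitialContext().lookup(name);
      }
    }
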
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerCommandLine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerCommandLine.java
index b6badc79097..b257a938337 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerCommandLine.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerCommandLine.java
@@ -36,21 +36,17 @@ import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableSet;
+
/**
* Base class for command lines that start up various HBase daemons.
*/
@InterfaceAudience.Private
public abstract class ServerCommandLine extends Configured implements Tool {
private static final Logger LOG = LoggerFactory.getLogger(ServerCommandLine.class);
- @SuppressWarnings("serial")
- private static final Set<String> DEFAULT_SKIP_WORDS = new HashSet<String>() {
- {
- add("secret");
- add("passwd");
- add("password");
- add("credential");
- }
- };
+
+ private static final Set<String> DEFAULT_SKIP_WORDS =
+ ImmutableSet.of("secret", "passwd", "password", "credential");
/**
* Implementing subclasses should return a usage string to print out.
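
Note: the double-brace idiom removed above compiles to a fresh anonymous HashSet subclass, which is why the field needed @SuppressWarnings("serial"), and the resulting set stays mutable. ImmutableSet.of avoids both problems. A standalone before/after sketch, using plain Guava rather than HBase's shaded thirdparty copy:

    import java.util.HashSet;
    import java.util.Set;

    import com.google.common.collect.ImmutableSet;

    class SkipWords {
      // Before: anonymous HashSet subclass, serializable by inheritance, still mutable.
      static final Set<String> DOUBLE_BRACE = new HashSet<String>() {
        {
          add("secret");
          add("passwd");
        }
      };

      // After: a single immutable value, no extra class, no "serial" warning.
      static final Set<String> IMMUTABLE = ImmutableSet.of("secret", "passwd");
    }
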
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java
index 779fb9a4c90..97ae1662422 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java
@@ -542,14 +542,15 @@ public class TestScanner {
}
}
- /*
- * @param hri Region
+ /**
+ * Count rows in the table.
+ * @param countTable Table to count rows of.
* @param flushIndex At what row we start the flush.
* @param concurrent if the flush should be concurrent or sync.
* @return Count of rows found.
*/
private int count(final Table countTable, final int flushIndex, boolean concurrent)
- throws IOException {
+ throws Exception {
LOG.info("Taking out counting scan");
Scan scan = new Scan();
for (byte[] qualifier : EXPLICIT_COLS) {
@@ -577,10 +578,10 @@ public class TestScanner {
}
}
};
- if (concurrent) {
- t.start(); // concurrently flush.
- } else {
- t.run(); // sync flush
+ t.start();
+ if (!concurrent) {
+ // sync flush
+ t.join();
}
LOG.info("Continuing on after kicking off background flush");
justFlushed = true;
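
Note: the old code called Thread.run() for the synchronous case, which does not start a thread at all: it simply invokes the body on the calling thread. Starting the thread unconditionally and joining when a synchronous flush is wanted keeps one code path for both modes. A minimal sketch of the distinction:

    public class RunVsStart {
      public static void main(String[] args) throws InterruptedException {
        Thread t = new Thread(() -> System.out.println(Thread.currentThread().getName()));
        t.run();   // prints "main": the body runs inline, no thread is started
        t.start(); // prints "Thread-0": the body runs on the new thread
        t.join();  // the caller blocks until that thread finishes
      }
    }
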
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestNonHBaseReplicationEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestNonHBaseReplicationEndpoint.java
index 78ee5322fdd..70cae18b456 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestNonHBaseReplicationEndpoint.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestNonHBaseReplicationEndpoint.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.replication;
import java.io.IOException;
import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
@@ -45,6 +43,8 @@ import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableMap;
+
@Category({ MediumTests.class, ReplicationTests.class })
public class TestNonHBaseReplicationEndpoint {
@@ -86,11 +86,8 @@ public class TestNonHBaseReplicationEndpoint {
ReplicationPeerConfig peerConfig = ReplicationPeerConfig.newBuilder()
.setReplicationEndpointImpl(NonHBaseReplicationEndpoint.class.getName())
- .setReplicateAllUserTables(false).setTableCFsMap(new HashMap<TableName, List<String>>() {
- {
- put(tableName, new ArrayList<>());
- }
- }).build();
+ .setReplicateAllUserTables(false)
+ .setTableCFsMap(ImmutableMap.of(tableName, new ArrayList<>())).build();
ADMIN.addReplicationPeer("1", peerConfig);
loadData(table);
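
Note: besides generating an extra class, the double-brace HashMap removed here is an anonymous inner class created in an instance method, so it keeps a hidden reference to the enclosing test instance for as long as the map is reachable; ImmutableMap.of carries no such reference. A small sketch of that capture (illustrative names):

    import java.util.HashMap;
    import java.util.Map;

    class Enclosing {
      Map<String, String> make() {
        // The returned map holds a synthetic reference to this Enclosing
        // instance, pinning it in memory while the map lives.
        return new HashMap<String, String>() {
          {
            put("k", "v");
          }
        };
      }
    }
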
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestBasicWALEntryStream.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestBasicWALEntryStream.java
index 8601bff885c..eda89b232c3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestBasicWALEntryStream.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestBasicWALEntryStream.java
@@ -611,7 +611,8 @@ public abstract class TestBasicWALEntryStream extends WALEntryStreamTestBase {
localLogQueue.enqueueLog(emptyLog, fakeWalGroupId);
ReplicationSourceWALReader reader = new ReplicationSourceWALReader(fs, conf, localLogQueue, 0,
getDummyFilter(), source, fakeWalGroupId);
- reader.run();
+ reader.start();
+ reader.join();
// ReplicationSourceWALReaderThread#handleEofException method will
// remove empty log from logQueue.
assertEquals(0, localLogQueue.getQueueSize(fakeWalGroupId));
@@ -650,7 +651,8 @@ public abstract class TestBasicWALEntryStream extends WALEntryStreamTestBase {
getDummyFilter(), source, fakeWalGroupId);
assertEquals("Initial log queue size is not correct", 2,
localLogQueue.getQueueSize(fakeWalGroupId));
- reader.run();
+ reader.start();
+ reader.join();
// remove empty log from logQueue.
assertEquals(0, localLogQueue.getQueueSize(fakeWalGroupId));
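
Note: same Thread.run() cleanup as in TestScanner, with one extra point: the assertions that follow read state the reader thread mutates, so the join() is what makes them well-ordered rather than racy. A self-contained sketch (the names are illustrative, not the test's real types):

    import java.util.concurrent.atomic.AtomicInteger;

    class JoinBeforeAssert {
      public static void main(String[] args) throws InterruptedException {
        AtomicInteger queueSize = new AtomicInteger(1);
        Thread reader = new Thread(queueSize::decrementAndGet);
        reader.start();
        reader.join(); // without this, the check below races the reader thread
        if (queueSize.get() != 0) {
          throw new AssertionError("queue not drained");
        }
      }
    }
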
diff --git a/pom.xml b/pom.xml
index f8805524c0b..6a70a35a5c2 100644
--- a/pom.xml
+++ b/pom.xml
@@ -840,7 +840,7 @@
     -->
     <checkstyle.version>8.29</checkstyle.version>
     <exec.maven.version>1.6.0</exec.maven.version>
-    <error-prone.version>2.10.0</error-prone.version>
+    <error-prone.version>2.16</error-prone.version>
     <jamon.plugin.version>2.4.2</jamon.plugin.version>
     <lifecycle.mapping.version>1.0.0</lifecycle.mapping.version>
     <maven.antrun.version>1.8</maven.antrun.version>