HBASE-22413 Backport 'HBASE-22399 Change default hadoop-two.version to 2.8.x and remove the 2.7.x hadoop checks' to branch-1

Signed-off-by: Andrew Purtell <apurtell@apache.org>
Duo Zhang, 2019-05-17 14:34:11 +08:00 (committed by zhangduo)
parent e2d48f41c5
commit 4ab2e1c094
4 changed files with 25 additions and 20 deletions

dev-support/hbase-personality.sh

@@ -488,27 +488,33 @@ function hadoopcheck_rebuild
   # All supported Hadoop versions that we want to test the compilation with
   # See the Hadoop section on prereqs in the HBase Reference Guide
-  hbase_common_hadoop2_versions="2.7.1 2.7.2 2.7.3 2.7.4"
   if [[ "${PATCH_BRANCH}" = branch-1.* ]] && [[ "${PATCH_BRANCH#branch-1.}" -lt "5" ]]; then
     yetus_info "Setting Hadoop 2 versions to test based on before-branch-1.5 rules."
     if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
-      hbase_hadoop2_versions="2.4.1 2.5.2 2.6.5 2.7.4"
+      hbase_hadoop2_versions="2.4.1 2.5.2 2.6.5 2.7.7"
     else
-      hbase_hadoop2_versions="2.4.0 2.4.1 2.5.0 2.5.1 2.5.2 2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 ${hbase_common_hadoop2_versions}"
+      hbase_hadoop2_versions="2.4.0 2.4.1 2.5.0 2.5.1 2.5.2 2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3 2.7.4 2.7.5 2.7.6 2.7.7"
     fi
   elif [[ "${PATCH_BRANCH}" = branch-2.0 ]]; then
     yetus_info "Setting Hadoop 2 versions to test based on branch-2.0 rules."
     if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
-      hbase_hadoop2_versions="2.6.5 2.7.4"
+      hbase_hadoop2_versions="2.6.5 2.7.7 2.8.5"
     else
-      hbase_hadoop2_versions="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 ${hbase_common_hadoop2_versions}"
+      hbase_hadoop2_versions="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3 2.7.4 2.7.5 2.7.6 2.7.7 2.8.2 2.8.3 2.8.4 2.8.5"
     fi
+  elif [[ "${PATCH_BRANCH}" = branch-2.1 ]]; then
+    yetus_info "Setting Hadoop 2 versions to test based on branch-2.1 rules."
+    if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
+      hbase_hadoop2_versions="2.7.7 2.8.5"
+    else
+      hbase_hadoop2_versions="2.7.1 2.7.2 2.7.3 2.7.4 2.7.5 2.7.6 2.7.7 2.8.2 2.8.3 2.8.4 2.8.5"
+    fi
   else
     yetus_info "Setting Hadoop 2 versions to test based on branch-1.5+/branch-2.1+/master/feature branch rules."
     if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
-      hbase_hadoop2_versions="2.7.4"
+      hbase_hadoop2_versions="2.8.5 2.9.2"
     else
-      hbase_hadoop2_versions="${hbase_common_hadoop2_versions}"
+      hbase_hadoop2_versions="2.8.2 2.8.3 2.8.4 2.8.5 2.9.1 2.9.2"
     fi
   fi
   hbase_hadoop3_versions="3.0.0"
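
With this change, the catch-all branch's quick check compiles against Hadoop 2.8.5 and 2.9.2 instead of 2.7.4 alone. A minimal sketch for reproducing that check locally, assuming a branch-1 checkout with Maven on the PATH (the property name matches the pom.xml change further down):

    # Compile against each Hadoop 2 version the quick check now covers.
    for hadoopver in 2.8.5 2.9.2; do
      mvn clean install --batch-mode -DskipTests -Dhadoop-two.version="${hadoopver}" || break
    done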

hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestReversedScannerCallable.java

@@ -53,13 +53,11 @@ public class TestReversedScannerCallable {
   @Before
   public void setUp() throws Exception {
     byte[] ROW_BEFORE = ConnectionUtils.createCloseRowBefore(ROW);
-    Configuration conf = Mockito.mock(Configuration.class);
     HRegionLocation regionLocation = Mockito.mock(HRegionLocation.class);
     ServerName serverName = Mockito.mock(ServerName.class);
     HRegionInfo regionInfo = Mockito.mock(HRegionInfo.class);
-    Mockito.when(connection.getConfiguration()).thenReturn(conf);
+    Mockito.when(connection.getConfiguration()).thenReturn(new Configuration());
     Mockito.when(regionLocations.size()).thenReturn(1);
     Mockito.when(regionLocations.getRegionLocation(0)).thenReturn(regionLocation);
     Mockito.when(regionLocation.getHostname()).thenReturn("localhost");
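
The mocked Configuration becomes a real instance here, presumably because code reached through the callable now reads actual settings from it, which a bare mock would answer with nulls. A sketch for exercising just this test against the new default Hadoop, assuming it is run from the repository root:

    # Build the client module and its in-repo dependencies, then run only this test.
    mvn -pl hbase-client -am test -Dtest=TestReversedScannerCallable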

hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java

@@ -15,7 +15,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
 package org.apache.hadoop.hbase.fs;
 
 import java.lang.reflect.InvocationTargetException;
@@ -24,7 +23,6 @@ import java.net.BindException;
 import java.net.ServerSocket;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.FSDataInputStream;
@@ -34,14 +32,12 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
-import org.apache.hadoop.hdfs.DFSClient;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
-import org.apache.log4j.Level;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -60,11 +56,6 @@ import org.junit.experimental.categories.Category;
 public class TestBlockReorder {
   private static final Log LOG = LogFactory.getLog(TestBlockReorder.class);
 
-  static {
-    ((Log4JLogger) DFSClient.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger) HFileSystem.LOG).getLogger().setLevel(Level.ALL);
-  }
-
   private Configuration conf;
   private MiniDFSCluster cluster;
   private HBaseTestingUtility htu;
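
The static block has to go because the newer Hadoop line exposes these loggers through slf4j rather than commons-logging, so the cast to Log4JLogger no longer compiles. If the verbose DFS-client output is still wanted during the test, a sketch of the same effect via log4j configuration (the properties path is an assumption about the test classpath):

    # Raise the same two loggers through configuration instead of a runtime cast.
    echo 'log4j.logger.org.apache.hadoop.hdfs.DFSClient=ALL' >> hbase-server/src/test/resources/log4j.properties
    echo 'log4j.logger.org.apache.hadoop.hbase.fs.HFileSystem=ALL' >> hbase-server/src/test/resources/log4j.properties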

pom.xml

@@ -1249,7 +1249,7 @@
     <maven.min.version>3.0.3</maven.min.version>
     <java.min.version>${compileSource}</java.min.version>
     <!-- Dependencies -->
-    <hadoop-two.version>2.7.4</hadoop-two.version>
+    <hadoop-two.version>2.8.5</hadoop-two.version>
     <!-- minikdc introduced in hadoop-2.3; override when building earlier versions -->
     <hadoop-two-minikdc.version>${hadoop-two.version}</hadoop-two-minikdc.version>
     <hadoop-three.version>3.0.0</hadoop-three.version>
@@ -2274,6 +2274,16 @@
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-auth</artifactId>
       <version>${hadoop-two.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>com.google.guava</groupId>
+          <artifactId>guava</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>net.minidev</groupId>
+          <artifactId>json-smart</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
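
The new exclusions presumably keep hadoop-auth 2.8.x from dragging its own guava and json-smart copies onto the classpath, where they could conflict with the versions branch-1 already manages. One way to verify they take effect, as a sketch run from the repository root:

    # Neither artifact should appear under hadoop-auth in the resolved tree.
    mvn dependency:tree -Dincludes=com.google.guava:guava,net.minidev:json-smart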