HBASE-22399 Change default hadoop-two.version to 2.8.x and remove the 2.7.x hadoop checks

Duo Zhang 2019-05-13 10:30:13 +08:00
parent 0b8493f886
commit 083605df8f
4 changed files with 27 additions and 14 deletions


@@ -116,7 +116,7 @@ pipeline {
}
stage ('hadoop 2 cache') {
environment {
HADOOP2_VERSION="2.7.1"
HADOOP2_VERSION="2.8.2"
}
steps {
// directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(

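For context, a stage like this typically just pre-fetches the Hadoop 2 binary tarball selected by HADOOP2_VERSION into a directory that is unique to the parallel stage. The sketch below is illustrative only; the download URL, directory layout, and exact commands are assumptions, not the project's actual Jenkins steps.

# Illustrative sketch only: cache the Hadoop tarball named by HADOOP2_VERSION.
# The directory must stay unique per parallel stage (stages share one workspace).
CACHE_DIR="${WORKSPACE}/downloads-hadoop-2"        # hypothetical per-stage directory
TARBALL="hadoop-${HADOOP2_VERSION}.tar.gz"
mkdir -p "${CACHE_DIR}"
if [ ! -f "${CACHE_DIR}/${TARBALL}" ]; then
  curl -fL -o "${CACHE_DIR}/${TARBALL}" \
    "https://archive.apache.org/dist/hadoop/common/hadoop-${HADOOP2_VERSION}/${TARBALL}"
fi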

@@ -504,27 +504,33 @@ function hadoopcheck_rebuild
# All supported Hadoop versions that we want to test the compilation with
# See the Hadoop section on prereqs in the HBase Reference Guide
hbase_common_hadoop2_versions="2.7.1 2.7.2 2.7.3 2.7.4"
if [[ "${PATCH_BRANCH}" = branch-1.* ]] && [[ "${PATCH_BRANCH#branch-1.}" -lt "5" ]]; then
yetus_info "Setting Hadoop 2 versions to test based on before-branch-1.5 rules."
if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
hbase_hadoop2_versions="2.4.1 2.5.2 2.6.5 2.7.4"
hbase_hadoop2_versions="2.4.1 2.5.2 2.6.5 2.7.7"
else
hbase_hadoop2_versions="2.4.0 2.4.1 2.5.0 2.5.1 2.5.2 2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 ${hbase_common_hadoop2_versions}"
hbase_hadoop2_versions="2.4.0 2.4.1 2.5.0 2.5.1 2.5.2 2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3 2.7.4 2.7.5 2.7.6 2.7.7"
fi
elif [[ "${PATCH_BRANCH}" = branch-2.0 ]]; then
yetus_info "Setting Hadoop 2 versions to test based on branch-2.0 rules."
if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
hbase_hadoop2_versions="2.6.5 2.7.4"
hbase_hadoop2_versions="2.6.5 2.7.7 2.8.5"
else
hbase_hadoop2_versions="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 ${hbase_common_hadoop2_versions}"
hbase_hadoop2_versions="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3 2.7.4 2.7.5 2.7.6 2.7.7 2.8.2 2.8.3 2.8.4 2.8.5"
fi
elif [[ "${PATCH_BRANCH}" = branch-2.1 ]]; then
yetus_info "Setting Hadoop 2 versions to test based on branch-2.1 rules."
if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
hbase_hadoop2_versions="2.7.7 2.8.5"
else
hbase_hadoop2_versions="2.7.1 2.7.2 2.7.3 2.7.4 2.7.5 2.7.6 2.7.7 2.8.2 2.8.3 2.8.4 2.8.5"
fi
else
yetus_info "Setting Hadoop 2 versions to test based on branch-1.5+/branch-2.1+/master/feature branch rules."
if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
hbase_hadoop2_versions="2.7.4"
hbase_hadoop2_versions="2.8.5 2.9.2"
else
hbase_hadoop2_versions="${hbase_common_hadoop2_versions}"
hbase_hadoop2_versions="2.8.2 2.8.3 2.8.4 2.8.5 2.9.1 2.9.2"
fi
fi
hbase_hadoop3_versions="3.0.0"

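For readers unfamiliar with the personality: hadoopcheck_rebuild later walks these version lists and recompiles HBase against each selected release. A minimal sketch of that loop, assuming a plain "mvn clean install" per version (the real script may pass different goals and flags):

# Minimal sketch, not the actual personality code: rebuild against every
# selected Hadoop 2 version and stop at the first one that fails to compile.
for hadoopver in ${hbase_hadoop2_versions}; do
  mvn clean install -DskipTests -Dhadoop-two.version="${hadoopver}" || exit 1
done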

@@ -57,13 +57,10 @@ public class TestReversedScannerCallable {
@Before
public void setUp() throws Exception {
byte[] ROW_BEFORE = ConnectionUtils.createCloseRowBefore(ROW);
Configuration conf = Mockito.mock(Configuration.class);
HRegionLocation regionLocation = Mockito.mock(HRegionLocation.class);
ServerName serverName = Mockito.mock(ServerName.class);
Mockito.when(connection.getConfiguration()).thenReturn(conf);
Mockito.when(connection.getConfiguration()).thenReturn(new Configuration());
Mockito.when(regionLocations.size()).thenReturn(1);
Mockito.when(regionLocations.getRegionLocation(0)).thenReturn(regionLocation);
Mockito.when(regionLocation.getHostname()).thenReturn("localhost");

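The test setup now hands the mocked connection a real Configuration instead of a mocked Configuration object. To re-run just this test after such a change, the usual Surefire invocation works; the module name below is an assumption:

# Re-run only the affected test (module assumed to be hbase-client).
mvn test -pl hbase-client -Dtest=TestReversedScannerCallable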
pom.xml

@@ -1412,7 +1412,7 @@
<maven.min.version>3.0.4</maven.min.version>
<java.min.version>${compileSource}</java.min.version>
<!-- Dependencies -->
<hadoop-two.version>2.7.7</hadoop-two.version>
<hadoop-two.version>2.8.5</hadoop-two.version>
<hadoop-three.version>3.0.3</hadoop-three.version>
<!-- These must be defined here for downstream build tools that don't look at profiles.
They ought to match the values found in our default hadoop profile, which is
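Since hadoop-two.version is an ordinary Maven property, the new 2.8.5 default can still be overridden on the command line when building against a different Hadoop 2 release, for example:

# Example only: build against a specific Hadoop 2 version instead of the default.
mvn clean install -DskipTests -Dhadoop-two.version=2.8.2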
@@ -2657,7 +2657,17 @@
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-auth</artifactId>
<version>${hadoop-two.version}</version>
<exclusions>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</exclusion>
<exclusion>
<groupId>net.minidev</groupId>
<artifactId>json-smart</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
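One way to sanity-check the new exclusions is to ask Maven for a filtered dependency tree and confirm the excluded artifacts no longer arrive through hadoop-auth; the module picked below is just an example:

# Example check (module name is illustrative): neither artifact should appear
# under org.apache.hadoop:hadoop-auth after the exclusions.
mvn -pl hbase-client dependency:tree \
  -Dincludes=com.google.guava:guava,net.minidev:json-smart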