MAPREDUCE-7350. Replace Guava Lists usage by Hadoop's own Lists in hadoop-mapreduce-project (#3074)

Author: Viraj Jasani
Date: 2021-06-07 08:21:29 +05:30
Committed by: GitHub
parent 9c7b8cf54e
commit 207c92753f
10 changed files with 73 additions and 13 deletions
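
For reference, the change is import-only: every call site keeps the same Lists.newArrayList(...) shape, because hadoop-common's org.apache.hadoop.util.Lists mirrors Guava's list factories. A minimal sketch of the pattern after migration (class and variable names hypothetical, assuming the newArrayList(E...) factory as provided by hadoop-common):

    import java.util.List;
    // Replaces org.apache.hadoop.thirdparty.com.google.common.collect.Lists
    import org.apache.hadoop.util.Lists;

    public class ListsMigrationSketch {
      public static void main(String[] args) {
        // Same factory call as with Guava; only the import line changed.
        List<String> phases = Lists.newArrayList("map", "shuffle", "reduce");
        System.out.println(phases);
      }
    }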

View File

@@ -61,6 +61,7 @@
 import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.AccessControlList;
+import org.apache.hadoop.util.Lists;
 import org.apache.hadoop.yarn.MockApps;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
@@ -70,7 +71,6 @@
 import org.apache.hadoop.yarn.util.Records;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Iterators;
-import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
 public class MockJobs extends MockApps {

View File

@@ -21,7 +21,6 @@
 import java.util.List;
 import java.util.Map;
-import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -30,6 +29,7 @@
 import org.apache.hadoop.mapreduce.JobCounter;
 import org.apache.hadoop.mapreduce.TaskCounter;
 import org.apache.hadoop.mapreduce.util.ResourceBundles;
+import org.apache.hadoop.util.Lists;
 /**
  * An abstract class to provide common implementation of the

View File

@@ -41,15 +41,14 @@
 import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.security.TokenCache;
+import org.apache.hadoop.util.Lists;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.StopWatch;
 import org.apache.hadoop.util.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
 /**
  * A base class for file-based {@link InputFormat}s.
  *
  * <p><code>FileInputFormat</code> is the base class for all file-based

View File

@@ -21,13 +21,12 @@
 import java.text.ParseException;
 import java.util.List;
-import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.mapreduce.counters.AbstractCounters;
 import org.apache.hadoop.mapreduce.Counter;
 import org.apache.hadoop.mapreduce.counters.CounterGroupBase;
+import org.apache.hadoop.util.Lists;
 import org.apache.hadoop.util.StringInterner;
 import org.apache.hadoop.util.StringUtils;

View File

@@ -33,6 +33,7 @@
 import org.apache.hadoop.fs.RawLocalFileSystem;
 import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+import org.apache.hadoop.util.Lists;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -43,8 +44,6 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
 @RunWith(value = Parameterized.class)
 public class TestFileInputFormat {

View File

@@ -43,6 +43,7 @@
 import org.apache.hadoop.mapred.SplitLocationInfo;
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.util.Lists;
 import org.apache.hadoop.util.Sets;
 import org.junit.After;
 import org.junit.Assert;
@@ -54,8 +55,6 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
 @RunWith(value = Parameterized.class)
 public class TestFileInputFormat {

View File

@@ -30,6 +30,7 @@
 import org.apache.hadoop.mapred.nativetask.testutil.ResultVerifier;
 import org.apache.hadoop.mapred.nativetask.testutil.ScenarioConfiguration;
 import org.apache.hadoop.mapred.nativetask.testutil.TestConstants;
+import org.apache.hadoop.util.Lists;
 import org.junit.AfterClass;
 import org.apache.hadoop.util.NativeCodeLoader;
 import org.junit.Assume;
@@ -42,7 +43,6 @@
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.thirdparty.com.google.common.base.Splitter;
-import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
 @RunWith(Parameterized.class)
 public class KVTest {

View File

@@ -18,7 +18,6 @@
 package org.apache.hadoop.mapred.uploader;
-import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
 import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
 import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
 import org.apache.commons.io.FileUtils;
@@ -32,6 +31,7 @@
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
+import org.apache.hadoop.util.Lists;
 import org.junit.Assert;
 import org.junit.Assume;
 import org.junit.Before;

View File

@@ -177,6 +177,38 @@
           </properties>
         </configuration>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-enforcer-plugin</artifactId>
+        <dependencies>
+          <dependency>
+            <groupId>de.skuzzle.enforcer</groupId>
+            <artifactId>restrict-imports-enforcer-rule</artifactId>
+            <version>${restrict-imports.enforcer.version}</version>
+          </dependency>
+        </dependencies>
+        <executions>
+          <execution>
+            <id>banned-illegal-imports</id>
+            <phase>process-sources</phase>
+            <goals>
+              <goal>enforce</goal>
+            </goals>
+            <configuration>
+              <rules>
+                <restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
+                  <includeTestCode>true</includeTestCode>
+                  <reason>Use hadoop-common provided Lists rather than Guava provided Lists</reason>
+                  <bannedImports>
+                    <bannedImport>org.apache.hadoop.thirdparty.com.google.common.collect.Lists</bannedImport>
+                    <bannedImport>com.google.common.collect.Lists</bannedImport>
+                  </bannedImports>
+                </restrictImports>
+              </rules>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
     </plugins>
   </build>
   <profiles>
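
The enforcer execution above makes the migration stick: during the process-sources phase it fails the build if any file in the module, test code included (includeTestCode), imports either banned Lists class. A hedged sketch of what the rule rejects and accepts (file contents hypothetical):

    // Either of these imports now fails the banned-illegal-imports check:
    //   import com.google.common.collect.Lists;
    //   import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;

    import java.util.List;
    // The allowed replacement from hadoop-common:
    import org.apache.hadoop.util.Lists;

    public class BannedImportSketch {
      private final List<String> counters = Lists.newArrayList("MAP_INPUT_RECORDS");
    }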

View File

@@ -155,6 +155,38 @@
           </excludes>
         </configuration>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-enforcer-plugin</artifactId>
+        <dependencies>
+          <dependency>
+            <groupId>de.skuzzle.enforcer</groupId>
+            <artifactId>restrict-imports-enforcer-rule</artifactId>
+            <version>${restrict-imports.enforcer.version}</version>
+          </dependency>
+        </dependencies>
+        <executions>
+          <execution>
+            <id>banned-illegal-imports</id>
+            <phase>process-sources</phase>
+            <goals>
+              <goal>enforce</goal>
+            </goals>
+            <configuration>
+              <rules>
+                <restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
+                  <includeTestCode>true</includeTestCode>
+                  <reason>Use hadoop-common provided Lists rather than Guava provided Lists</reason>
+                  <bannedImports>
+                    <bannedImport>org.apache.hadoop.thirdparty.com.google.common.collect.Lists</bannedImport>
+                    <bannedImport>com.google.common.collect.Lists</bannedImport>
+                  </bannedImports>
+                </restrictImports>
+              </rules>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
     </plugins>
   </build>
</project>