MAPREDUCE-7350. Replace Guava Lists usage by Hadoop's own Lists in hadoop-mapreduce-project (#3074)
This commit is contained in:
parent 9c7b8cf54e
commit 207c92753f
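The change itself is mechanical: each affected file drops the shaded Guava import org.apache.hadoop.thirdparty.com.google.common.collect.Lists and imports org.apache.hadoop.util.Lists from hadoop-common instead, and the two pom.xml files gain an enforcer rule that bans the Guava variants going forward. Below is a minimal sketch of the resulting call-site pattern, assuming hadoop-common's Lists mirrors the Guava newArrayList factory methods; the class name is hypothetical and not part of the patch.

import java.util.List;

import org.apache.hadoop.util.Lists;

/** Illustrative only: the call sites stay the same after the import swap. */
public class ListsSwapSketch {
  public static void main(String[] args) {
    // Previously resolved to the shaded Guava Lists; now the hadoop-common
    // utility provides the equivalent factory method.
    List<String> phases = Lists.newArrayList("map", "shuffle", "reduce");
    phases.add("commit");
    System.out.println(phases);
  }
}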
@@ -61,6 +61,7 @@ import org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl;
import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.util.Lists;
import org.apache.hadoop.yarn.MockApps;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
@@ -70,7 +71,6 @@ import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.util.Records;

import org.apache.hadoop.thirdparty.com.google.common.collect.Iterators;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;

public class MockJobs extends MockApps {

@@ -21,7 +21,6 @@ package org.apache.hadoop.mapreduce.counters;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;

import org.apache.hadoop.classification.InterfaceAudience;
@@ -30,6 +29,7 @@ import org.apache.hadoop.mapreduce.FileSystemCounter;
import org.apache.hadoop.mapreduce.JobCounter;
import org.apache.hadoop.mapreduce.TaskCounter;
import org.apache.hadoop.mapreduce.util.ResourceBundles;
import org.apache.hadoop.util.Lists;

/**
 * An abstract class to provide common implementation of the

@@ -41,14 +41,13 @@ import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.security.TokenCache;
import org.apache.hadoop.util.Lists;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StopWatch;
import org.apache.hadoop.util.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;

/**
 * A base class for file-based {@link InputFormat}s.
 *

@@ -21,13 +21,12 @@ package org.apache.hadoop.mapreduce.util;
import java.text.ParseException;
import java.util.List;

import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.mapreduce.counters.AbstractCounters;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.counters.CounterGroupBase;
import org.apache.hadoop.util.Lists;
import org.apache.hadoop.util.StringInterner;
import org.apache.hadoop.util.StringUtils;

@@ -33,6 +33,7 @@ import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.fs.RawLocalFileSystem;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.util.Lists;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
@@ -43,8 +44,6 @@ import org.junit.runners.Parameterized.Parameters;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;

@RunWith(value = Parameterized.class)
public class TestFileInputFormat {

@@ -43,6 +43,7 @@ import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor;
import org.apache.hadoop.mapred.SplitLocationInfo;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.Lists;
import org.apache.hadoop.util.Sets;
import org.junit.After;
import org.junit.Assert;
@@ -54,8 +55,6 @@ import org.junit.runners.Parameterized.Parameters;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;

@RunWith(value = Parameterized.class)
public class TestFileInputFormat {

@@ -30,6 +30,7 @@ import org.apache.hadoop.mapred.nativetask.NativeRuntime;
import org.apache.hadoop.mapred.nativetask.testutil.ResultVerifier;
import org.apache.hadoop.mapred.nativetask.testutil.ScenarioConfiguration;
import org.apache.hadoop.mapred.nativetask.testutil.TestConstants;
import org.apache.hadoop.util.Lists;
import org.junit.AfterClass;
import org.apache.hadoop.util.NativeCodeLoader;
import org.junit.Assume;
@@ -42,7 +43,6 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hadoop.thirdparty.com.google.common.base.Splitter;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;

@RunWith(Parameterized.class)
public class KVTest {

@@ -18,7 +18,6 @@

package org.apache.hadoop.mapred.uploader;

import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
import org.apache.commons.io.FileUtils;
@@ -32,6 +31,7 @@ import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.util.Lists;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Before;

@@ -177,6 +177,38 @@
          </properties>
        </configuration>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-enforcer-plugin</artifactId>
        <dependencies>
          <dependency>
            <groupId>de.skuzzle.enforcer</groupId>
            <artifactId>restrict-imports-enforcer-rule</artifactId>
            <version>${restrict-imports.enforcer.version}</version>
          </dependency>
        </dependencies>
        <executions>
          <execution>
            <id>banned-illegal-imports</id>
            <phase>process-sources</phase>
            <goals>
              <goal>enforce</goal>
            </goals>
            <configuration>
              <rules>
                <restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
                  <includeTestCode>true</includeTestCode>
                  <reason>Use hadoop-common provided Lists rather than Guava provided Lists</reason>
                  <bannedImports>
                    <bannedImport>org.apache.hadoop.thirdparty.com.google.common.collect.Lists</bannedImport>
                    <bannedImport>com.google.common.collect.Lists</bannedImport>
                  </bannedImports>
                </restrictImports>
              </rules>
            </configuration>
          </execution>
        </executions>
      </plugin>
    </plugins>
  </build>
  <profiles>

@@ -155,6 +155,38 @@
          </excludes>
        </configuration>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-enforcer-plugin</artifactId>
        <dependencies>
          <dependency>
            <groupId>de.skuzzle.enforcer</groupId>
            <artifactId>restrict-imports-enforcer-rule</artifactId>
            <version>${restrict-imports.enforcer.version}</version>
          </dependency>
        </dependencies>
        <executions>
          <execution>
            <id>banned-illegal-imports</id>
            <phase>process-sources</phase>
            <goals>
              <goal>enforce</goal>
            </goals>
            <configuration>
              <rules>
                <restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
                  <includeTestCode>true</includeTestCode>
                  <reason>Use hadoop-common provided Lists rather than Guava provided Lists</reason>
                  <bannedImports>
                    <bannedImport>org.apache.hadoop.thirdparty.com.google.common.collect.Lists</bannedImport>
                    <bannedImport>com.google.common.collect.Lists</bannedImport>
                  </bannedImports>
                </restrictImports>
              </rules>
            </configuration>
          </execution>
        </executions>
      </plugin>
    </plugins>
  </build>
</project>
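Note: because the restrict-imports rule runs the enforce goal in the process-sources phase with includeTestCode set to true, reintroducing either banned Lists import in main or test sources of these modules should fail a regular Maven build before compilation proceeds.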