HADOOP-18022. Add restrict-imports-enforcer-rule for Guava Preconditions and remove remaining usages (#3712)

Reviewed-by: Akira Ajisaka <aajisaka@apache.org>
Signed-off-by: Takanobu Asanuma <tasanuma@apache.org>
Author: Viraj Jasani
Date: 2021-11-29 14:07:30 +05:30 (committed by GitHub)
parent 86705eda82
commit 215388beea
26 changed files with 124 additions and 88 deletions
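
The bulk of the diff is a one-line import swap: call sites keep the same Preconditions methods but resolve them from hadoop-common rather than the shaded Guava artifact. A minimal sketch of the target pattern (the class below is hypothetical, not taken from the commit):

// Hypothetical class illustrating the import this commit standardizes on.
// Previously: import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
import org.apache.hadoop.util.Preconditions;

public class ExampleStore {
  private final String bucket;

  public ExampleStore(String bucket) {
    // Same call-site shape as the Guava class, so most files only change the import line.
    Preconditions.checkNotNull(bucket, "bucket must not be null");
    this.bucket = bucket;
  }
}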

@@ -24,7 +24,7 @@ import java.util.List;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import com.qcloud.cos.auth.AnonymousCOSCredentials;
 import com.qcloud.cos.auth.COSCredentials;
 import com.qcloud.cos.auth.COSCredentialsProvider;

@@ -19,7 +19,7 @@
 package org.apache.hadoop.fs.obs;
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.Futures;
 import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ListenableFuture;
 import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ListeningExecutorService;

@@ -18,7 +18,7 @@
 package org.apache.hadoop.fs.obs;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import com.obs.services.ObsClient;
 import com.obs.services.exception.ObsException;
 import com.obs.services.model.AbortMultipartUploadRequest;

@@ -19,7 +19,7 @@
 package org.apache.hadoop.fs.obs;
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSExceptionMessages;

@@ -18,7 +18,7 @@
 package org.apache.hadoop.fs.obs;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import com.obs.services.exception.ObsException;
 import java.io.IOException;

@@ -18,7 +18,7 @@
 package org.apache.hadoop.fs.obs;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import com.obs.services.ObsClient;
 import com.obs.services.exception.ObsException;
 import com.obs.services.model.GetObjectRequest;

@@ -18,7 +18,7 @@
 package org.apache.hadoop.fs.obs;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import com.obs.services.ObsClient;
 import com.obs.services.exception.ObsException;
 import com.obs.services.model.AbortMultipartUploadRequest;

@@ -146,7 +146,7 @@ import org.apache.hadoop.yarn.util.UnitsConversionUtil;
 import org.apache.hadoop.yarn.util.resource.ResourceUtils;
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -78,7 +78,7 @@ import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 import org.apache.hadoop.yarn.webapp.BadRequestException;
 import org.apache.hadoop.yarn.webapp.NotFoundException;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import com.google.inject.Inject;
 @Path("/ws/v1/mapreduce")

@@ -28,7 +28,7 @@ import java.util.Iterator;
 import java.util.Map;
 import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
-import static org.apache.hadoop.thirdparty.com.google.common.base.Preconditions.*;
+import static org.apache.hadoop.util.Preconditions.*;
 import org.apache.hadoop.thirdparty.com.google.common.collect.AbstractIterator;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Iterators;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;

@@ -18,7 +18,7 @@
 package org.apache.hadoop.mapreduce.counters;
-import static org.apache.hadoop.thirdparty.com.google.common.base.Preconditions.checkNotNull;
+import static org.apache.hadoop.util.Preconditions.checkNotNull;
 import java.io.DataInput;
 import java.io.DataOutput;
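
The static-import form above changes the same way: checkNotNull (or the wildcard form) is simply resolved from the hadoop-common class. A hypothetical call site, for illustration only:

// Hypothetical: the static-import form after the swap.
import static org.apache.hadoop.util.Preconditions.checkNotNull;

public class CounterNameHolder {
  private final String name;

  public CounterNameHolder(String name) {
    checkNotNull(name, "counter name must not be null");
    this.name = name;
  }
}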

@@ -37,7 +37,7 @@ import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.util.DurationInfo;
 import org.apache.hadoop.util.Progressable;
 import org.slf4j.Logger;

@@ -21,7 +21,7 @@ package org.apache.hadoop.mapreduce.lib.output;
 import java.io.IOException;
 import java.text.NumberFormat;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;

@@ -22,7 +22,7 @@ import java.io.IOException;
 import java.lang.reflect.Constructor;
 import java.lang.reflect.InvocationTargetException;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -20,7 +20,7 @@ package org.apache.hadoop.mapreduce.lib.output;
 import java.io.IOException;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -23,7 +23,7 @@ import java.nio.ByteBuffer;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.mapred.nativetask.NativeDataTarget;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 
 /**
  * DataOutputStream implementation which buffers data in a fixed-size

@@ -19,7 +19,7 @@ package org.apache.hadoop.mapred.nativetask.testutil;
 import java.util.Random;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.primitives.Ints;
 import org.apache.hadoop.thirdparty.com.google.common.primitives.Longs;

@@ -22,8 +22,7 @@ import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.EtagSource;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.LocatedFileStatus;
+import org.apache.hadoop.util.Preconditions;
-import static org.apache.hadoop.thirdparty.com.google.common.base.Preconditions.checkNotNull;
 
 /**
  * {@link LocatedFileStatus} extended to also carry an ETag.
@@ -38,7 +37,7 @@ public class AbfsLocatedFileStatus extends LocatedFileStatus implements EtagSource
   private final String etag;
 
   public AbfsLocatedFileStatus(FileStatus status, BlockLocation[] locations) {
-    super(checkNotNull(status), locations);
+    super(Preconditions.checkNotNull(status), locations);
     if (status instanceof EtagSource) {
      this.etag = ((EtagSource) status).getEtag();
    } else {

@@ -24,8 +24,6 @@ import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto;
-import org.apache.hadoop.util.Preconditions;
 
 @Private
 @Unstable
 public class ApplicationIdPBImpl extends ApplicationId {
@@ -46,24 +44,32 @@ public class ApplicationIdPBImpl extends ApplicationId {
   @Override
   public int getId() {
-    Preconditions.checkNotNull(proto);
+    if (proto == null) {
+      throw new NullPointerException("The argument object is NULL");
+    }
     return proto.getId();
   }
 
   @Override
   protected void setId(int id) {
-    Preconditions.checkNotNull(builder);
+    if (builder == null) {
+      throw new NullPointerException("The argument object is NULL");
+    }
     builder.setId(id);
   }
 
   @Override
   public long getClusterTimestamp() {
-    Preconditions.checkNotNull(proto);
+    if (proto == null) {
+      throw new NullPointerException("The argument object is NULL");
+    }
     return proto.getClusterTimestamp();
   }
 
   @Override
   protected void setClusterTimestamp(long clusterTimestamp) {
-    Preconditions.checkNotNull(builder);
+    if (builder == null) {
+      throw new NullPointerException("The argument object is NULL");
+    }
     builder.setClusterTimestamp((clusterTimestamp));
   }
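
Not every module takes the hadoop-common replacement: in the YARN protobuf record above and in the HBase timeline readers that follow, the import is dropped entirely and the check is written out as an explicit null test with the same message. A standalone sketch of that inlined pattern (class and field names are hypothetical):

// Hypothetical sketch: the inlined null test that replaces a
// Preconditions.checkNotNull(context, "context shouldn't be null") call.
public final class ValidateParamsExample {
  private final String context;

  public ValidateParamsExample(String context) {
    this.context = context;
  }

  void validateParams() {
    if (context == null) {
      throw new NullPointerException("context shouldn't be null");
    }
  }

  public static void main(String[] args) {
    new ValidateParamsExample("ctx").validateParams();
  }
}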

@@ -58,8 +58,6 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.common.RowKeyPrefix
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.TimelineStorageUtils;
 import org.apache.hadoop.yarn.webapp.BadRequestException;
-import org.apache.hadoop.util.Preconditions;
 
 /**
  * Timeline entity reader for application entities that are stored in the
  * application table.
@@ -336,21 +334,29 @@ class ApplicationEntityReader extends GenericEntityReader {
   @Override
   protected void validateParams() {
-    Preconditions.checkNotNull(getContext(), "context shouldn't be null");
-    Preconditions.checkNotNull(
-        getDataToRetrieve(), "data to retrieve shouldn't be null");
-    Preconditions.checkNotNull(getContext().getClusterId(),
-        "clusterId shouldn't be null");
-    Preconditions.checkNotNull(getContext().getEntityType(),
-        "entityType shouldn't be null");
+    if (getContext() == null) {
+      throw new NullPointerException("context shouldn't be null");
+    }
+    if (getDataToRetrieve() == null) {
+      throw new NullPointerException("data to retrieve shouldn't be null");
+    }
+    if (getContext().getClusterId() == null) {
+      throw new NullPointerException("clusterId shouldn't be null");
+    }
+    if (getContext().getEntityType() == null) {
+      throw new NullPointerException("entityType shouldn't be null");
+    }
     if (isSingleEntityRead()) {
-      Preconditions.checkNotNull(getContext().getAppId(),
-          "appId shouldn't be null");
+      if (getContext().getAppId() == null) {
+        throw new NullPointerException("appId shouldn't be null");
+      }
     } else {
-      Preconditions.checkNotNull(getContext().getUserId(),
-          "userId shouldn't be null");
-      Preconditions.checkNotNull(getContext().getFlowName(),
-          "flowName shouldn't be null");
+      if (getContext().getUserId() == null) {
+        throw new NullPointerException("userId shouldn't be null");
+      }
+      if (getContext().getFlowName() == null) {
+        throw new NullPointerException("flowName shouldn't be null");
+      }
     }
   }

@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.yarn.server.timelineservice.storage.reader;
-import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.Result;
@@ -115,11 +114,15 @@ public final class EntityTypeReader extends AbstractTimelineStorageReader {
   @Override
   protected void validateParams() {
-    Preconditions.checkNotNull(getContext(), "context shouldn't be null");
-    Preconditions.checkNotNull(getContext().getClusterId(),
-        "clusterId shouldn't be null");
-    Preconditions.checkNotNull(getContext().getAppId(),
-        "appId shouldn't be null");
+    if (getContext() == null) {
+      throw new NullPointerException("context shouldn't be null");
+    }
+    if (getContext().getClusterId() == null) {
+      throw new NullPointerException("clusterId shouldn't be null");
+    }
+    if (getContext().getAppId() == null) {
+      throw new NullPointerException("appId shouldn't be null");
+    }
   }
 
   /**

@@ -46,8 +46,6 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowActivityRo
 import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowActivityTableRW;
 import org.apache.hadoop.yarn.webapp.BadRequestException;
-import org.apache.hadoop.util.Preconditions;
 
 /**
  * Timeline entity reader for flow activity entities that are stored in the
  * flow activity table.
@@ -82,8 +80,10 @@ class FlowActivityEntityReader extends TimelineEntityReader {
   @Override
   protected void validateParams() {
-    Preconditions.checkNotNull(getContext().getClusterId(),
-        "clusterId shouldn't be null");
+    String clusterId = getContext().getClusterId();
+    if (clusterId == null) {
+      throw new NullPointerException("clusterId shouldn't be null");
+    }
   }
 
   @Override

@@ -57,8 +57,6 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunRowKeyP
 import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunTableRW;
 import org.apache.hadoop.yarn.webapp.BadRequestException;
-import org.apache.hadoop.util.Preconditions;
 
 /**
  * Timeline entity reader for flow run entities that are stored in the flow run
  * table.
@@ -86,18 +84,25 @@ class FlowRunEntityReader extends TimelineEntityReader {
   @Override
   protected void validateParams() {
-    Preconditions.checkNotNull(getContext(), "context shouldn't be null");
-    Preconditions.checkNotNull(getDataToRetrieve(),
-        "data to retrieve shouldn't be null");
-    Preconditions.checkNotNull(getContext().getClusterId(),
-        "clusterId shouldn't be null");
-    Preconditions.checkNotNull(getContext().getUserId(),
-        "userId shouldn't be null");
-    Preconditions.checkNotNull(getContext().getFlowName(),
-        "flowName shouldn't be null");
+    if (getContext() == null) {
+      throw new NullPointerException("context shouldn't be null");
+    }
+    if (getDataToRetrieve() == null) {
+      throw new NullPointerException("data to retrieve shouldn't be null");
+    }
+    if (getContext().getClusterId() == null) {
+      throw new NullPointerException("clusterId shouldn't be null");
+    }
+    if (getContext().getUserId() == null) {
+      throw new NullPointerException("userId shouldn't be null");
+    }
+    if (getContext().getFlowName() == null) {
+      throw new NullPointerException("flowName shouldn't be null");
+    }
     if (isSingleEntityRead()) {
-      Preconditions.checkNotNull(getContext().getFlowRunId(),
-          "flowRunId shouldn't be null");
+      if (getContext().getFlowRunId() == null) {
+        throw new NullPointerException("flowRunId shouldn't be null");
+      }
     }
     EnumSet<Field> fieldsToRetrieve = getDataToRetrieve().getFieldsToRetrieve();
     if (!isSingleEntityRead() && fieldsToRetrieve != null) {

@@ -63,8 +63,6 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityRowKey
 import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTableRW;
 import org.apache.hadoop.yarn.webapp.BadRequestException;
-import org.apache.hadoop.util.Preconditions;
 
 /**
  * Timeline entity reader for generic entities that are stored in the entity
  * table.
@@ -406,18 +404,25 @@ class GenericEntityReader extends TimelineEntityReader {
   @Override
   protected void validateParams() {
-    Preconditions.checkNotNull(getContext(), "context shouldn't be null");
-    Preconditions.checkNotNull(getDataToRetrieve(),
-        "data to retrieve shouldn't be null");
-    Preconditions.checkNotNull(getContext().getClusterId(),
-        "clusterId shouldn't be null");
-    Preconditions.checkNotNull(getContext().getAppId(),
-        "appId shouldn't be null");
-    Preconditions.checkNotNull(getContext().getEntityType(),
-        "entityType shouldn't be null");
+    if (getContext() == null) {
+      throw new NullPointerException("context shouldn't be null");
+    }
+    if (getDataToRetrieve() == null) {
+      throw new NullPointerException("data to retrieve shouldn't be null");
+    }
+    if (getContext().getClusterId() == null) {
+      throw new NullPointerException("clusterId shouldn't be null");
+    }
+    if (getContext().getAppId() == null) {
+      throw new NullPointerException("appId shouldn't be null");
+    }
+    if (getContext().getEntityType() == null) {
+      throw new NullPointerException("entityType shouldn't be null");
+    }
     if (isSingleEntityRead()) {
-      Preconditions.checkNotNull(getContext().getEntityId(),
-          "entityId shouldn't be null");
+      if (getContext().getEntityId() == null) {
+        throw new NullPointerException("entityId shouldn't be null");
+      }
     }
   }

@@ -56,8 +56,6 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.subapplication.SubA
 import org.apache.hadoop.yarn.server.timelineservice.storage.subapplication.SubApplicationTableRW;
 import org.apache.hadoop.yarn.webapp.BadRequestException;
-import org.apache.hadoop.util.Preconditions;
 
 class SubApplicationEntityReader extends GenericEntityReader {
   private static final SubApplicationTableRW SUB_APPLICATION_TABLE =
       new SubApplicationTableRW();
@@ -308,15 +306,21 @@ class SubApplicationEntityReader extends GenericEntityReader {
   @Override
   protected void validateParams() {
-    Preconditions.checkNotNull(getContext(), "context shouldn't be null");
-    Preconditions.checkNotNull(getDataToRetrieve(),
-        "data to retrieve shouldn't be null");
-    Preconditions.checkNotNull(getContext().getClusterId(),
-        "clusterId shouldn't be null");
-    Preconditions.checkNotNull(getContext().getDoAsUser(),
-        "DoAsUser shouldn't be null");
-    Preconditions.checkNotNull(getContext().getEntityType(),
-        "entityType shouldn't be null");
+    if (getContext() == null) {
+      throw new NullPointerException("context shouldn't be null");
+    }
+    if (getDataToRetrieve() == null) {
+      throw new NullPointerException("data to retrieve shouldn't be null");
+    }
+    if (getContext().getClusterId() == null) {
+      throw new NullPointerException("clusterId shouldn't be null");
+    }
+    if (getContext().getDoAsUser() == null) {
+      throw new NullPointerException("DoAsUser shouldn't be null");
+    }
+    if (getContext().getEntityType() == null) {
+      throw new NullPointerException("entityType shouldn't be null");
+    }
   }
 
   @Override

@@ -254,6 +254,14 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/x
                     <bannedImport>org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableListMultimap</bannedImport>
                   </bannedImports>
                 </restrictImports>
+                <restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
+                  <includeTestCode>true</includeTestCode>
+                  <reason>Use hadoop-common provided Preconditions rather than Guava provided</reason>
+                  <bannedImports>
+                    <bannedImport>org.apache.hadoop.thirdparty.com.google.common.base.Preconditions</bannedImport>
+                    <bannedImport>static org.apache.hadoop.thirdparty.com.google.common.base.Preconditions.**</bannedImport>
+                  </bannedImports>
+                </restrictImports>
               </rules>
             </configuration>
           </execution>
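
The added rule extends the existing restrict-imports enforcer configuration, so the ban is applied at build time with test sources included (includeTestCode). Roughly, and assuming a module covered by this enforcer execution, the distinction it draws is:

// Rejected by the new rule (shaded Guava Preconditions, plain or static import):
//   import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
//   import static org.apache.hadoop.thirdparty.com.google.common.base.Preconditions.checkNotNull;
// Accepted replacement from hadoop-common:
import org.apache.hadoop.util.Preconditions;

public class BannedImportExample {
  public static void main(String[] args) {
    Preconditions.checkNotNull(args, "args shouldn't be null");
  }
}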