HADOOP-17970. unguava: remove Preconditions from hdfs-projects modules (#3566)
commit 62c86eaa0e
parent 9cfd8d0a83
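The pattern is the same across every file touched below: the banned Guava import (either the shaded org.apache.hadoop.thirdparty copy or com.google.common directly) is swapped for the hadoop-common replacement, and each affected module's pom gains a maven-enforcer-plugin rule so any reintroduction fails the build at process-sources. A minimal sketch of the call-site pattern, assuming hadoop-common is on the classpath; the class and method below are hypothetical, but the checkNotNull and two-argument checkArgument forms both appear in the hunks that follow:

// Before (now banned by the enforcer rule):
//   import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
// After:
import org.apache.hadoop.util.Preconditions;

public class PreconditionsExample {
  static String describe(String path, int replication) {
    // Same call shapes as the Guava class, so call sites stay unchanged.
    Preconditions.checkNotNull(path);
    Preconditions.checkArgument(replication > 0,
        "Replication must be positive: " + replication);
    return path + ":" + replication;
  }

  public static void main(String[] args) {
    System.out.println(describe("/user/alice/file", 3)); // /user/alice/file:3
  }
}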
@@ -178,6 +178,38 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
 <excludePackageNames>org.apache.hadoop.hdfs.protocol.proto</excludePackageNames>
 </configuration>
 </plugin>
+<plugin>
+<groupId>org.apache.maven.plugins</groupId>
+<artifactId>maven-enforcer-plugin</artifactId>
+<dependencies>
+<dependency>
+<groupId>de.skuzzle.enforcer</groupId>
+<artifactId>restrict-imports-enforcer-rule</artifactId>
+<version>${restrict-imports.enforcer.version}</version>
+</dependency>
+</dependencies>
+<executions>
+<execution>
+<id>banned-illegal-imports</id>
+<phase>process-sources</phase>
+<goals>
+<goal>enforce</goal>
+</goals>
+<configuration>
+<rules>
+<restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
+<includeTestCode>true</includeTestCode>
+<reason>Use hadoop-common provided VisibleForTesting rather than the one provided by Guava</reason>
+<bannedImports>
+<bannedImport>org.apache.hadoop.thirdparty.com.google.common.base.Preconditions</bannedImport>
+<bannedImport>com.google.common.base.Preconditions</bannedImport>
+</bannedImports>
+</restrictImports>
+</rules>
+</configuration>
+</execution>
+</executions>
+</plugin>
 </plugins>
 </build>
 </project>

@@ -43,7 +43,7 @@ import org.apache.hadoop.net.ScriptBasedMapping;
 import org.apache.hadoop.util.ReflectionUtils;

 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;

 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -196,7 +196,7 @@ import org.slf4j.LoggerFactory;

 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.net.InetAddresses;

 /********************************************************

@@ -73,7 +73,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;

 import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.Write.RECOVER_LEASE_ON_CLOSE_EXCEPTION_DEFAULT;
 import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.Write.RECOVER_LEASE_ON_CLOSE_EXCEPTION_KEY;

@@ -18,7 +18,7 @@
 package org.apache.hadoop.hdfs;

 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.CreateFlag;

@@ -19,7 +19,7 @@ package org.apache.hadoop.hdfs;

 import org.apache.hadoop.net.DomainNameResolver;
 import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
 import org.apache.hadoop.thirdparty.com.google.common.primitives.SignedBytes;
 import java.net.URISyntaxException;

@@ -145,7 +145,10 @@ public class DFSUtilClient {
    */
   public static byte[][] bytes2byteArray(byte[] bytes, int len,
       byte separator) {
-    Preconditions.checkPositionIndex(len, bytes.length);
+    if (len < 0 || len > bytes.length) {
+      throw new IndexOutOfBoundsException(
+          "Incorrect index [len, size] [" + len + ", " + bytes.length + "]");
+    }
     if (len == 0) {
       return new byte[][]{null};
     }

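The DFSUtilClient hunk above does more than swap an import: Guava's Preconditions.checkPositionIndex(len, size) evidently has no counterpart in the replacement class, so the commit inlines the bounds test. A standalone plain-JDK sketch of the inlined contract (the class and method names here are illustrative, not Hadoop code):

public class PositionIndexCheck {
  // Mirrors Guava's checkPositionIndex(len, size): valid when 0 <= len <= size.
  static void checkLen(int len, int size) {
    if (len < 0 || len > size) {
      throw new IndexOutOfBoundsException(
          "Incorrect index [len, size] [" + len + ", " + size + "]");
    }
  }

  public static void main(String[] args) {
    checkLen(3, 8);    // ok: inside [0, 8]
    checkLen(8, 8);    // ok: a position index may equal the size
    try {
      checkLen(9, 8);  // rejected: past the end
    } catch (IndexOutOfBoundsException e) {
      System.out.println("rejected: " + e.getMessage());
    }
  }
}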
@@ -22,7 +22,7 @@ package org.apache.hadoop.hdfs;
 import org.apache.hadoop.ipc.RpcNoSuchMethodException;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.commons.collections.list.TreeList;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;

@@ -36,7 +36,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;

 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;

@@ -24,7 +24,7 @@ import java.util.List;
 import java.util.Map.Entry;

 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.collect.LinkedListMultimap;

 import org.apache.hadoop.classification.InterfaceAudience;

@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hdfs;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.util.StripedBlockUtil;

@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hdfs;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.util.StripedBlockUtil;

@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hdfs;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.fs.ChecksumException;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;

@@ -27,7 +27,7 @@ import org.apache.hadoop.fs.XAttr.NameSpace;
 import org.apache.hadoop.util.Lists;
 import org.apache.hadoop.util.StringUtils;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;

 @InterfaceAudience.Private

@@ -31,7 +31,7 @@ import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;

 /**
  * The Hdfs implementation of {@link FSDataInputStream}.

@@ -28,7 +28,7 @@ import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hdfs.DFSOutputStream;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;

 /**
  * The Hdfs implementation of {@link FSDataOutputStream}.

@@ -75,7 +75,7 @@ import org.apache.hadoop.util.PerformanceAdvisory;
 import org.apache.hadoop.util.Time;

 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;

 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -18,7 +18,7 @@
 package org.apache.hadoop.hdfs.client.impl;

 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.ReadOption;
 import org.apache.hadoop.fs.StorageType;

@@ -18,7 +18,7 @@
 package org.apache.hadoop.hdfs.client.impl;

 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;

@@ -25,7 +25,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.Path;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.hdfs.DFSUtilClient;

 /**

@@ -26,7 +26,7 @@ import org.apache.hadoop.fs.BatchedRemoteIterator;
 import org.apache.hadoop.fs.InvalidRequestException;
 import org.apache.hadoop.ipc.RemoteException;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.tracing.TraceScope;
 import org.apache.hadoop.tracing.Tracer;


@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hdfs.protocol;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.commons.lang3.builder.EqualsBuilder;
 import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.hadoop.classification.InterfaceAudience;

@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hdfs.protocol;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.commons.lang3.builder.EqualsBuilder;
 import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.hadoop.classification.InterfaceAudience;

@@ -18,7 +18,7 @@

 package org.apache.hadoop.hdfs.protocol;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.commons.lang3.builder.ToStringBuilder;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.ipc.RemoteException;

@@ -22,7 +22,7 @@ import java.util.Arrays;
 import java.util.Comparator;
 import java.util.List;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.StorageType;

@@ -18,7 +18,7 @@
 package org.apache.hadoop.hdfs.protocol;

 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.BatchedRemoteIterator.BatchedListEntries;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ReencryptionInfoProto;

@@ -20,7 +20,7 @@ package org.apache.hadoop.hdfs.protocol;
 import java.util.Collections;
 import java.util.List;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;

 import org.apache.hadoop.hdfs.DFSUtilClient;


@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hdfs.protocol;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ReencryptionInfoProto;

 /**

@@ -27,7 +27,7 @@ import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PacketHeaderProto;
 import org.apache.hadoop.hdfs.util.ByteBufferOutputStream;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.primitives.Shorts;
 import org.apache.hadoop.thirdparty.com.google.common.primitives.Ints;
 import org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException;

@@ -31,7 +31,7 @@ import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
 import org.apache.hadoop.util.DirectBufferPool;
 import org.apache.hadoop.io.IOUtils;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.primitives.Ints;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -28,7 +28,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.cache.CacheBuilder;
 import org.apache.hadoop.thirdparty.com.google.common.cache.CacheLoader;
 import org.apache.hadoop.thirdparty.com.google.common.cache.LoadingCache;

@@ -22,7 +22,7 @@ package org.apache.hadoop.hdfs.server.datanode;
 import com.fasterxml.jackson.annotation.JsonInclude;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;


@@ -23,7 +23,7 @@ package org.apache.hadoop.hdfs.server.datanode;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
 import com.fasterxml.jackson.databind.SerializationFeature;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;


@@ -26,7 +26,7 @@ import org.apache.hadoop.hdfs.shortcircuit.DfsClientShmManager.EndpointShmManage
 import org.apache.hadoop.net.unix.DomainSocket;
 import org.apache.hadoop.net.unix.DomainSocketWatcher;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;

 /**
  * DfsClientShm is a subclass of ShortCircuitShm which is used by the

@@ -44,7 +44,7 @@ import org.apache.hadoop.net.unix.DomainSocket;
 import org.apache.hadoop.net.unix.DomainSocketWatcher;

 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;

 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -30,7 +30,7 @@ import org.apache.hadoop.hdfs.client.impl.DfsClientConf.ShortCircuitConf;
 import org.apache.hadoop.net.unix.DomainSocket;
 import org.apache.hadoop.util.PerformanceAdvisory;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.cache.Cache;
 import org.apache.hadoop.thirdparty.com.google.common.cache.CacheBuilder;


@@ -55,7 +55,7 @@ import org.apache.hadoop.util.Time;
 import org.apache.hadoop.util.Waitable;

 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;

 import org.slf4j.Logger;

@@ -32,7 +32,7 @@ import org.apache.hadoop.io.nativeio.NativeIO;
 import org.apache.hadoop.util.Time;

 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;

 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -39,7 +39,7 @@ import org.slf4j.LoggerFactory;

 import sun.misc.Unsafe;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.collect.ComparisonChain;
 import org.apache.hadoop.thirdparty.com.google.common.primitives.Ints;


@@ -26,7 +26,7 @@ import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.util.Time;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;


@@ -28,7 +28,7 @@ import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedStripedBlock;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
 import org.apache.hadoop.hdfs.DFSStripedOutputStream;

@@ -19,7 +19,7 @@ package org.apache.hadoop.hdfs.web;

 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.FileChecksum;

@@ -138,7 +138,7 @@ import org.slf4j.LoggerFactory;

 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;

 import static org.apache.hadoop.fs.impl.PathCapabilitiesSupport.validatePathCapabilityArgs;


@@ -343,6 +343,38 @@
 <excludeFilterFile>${basedir}/dev-support/findbugsExcludeFile.xml</excludeFilterFile>
 </configuration>
 </plugin>
+<plugin>
+<groupId>org.apache.maven.plugins</groupId>
+<artifactId>maven-enforcer-plugin</artifactId>
+<dependencies>
+<dependency>
+<groupId>de.skuzzle.enforcer</groupId>
+<artifactId>restrict-imports-enforcer-rule</artifactId>
+<version>${restrict-imports.enforcer.version}</version>
+</dependency>
+</dependencies>
+<executions>
+<execution>
+<id>banned-illegal-imports</id>
+<phase>process-sources</phase>
+<goals>
+<goal>enforce</goal>
+</goals>
+<configuration>
+<rules>
+<restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
+<includeTestCode>true</includeTestCode>
+<reason>Use hadoop-common provided VisibleForTesting rather than the one provided by Guava</reason>
+<bannedImports>
+<bannedImport>org.apache.hadoop.thirdparty.com.google.common.base.Preconditions</bannedImport>
+<bannedImport>com.google.common.base.Preconditions</bannedImport>
+</bannedImports>
+</restrictImports>
+</rules>
+</configuration>
+</execution>
+</executions>
+</plugin>
 </plugins>
 </build>


@@ -73,7 +73,7 @@ import org.json.simple.JSONObject;
 import org.json.simple.parser.JSONParser;
 import org.json.simple.parser.ParseException;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;

 import java.io.BufferedInputStream;

@@ -219,6 +219,38 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
 </execution>
 </executions>
 </plugin>
+<plugin>
+<groupId>org.apache.maven.plugins</groupId>
+<artifactId>maven-enforcer-plugin</artifactId>
+<dependencies>
+<dependency>
+<groupId>de.skuzzle.enforcer</groupId>
+<artifactId>restrict-imports-enforcer-rule</artifactId>
+<version>${restrict-imports.enforcer.version}</version>
+</dependency>
+</dependencies>
+<executions>
+<execution>
+<id>banned-illegal-imports</id>
+<phase>process-sources</phase>
+<goals>
+<goal>enforce</goal>
+</goals>
+<configuration>
+<rules>
+<restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
+<includeTestCode>true</includeTestCode>
+<reason>Use hadoop-common provided VisibleForTesting rather than the one provided by Guava</reason>
+<bannedImports>
+<bannedImport>org.apache.hadoop.thirdparty.com.google.common.base.Preconditions</bannedImport>
+<bannedImport>com.google.common.base.Preconditions</bannedImport>
+</bannedImports>
+</restrictImports>
+</rules>
+</configuration>
+</execution>
+</executions>
+</plugin>
 </plugins>
 </build>
 </profile>

@@ -29,7 +29,7 @@ import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hdfs.DFSClient;

@@ -19,7 +19,7 @@ package org.apache.hadoop.hdfs.nfs.nfs3;

 import java.util.Comparator;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;

 /**
  * OffsetRange is the range of read/write request. A single point (e.g.,[5,5])

@@ -58,7 +58,7 @@ import org.apache.hadoop.util.Daemon;
 import org.apache.hadoop.util.Time;

 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;


@@ -31,7 +31,7 @@ import org.apache.hadoop.util.Daemon;
 import org.apache.hadoop.util.Time;

 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;

 /**

@@ -30,7 +30,7 @@ import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.nfs.nfs3.Nfs3Constant.WriteStableHow;

 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;

 /**
  * WriteCtx saves the context of one write request, such as request, channel,

@@ -305,6 +305,38 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
 </filesets>
 </configuration>
 </plugin>
+<plugin>
+<groupId>org.apache.maven.plugins</groupId>
+<artifactId>maven-enforcer-plugin</artifactId>
+<dependencies>
+<dependency>
+<groupId>de.skuzzle.enforcer</groupId>
+<artifactId>restrict-imports-enforcer-rule</artifactId>
+<version>${restrict-imports.enforcer.version}</version>
+</dependency>
+</dependencies>
+<executions>
+<execution>
+<id>banned-illegal-imports</id>
+<phase>process-sources</phase>
+<goals>
+<goal>enforce</goal>
+</goals>
+<configuration>
+<rules>
+<restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
+<includeTestCode>true</includeTestCode>
+<reason>Use hadoop-common provided VisibleForTesting rather than the one provided by Guava</reason>
+<bannedImports>
+<bannedImport>org.apache.hadoop.thirdparty.com.google.common.base.Preconditions</bannedImport>
+<bannedImport>com.google.common.base.Preconditions</bannedImport>
+</bannedImports>
+</restrictImports>
+</rules>
+</configuration>
+</execution>
+</executions>
+</plugin>
 </plugins>
 </build>
 <profiles>

@@ -39,7 +39,7 @@ import org.apache.hadoop.hdfs.server.federation.store.records.MembershipState;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;

 /**
  * Order the destinations based on available space. This resolver uses a

@@ -30,7 +30,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;

 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;

@@ -447,6 +447,38 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
 </filesets>
 </configuration>
 </plugin>
+<plugin>
+<groupId>org.apache.maven.plugins</groupId>
+<artifactId>maven-enforcer-plugin</artifactId>
+<dependencies>
+<dependency>
+<groupId>de.skuzzle.enforcer</groupId>
+<artifactId>restrict-imports-enforcer-rule</artifactId>
+<version>${restrict-imports.enforcer.version}</version>
+</dependency>
+</dependencies>
+<executions>
+<execution>
+<id>banned-illegal-imports</id>
+<phase>process-sources</phase>
+<goals>
+<goal>enforce</goal>
+</goals>
+<configuration>
+<rules>
+<restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
+<includeTestCode>true</includeTestCode>
+<reason>Use hadoop-common provided VisibleForTesting rather than the one provided by Guava</reason>
+<bannedImports>
+<bannedImport>org.apache.hadoop.thirdparty.com.google.common.base.Preconditions</bannedImport>
+<bannedImport>com.google.common.base.Preconditions</bannedImport>
+</bannedImports>
+</restrictImports>
+</rules>
+</configuration>
+</execution>
+</executions>
+</plugin>
 </plugins>
 </build>


@@ -110,7 +110,7 @@ import org.apache.hadoop.util.ToolRunner;

 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.protobuf.BlockingService;

 @InterfaceAudience.Private

@@ -310,7 +310,11 @@ public class DFSUtil {
     // specifically not using StringBuilder to more efficiently build
     // string w/o excessive byte[] copies and charset conversions.
     final int range = offset + length;
-    Preconditions.checkPositionIndexes(offset, range, components.length);
+    if (offset < 0 || range < offset || range > components.length) {
+      throw new IndexOutOfBoundsException(
+          "Incorrect index [offset, range, size] ["
+              + offset + ", " + range + ", " + components.length + "]");
+    }
     if (length == 0) {
       return "";
     }

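Likewise, the DFSUtil hunk above inlines Guava's Preconditions.checkPositionIndexes(start, end, size), whose contract is 0 <= start <= end <= size. Because range is computed as offset + length, the range < offset clause rejects a negative length and also catches int overflow of the addition. A standalone plain-JDK sketch (class and method names are illustrative only):

public class PositionIndexesCheck {
  static void checkRange(int offset, int length, int size) {
    final int range = offset + length;
    if (offset < 0 || range < offset || range > size) {
      throw new IndexOutOfBoundsException(
          "Incorrect index [offset, range, size] ["
              + offset + ", " + range + ", " + size + "]");
    }
  }

  public static void main(String[] args) {
    checkRange(2, 3, 8);                    // ok: [2, 5] lies inside [0, 8]
    try {
      checkRange(4, Integer.MAX_VALUE, 8);  // overflow: range wraps below offset
    } catch (IndexOutOfBoundsException e) {
      System.out.println("rejected: " + e.getMessage());
    }
  }
}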
@@ -57,7 +57,7 @@ import org.apache.hadoop.ipc.StandbyException;
 import org.apache.hadoop.security.UserGroupInformation;

 import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.util.Lists;
 import org.slf4j.LoggerFactory;


@@ -18,7 +18,7 @@
 package org.apache.hadoop.hdfs.net;

 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;

 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.StorageType;

@@ -18,7 +18,7 @@
 package org.apache.hadoop.hdfs.net;

 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.fs.StorageType;
 import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor;
 import org.apache.hadoop.net.InnerNode;

@@ -35,7 +35,7 @@ import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.ReplicaState;
 import org.apache.hadoop.hdfs.server.datanode.Replica;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi;
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.protobuf.ByteString;
 import org.apache.hadoop.thirdparty.protobuf.CodedInputStream;
 import org.apache.hadoop.thirdparty.protobuf.CodedOutputStream;

@@ -17,8 +17,6 @@
  */
 package org.apache.hadoop.hdfs.protocol;

-import static org.apache.hadoop.thirdparty.com.google.common.base.Preconditions.checkNotNull;
-
 import java.util.Date;

 import org.apache.hadoop.classification.InterfaceAudience;

@@ -28,7 +26,7 @@ import org.apache.hadoop.hdfs.server.namenode.CachePool;
 import org.apache.hadoop.util.IntrusiveCollection;
 import org.apache.hadoop.util.IntrusiveCollection.Element;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;

 /**
  * Namenode class that tracks state related to a cached path.

@@ -63,7 +61,7 @@ public final class CacheDirective implements IntrusiveCollection.Element {
       short replication, long expiryTime) {
     Preconditions.checkArgument(id > 0);
     this.id = id;
-    this.path = checkNotNull(path);
+    this.path = Preconditions.checkNotNull(path);
     Preconditions.checkArgument(replication > 0);
     this.replication = replication;
     this.expiryTime = expiryTime;

@@ -61,7 +61,7 @@ import org.apache.hadoop.hdfs.server.protocol.StorageReceivedDeletedBlocks;
 import org.apache.hadoop.hdfs.server.protocol.StorageReport;
 import org.apache.hadoop.hdfs.server.protocol.VolumeFailureSummary;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.protobuf.RpcController;
 import org.apache.hadoop.thirdparty.protobuf.ServiceException;


@@ -36,7 +36,7 @@ import org.apache.hadoop.hdfs.server.protocol.RemoteEditLogManifest;

 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableList;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
 import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ListenableFuture;

@@ -58,7 +58,7 @@ import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.util.StopWatch;

 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.net.InetAddresses;
 import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.FutureCallback;
 import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.Futures;

@@ -30,7 +30,7 @@ import org.apache.hadoop.util.StopWatch;
 import org.apache.hadoop.util.Timer;

 import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
 import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.FutureCallback;
 import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.Futures;

@@ -23,7 +23,7 @@ import java.util.Map;
 import org.apache.hadoop.util.StringUtils;

 import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;

 /**
  * Exception thrown when too many exceptions occur while gathering

@@ -61,7 +61,7 @@ import org.apache.hadoop.log.LogThrottlingHelper.LogAction;

 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.protobuf.TextFormat;

 /**

@@ -23,7 +23,7 @@ import java.util.Map.Entry;
 import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.PrepareRecoveryResponseProto;
 import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.SegmentStateProto;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.collect.ComparisonChain;
 import org.apache.hadoop.thirdparty.com.google.common.primitives.Booleans;


@@ -73,7 +73,7 @@ import org.apache.hadoop.util.Time;

 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableList;
 import org.apache.hadoop.thirdparty.protobuf.TextFormat;


@@ -18,7 +18,7 @@
 package org.apache.hadoop.hdfs.qjournal.server;

 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.base.Strings;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
 import org.apache.hadoop.util.Lists;

@@ -46,7 +46,7 @@ import org.apache.hadoop.util.Time;
 import org.apache.hadoop.util.Timer;

 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.collect.HashMultiset;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Multiset;


@ -50,7 +50,7 @@ import org.apache.hadoop.security.token.Token;
|
||||||
import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager;
|
import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager;
|
||||||
import org.apache.hadoop.security.token.delegation.DelegationKey;
|
import org.apache.hadoop.security.token.delegation.DelegationKey;
|
||||||
|
|
||||||
import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
|
import org.apache.hadoop.util.Preconditions;
|
||||||
import org.apache.hadoop.thirdparty.protobuf.ByteString;
|
import org.apache.hadoop.thirdparty.protobuf.ByteString;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|
|
@ -17,7 +17,6 @@
|
||||||
*/
|
*/
|
||||||
package org.apache.hadoop.hdfs.server.balancer;
|
package org.apache.hadoop.hdfs.server.balancer;
|
||||||
|
|
||||||
import static org.apache.hadoop.thirdparty.com.google.common.base.Preconditions.checkArgument;
|
|
||||||
import static org.apache.hadoop.hdfs.protocol.BlockType.CONTIGUOUS;
|
import static org.apache.hadoop.hdfs.protocol.BlockType.CONTIGUOUS;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
|
@ -75,7 +74,7 @@ import org.apache.hadoop.util.Time;
|
||||||
import org.apache.hadoop.util.Tool;
|
import org.apache.hadoop.util.Tool;
|
||||||
import org.apache.hadoop.util.ToolRunner;
|
import org.apache.hadoop.util.ToolRunner;
|
||||||
|
|
||||||
import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
|
import org.apache.hadoop.util.Preconditions;
|
||||||
|
|
||||||
/** <p>The balancer is a tool that balances disk space usage on an HDFS cluster
|
/** <p>The balancer is a tool that balances disk space usage on an HDFS cluster
|
||||||
* when some datanodes become full or when new empty nodes join the cluster.
|
* when some datanodes become full or when new empty nodes join the cluster.
|
||||||
|
@ -998,7 +997,7 @@ public class Balancer {
|
||||||
try {
|
try {
|
||||||
for(int i = 0; i < args.length; i++) {
|
for(int i = 0; i < args.length; i++) {
|
||||||
if ("-threshold".equalsIgnoreCase(args[i])) {
|
if ("-threshold".equalsIgnoreCase(args[i])) {
|
||||||
checkArgument(++i < args.length,
|
Preconditions.checkArgument(++i < args.length,
|
||||||
"Threshold value is missing: args = " + Arrays.toString(args));
|
"Threshold value is missing: args = " + Arrays.toString(args));
|
||||||
try {
|
try {
|
||||||
double threshold = Double.parseDouble(args[i]);
|
double threshold = Double.parseDouble(args[i]);
|
||||||
|
@ -1015,7 +1014,7 @@ public class Balancer {
|
||||||
throw e;
|
throw e;
|
||||||
}
|
}
|
||||||
} else if ("-policy".equalsIgnoreCase(args[i])) {
|
} else if ("-policy".equalsIgnoreCase(args[i])) {
|
||||||
checkArgument(++i < args.length,
|
Preconditions.checkArgument(++i < args.length,
|
||||||
"Policy value is missing: args = " + Arrays.toString(args));
|
"Policy value is missing: args = " + Arrays.toString(args));
|
||||||
try {
|
try {
|
||||||
b.setBalancingPolicy(BalancingPolicy.parse(args[i]));
|
b.setBalancingPolicy(BalancingPolicy.parse(args[i]));
|
||||||
|
@ -1036,7 +1035,7 @@ public class Balancer {
|
||||||
i = processHostList(args, i, "source", sourceNodes);
|
i = processHostList(args, i, "source", sourceNodes);
|
||||||
b.setSourceNodes(sourceNodes);
|
b.setSourceNodes(sourceNodes);
|
||||||
} else if ("-blockpools".equalsIgnoreCase(args[i])) {
|
} else if ("-blockpools".equalsIgnoreCase(args[i])) {
|
||||||
checkArgument(
|
Preconditions.checkArgument(
|
||||||
++i < args.length,
|
++i < args.length,
|
||||||
"blockpools value is missing: args = "
|
"blockpools value is missing: args = "
|
||||||
+ Arrays.toString(args));
|
+ Arrays.toString(args));
|
||||||
|
@ -1045,7 +1044,7 @@ public class Balancer {
|
||||||
+ blockpools.toString());
|
+ blockpools.toString());
|
||||||
b.setBlockpools(blockpools);
|
b.setBlockpools(blockpools);
|
||||||
} else if ("-idleiterations".equalsIgnoreCase(args[i])) {
|
} else if ("-idleiterations".equalsIgnoreCase(args[i])) {
|
||||||
checkArgument(++i < args.length,
|
Preconditions.checkArgument(++i < args.length,
|
||||||
"idleiterations value is missing: args = " + Arrays
|
"idleiterations value is missing: args = " + Arrays
|
||||||
.toString(args));
|
.toString(args));
|
||||||
int maxIdleIteration = Integer.parseInt(args[i]);
|
int maxIdleIteration = Integer.parseInt(args[i]);
|
||||||
|
@ -1061,7 +1060,7 @@ public class Balancer {
|
||||||
b.setRunAsService(true);
|
b.setRunAsService(true);
|
||||||
LOG.info("Balancer will run as a long running service");
|
LOG.info("Balancer will run as a long running service");
|
||||||
} else if ("-hotBlockTimeInterval".equalsIgnoreCase(args[i])) {
|
} else if ("-hotBlockTimeInterval".equalsIgnoreCase(args[i])) {
|
||||||
checkArgument(++i < args.length,
|
Preconditions.checkArgument(++i < args.length,
|
||||||
"hotBlockTimeInterval value is missing: args = "
|
"hotBlockTimeInterval value is missing: args = "
|
||||||
+ Arrays.toString(args));
|
+ Arrays.toString(args));
|
||||||
long hotBlockTimeInterval = Long.parseLong(args[i]);
|
long hotBlockTimeInterval = Long.parseLong(args[i]);
|
||||||
|
@ -1077,7 +1076,7 @@ public class Balancer {
|
||||||
+ Arrays.toString(args));
|
+ Arrays.toString(args));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
checkArgument(excludedNodes == null || includedNodes == null,
|
Preconditions.checkArgument(excludedNodes == null || includedNodes == null,
|
||||||
"-exclude and -include options cannot be specified together.");
|
"-exclude and -include options cannot be specified together.");
|
||||||
} catch(RuntimeException e) {
|
} catch(RuntimeException e) {
|
||||||
printUsage(System.err);
|
printUsage(System.err);
|
||||||
|
|
|
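Because Balancer drops the static import of checkArgument (the @@ -17,7 +17,6 @@ hunk above), every bare checkArgument(...) in its argument parser becomes a qualified Preconditions.checkArgument(...). A minimal sketch of the recurring validate-then-consume flag pattern those hunks share; the FlagParser class and parseThreshold method are hypothetical names, not the actual Balancer code:

import java.util.Arrays;

import org.apache.hadoop.util.Preconditions;

// Illustrative only: validate that a flag is followed by a value before
// consuming it, as the Balancer parser does for -threshold, -policy,
// -blockpools, -idleiterations and -hotBlockTimeInterval.
public final class FlagParser {
  static double parseThreshold(String[] args, int i) {
    // Throws IllegalArgumentException with this message if no value follows.
    Preconditions.checkArgument(i + 1 < args.length,
        "Threshold value is missing: args = " + Arrays.toString(args));
    return Double.parseDouble(args[i + 1]);
  }

  public static void main(String[] args) {
    // Prints 10.0
    System.out.println(parseThreshold(new String[] {"-threshold", "10.0"}, 0));
  }
}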
@@ -85,7 +85,7 @@ import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Time;

 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;

 /** Dispatching block replica moves between datanodes. */
 @InterfaceAudience.Private
@@ -31,7 +31,7 @@ import java.util.Map;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicLong;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.RateLimiter;
 import org.apache.hadoop.ha.HAServiceProtocol;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
@@ -29,7 +29,7 @@ import java.util.List;
 import java.util.Random;
 import java.util.Set;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -18,7 +18,7 @@

 package org.apache.hadoop.hdfs.server.blockmanagement;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.StorageType;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
@@ -18,7 +18,7 @@
 package org.apache.hadoop.hdfs.server.blockmanagement;

 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.BlockType;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
@@ -22,7 +22,7 @@ import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.StorageType;
 import org.apache.hadoop.hdfs.protocol.Block;
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hdfs.server.blockmanagement;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.StorageType;
 import org.apache.hadoop.hdfs.protocol.Block;
@@ -18,7 +18,7 @@
 package org.apache.hadoop.hdfs.server.blockmanagement;

 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.BlockType;
@@ -126,7 +126,7 @@ import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Time;

 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;

 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -36,7 +36,7 @@ import org.apache.hadoop.net.NetworkTopology;
 import org.apache.hadoop.util.Daemon;

 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -25,7 +25,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.AddBlockFlag;
@@ -26,7 +26,7 @@ import static org.apache.hadoop.util.Time.monotonicNow;
 import java.util.*;
 import java.util.concurrent.TimeUnit;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.AddBlockFlag;
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hdfs.server.blockmanagement;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.util.Time;
@@ -18,7 +18,7 @@
 package org.apache.hadoop.hdfs.server.blockmanagement;

 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;

 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.StorageType;
@@ -19,7 +19,7 @@ package org.apache.hadoop.hdfs.server.blockmanagement;

 import static org.apache.hadoop.hdfs.server.blockmanagement.CorruptReplicasMap.Reason;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.hdfs.protocol.Block;

 /**
@@ -55,7 +55,7 @@ import org.apache.hadoop.util.Time;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;

 /**
  * Scans the namesystem, scheduling blocks to be cached as appropriate.
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hdfs.server.blockmanagement;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.server.namenode.INode;
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.hdfs.server.blockmanagement;

-import static org.apache.hadoop.thirdparty.com.google.common.base.Preconditions.checkArgument;
 import static org.apache.hadoop.util.Time.monotonicNow;

 import java.util.Queue;
@@ -30,6 +29,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.hdfs.server.namenode.Namesystem;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -105,7 +105,7 @@ public class DatanodeAdminManager {
         DFSConfigKeys.DFS_NAMENODE_DECOMMISSION_INTERVAL_KEY,
         DFSConfigKeys.DFS_NAMENODE_DECOMMISSION_INTERVAL_DEFAULT,
         TimeUnit.SECONDS);
-    checkArgument(intervalSecs >= 0, "Cannot set a negative " +
+    Preconditions.checkArgument(intervalSecs >= 0, "Cannot set a negative " +
         "value for " + DFSConfigKeys.DFS_NAMENODE_DECOMMISSION_INTERVAL_KEY);

     int blocksPerInterval = conf.getInt(
@@ -122,7 +122,7 @@ public class DatanodeAdminManager {
           DFSConfigKeys.DFS_NAMENODE_DECOMMISSION_BLOCKS_PER_INTERVAL_KEY);
     }

-    checkArgument(blocksPerInterval > 0,
+    Preconditions.checkArgument(blocksPerInterval > 0,
         "Must set a positive value for "
         + DFSConfigKeys.DFS_NAMENODE_DECOMMISSION_BLOCKS_PER_INTERVAL_KEY);

@@ -130,8 +130,8 @@ public class DatanodeAdminManager {
         DFSConfigKeys.DFS_NAMENODE_DECOMMISSION_MAX_CONCURRENT_TRACKED_NODES,
         DFSConfigKeys
             .DFS_NAMENODE_DECOMMISSION_MAX_CONCURRENT_TRACKED_NODES_DEFAULT);
-    checkArgument(maxConcurrentTrackedNodes >= 0, "Cannot set a negative " +
-        "value for "
+    Preconditions.checkArgument(maxConcurrentTrackedNodes >= 0,
+        "Cannot set a negative value for "
         + DFSConfigKeys.DFS_NAMENODE_DECOMMISSION_MAX_CONCURRENT_TRACKED_NODES);

     Class cls = null;
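The DatanodeAdminManager hunks show the same substitution applied to configuration validation: values read from Configuration fail fast through a qualified Preconditions.checkArgument. A minimal sketch, assuming hadoop-common's Preconditions.checkArgument(boolean, Object) mirrors the Guava signature as the calls above indicate; the IntervalConfig class and key name are hypothetical:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.Preconditions;

// Illustrative only; not the DatanodeAdminManager code.
public final class IntervalConfig {
  static final String INTERVAL_KEY = "example.decommission.interval.secs";

  static long readIntervalSecs(Configuration conf) {
    long intervalSecs = conf.getLong(INTERVAL_KEY, 30L);
    // Reject nonsensical settings up front, as the hunks above do.
    Preconditions.checkArgument(intervalSecs >= 0,
        "Cannot set a negative value for " + INTERVAL_KEY);
    return intervalSecs;
  }
}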
@@ -24,7 +24,7 @@ import static org.apache.hadoop.util.Time.monotonicNow;

 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.net.InetAddresses;

 import org.apache.hadoop.fs.StorageType;
@@ -18,7 +18,7 @@
 package org.apache.hadoop.hdfs.server.blockmanagement;


-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.collect.HashMultimap;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Multimap;
 import org.apache.hadoop.thirdparty.com.google.common.collect.UnmodifiableIterator;
@@ -31,7 +31,7 @@ import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.hdfs.server.namenode.FSDirectory;
 import org.apache.hadoop.hdfs.server.namenode.MetaRecoveryContext;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.hdfs.server.namenode.NameNodeLayoutVersion;
 import org.apache.hadoop.util.StringUtils;

@@ -54,7 +54,7 @@ import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.util.VersionInfo;

 import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -54,7 +54,7 @@ import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.hdfs.web.URLConnectionFactory;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;

 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -18,7 +18,7 @@
 package org.apache.hadoop.hdfs.server.datanode;

 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;

 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.StorageType;
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hdfs.server.datanode;

-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;

 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.DFSUtilClient;
@@ -33,7 +33,7 @@ import org.apache.hadoop.util.Sets;

 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
 import org.slf4j.Logger;

@@ -48,7 +48,7 @@ import org.apache.hadoop.util.Daemon;
 import org.apache.hadoop.util.Lists;

 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;

 /**
  * Manages storage for the set of BlockPoolSlices which share a particular
@@ -19,7 +19,7 @@ package org.apache.hadoop.hdfs.server.datanode;

 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
Some files were not shown because too many files have changed in this diff.