HDFS-14221. Replace Guava Optional with Java Optional. Contributed by Arpit Agarwal.

Author: Arpit Agarwal, 2019-01-21 20:44:37 -08:00
Parent: de34fc148c
Commit: 1ff658b2ef
8 changed files with 20 additions and 31 deletions

View File

@@ -18,7 +18,7 @@
package org.apache.hadoop.hdfs.server.datanode.checker;
import com.google.common.base.Optional;
import java.util.Optional;
import com.google.common.util.concurrent.ListenableFuture;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;

View File

@@ -19,7 +19,6 @@
package org.apache.hadoop.hdfs.server.datanode.checker;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.FutureCallback;
@@ -44,6 +43,7 @@ import javax.annotation.Nullable;
import java.nio.channels.ClosedChannelException;
import java.util.Collections;
import java.util.HashSet;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;

View File

@@ -20,7 +20,6 @@ package org.apache.hadoop.hdfs.server.datanode.checker;
import static org.apache.hadoop.hdfs.DFSConfigKeys.*;
import com.google.common.base.Optional;
import com.google.common.collect.Maps;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
@@ -47,6 +46,7 @@ import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executors;

View File

@@ -18,7 +18,6 @@
package org.apache.hadoop.hdfs.server.datanode.checker;
import com.google.common.base.Optional;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
@@ -35,6 +34,7 @@ import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.WeakHashMap;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
@@ -120,7 +120,7 @@ public class ThrottledAsyncChecker<K, V> implements AsyncChecker<K, V> {
public Optional<ListenableFuture<V>> schedule(Checkable<K, V> target,
K context) {
if (checksInProgress.containsKey(target)) {
return Optional.absent();
return Optional.empty();
}
if (completedChecks.containsKey(target)) {
@@ -130,7 +130,7 @@ public class ThrottledAsyncChecker<K, V> implements AsyncChecker<K, V> {
LOG.debug("Skipped checking {}. Time since last check {}ms " +
"is less than the min gap {}ms.",
target, msSinceLastCheck, minMsBetweenChecks);
return Optional.absent();
return Optional.empty();
}
}
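The functional change in this file is confined to the two return statements above: Guava's Optional.absent() becomes java.util.Optional.empty(), and the surrounding schedule() logic is untouched. As a standalone sketch of the API mapping (illustrative only, not code from this patch; the class and method below are hypothetical):

import java.util.Optional;

// Sketch of the Guava -> java.util.Optional calls exercised by this patch:
//   com.google.common.base.Optional.absent() -> java.util.Optional.empty()
//   Optional.of(value) and isPresent()/get() keep the same spelling.
public class OptionalMigrationSketch {
  // Hypothetical method mirroring the schedule() pattern above: empty when
  // a check is already in progress, present when a new check is scheduled.
  static Optional<String> maybeSchedule(boolean checkInProgress) {
    if (checkInProgress) {
      return Optional.empty();        // was Optional.absent() with Guava
    }
    return Optional.of("scheduled");  // unchanged between the two APIs
  }

  public static void main(String[] args) {
    System.out.println(maybeSchedule(true));   // prints Optional.empty
    System.out.println(maybeSchedule(false));  // prints Optional[scheduled]
  }
}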

View File

@@ -18,7 +18,6 @@
package org.apache.hadoop.hdfs.server.datanode.checker;
import com.google.common.base.Optional;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import org.apache.hadoop.hdfs.HdfsConfiguration;
@@ -40,6 +39,7 @@ import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;

View File

@@ -18,7 +18,6 @@
package org.apache.hadoop.hdfs.server.datanode.checker;
import com.google.common.base.Optional;
import com.google.common.base.Supplier;
import com.google.common.util.concurrent.ListenableFuture;
import org.apache.hadoop.test.GenericTestUtils;
@@ -27,6 +26,7 @@ import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Optional;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;

View File

@@ -17,28 +17,10 @@
*/
package org.apache.hadoop.hdfs.server.datanode.checker;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Matchers.anySet;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.timeout;
import static org.mockito.Mockito.verify;
import com.google.common.base.Optional;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.LogVerificationAppender;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi;
import org.apache.hadoop.util.FakeTimer;
import org.apache.log4j.Logger;
import org.apache.log4j.spi.LoggingEvent;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.*;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
@@ -46,15 +28,22 @@ import org.junit.rules.TestName;
import org.junit.rules.Timeout;
import org.slf4j.LoggerFactory;
import java.util.Set;
import java.util.Optional;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReentrantLock;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.timeout;
import static org.mockito.Mockito.verify;
public class TestThrottledAsyncCheckerTimeout {
public static final org.slf4j.Logger LOG =
LoggerFactory.getLogger(TestThrottledAsyncCheckerTimeout.class);

View File

@@ -19,7 +19,6 @@
package org.apache.hadoop.hdfs.server.namenode;
import com.google.common.base.Joiner;
import com.google.common.base.Optional;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
@@ -35,6 +34,8 @@ import org.junit.rules.Timeout;
import java.net.Inet4Address;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.*;
@@ -122,8 +123,7 @@ public class TestAuditLogAtDebug {
@Test
public void testEmptyDebugCommands() {
DefaultAuditLogger logger = makeSpyLogger(
Level.INFO, Optional.<List<String>>absent());
DefaultAuditLogger logger = makeSpyLogger(Level.INFO, Optional.empty());
logDummyCommandToAuditLog(logger, DUMMY_COMMAND_1);
logDummyCommandToAuditLog(logger, DUMMY_COMMAND_2);
verify(logger, times(2)).logAuditMessage(anyString());
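A side effect visible in this last hunk: Guava needed an explicit type witness, Optional.<List<String>>absent(), to match the makeSpyLogger parameter, whereas java.util.Optional.empty() lets the compiler infer the type argument from the call site, so the invocation fits on one line. A minimal sketch of that inference (the helper below is hypothetical, not the test's makeSpyLogger):

import java.util.Arrays;
import java.util.List;
import java.util.Optional;

public class EmptyOptionalInferenceSketch {
  // Hypothetical helper with the same parameter shape as makeSpyLogger:
  // an Optional<List<String>> of debug commands.
  static int commandCount(Optional<List<String>> debugCommands) {
    return debugCommands.map(List::size).orElse(0);
  }

  public static void main(String[] args) {
    // With Guava this call would need Optional.<List<String>>absent();
    // java.util.Optional infers <List<String>> from the parameter type.
    System.out.println(commandCount(Optional.empty()));                  // 0
    System.out.println(commandCount(Optional.of(Arrays.asList("-ls")))); // 1
  }
}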