HADOOP-16213. Update guava to 27.0-jre. Contributed by Gabor Bota.

commit fee1e67453
parent a9b8310584
@@ -409,6 +409,13 @@
     <Bug pattern="NP_NULL_PARAM_DEREF"/>
   </Match>
 
+  <!-- propertyName is checked with isNullOrEmpty (fix after guava 27) -->
+  <Match>
+    <Class name="org.apache.hadoop.conf.Configuration"/>
+    <Method name="asXmlDocument"/>
+    <Bug pattern="NP_PARAMETER_MUST_BE_NONNULL_BUT_MARKED_AS_NULLABLE"/>
+  </Match>
+
   <Match>
     <Class name="org.apache.hadoop.ipc.ExternalCall"/>
     <Filed name="done"/>
@@ -69,6 +69,7 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicReference;
 
+import javax.annotation.Nullable;
 import javax.xml.parsers.DocumentBuilderFactory;
 import javax.xml.parsers.ParserConfigurationException;
 import javax.xml.stream.XMLInputFactory;
@@ -3445,7 +3446,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
     writeXml(new OutputStreamWriter(out, "UTF-8"));
   }
 
-  public void writeXml(Writer out) throws IOException {
+  public void writeXml(@Nullable Writer out) throws IOException {
     writeXml(null, out);
   }
 
@@ -3473,7 +3474,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    * <p>
    * @param out the writer to write to.
    */
-  public void writeXml(String propertyName, Writer out)
+  public void writeXml(@Nullable String propertyName, Writer out)
       throws IOException, IllegalArgumentException {
     Document doc = asXmlDocument(propertyName);
 
@@ -3495,7 +3496,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
   /**
    * Return the XML DOM corresponding to this Configuration.
    */
-  private synchronized Document asXmlDocument(String propertyName)
+  private synchronized Document asXmlDocument(@Nullable String propertyName)
      throws IOException, IllegalArgumentException {
     Document doc;
     try {
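
The spotbugs exclusion above pairs with the @Nullable parameters added here; its comment notes that propertyName is guarded with isNullOrEmpty before use. A minimal standalone sketch of that guard pattern, with illustrative (non-Hadoop) class and method names:

    import javax.annotation.Nullable;
    import com.google.common.base.Strings;

    // Illustrative sketch only: a @Nullable parameter guarded by
    // Strings.isNullOrEmpty, mirroring how a null propertyName means
    // "write everything" in Configuration#writeXml.
    public class NullableParamSketch {
      static String describe(@Nullable String propertyName) {
        if (Strings.isNullOrEmpty(propertyName)) {
          return "all properties";
        }
        return "only " + propertyName;
      }

      public static void main(String[] args) {
        System.out.println(describe(null));
        System.out.println(describe("io.file.buffer.size"));
      }
    }
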
@@ -375,7 +375,7 @@ public class Groups {
           backgroundRefreshException.incrementAndGet();
           backgroundRefreshRunning.decrementAndGet();
         }
-      });
+      }, MoreExecutors.directExecutor());
       return listenableFuture;
     }
 
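
This change (repeated in several files below) reflects that Guava 27 no longer supplies an implicit executor for Futures.addCallback; the executor has to be passed explicitly. A self-contained sketch of the three-argument form; the class name and the toy computation are illustrative only:

    import com.google.common.util.concurrent.FutureCallback;
    import com.google.common.util.concurrent.Futures;
    import com.google.common.util.concurrent.ListenableFuture;
    import com.google.common.util.concurrent.ListeningExecutorService;
    import com.google.common.util.concurrent.MoreExecutors;
    import java.util.concurrent.Executors;

    // Illustrative sketch only: the executor argument is mandatory in Guava 27.
    public class DirectExecutorCallbackSketch {
      public static void main(String[] args) {
        ListeningExecutorService pool =
            MoreExecutors.listeningDecorator(Executors.newSingleThreadExecutor());
        ListenableFuture<Integer> future = pool.submit(() -> 40 + 2);

        Futures.addCallback(future, new FutureCallback<Integer>() {
          @Override
          public void onSuccess(Integer result) {
            System.out.println("got " + result);
          }
          @Override
          public void onFailure(Throwable t) {
            t.printStackTrace();
          }
        }, MoreExecutors.directExecutor());  // runs the callback on the completing thread

        pool.shutdown();
      }
    }

directExecutor() keeps the old behaviour of running the callback inline; a real thread pool can be passed instead when the callback does heavier work.
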
@@ -107,7 +107,7 @@ public class SemaphoredDelegatingExecutor extends
       queueingPermits.acquire();
     } catch (InterruptedException e) {
       Thread.currentThread().interrupt();
-      return Futures.immediateFailedCheckedFuture(e);
+      return Futures.immediateFailedFuture(e);
     }
     return super.submit(new CallableWithPermitRelease<>(task));
   }
@@ -118,7 +118,7 @@ public class SemaphoredDelegatingExecutor extends
       queueingPermits.acquire();
     } catch (InterruptedException e) {
       Thread.currentThread().interrupt();
-      return Futures.immediateFailedCheckedFuture(e);
+      return Futures.immediateFailedFuture(e);
     }
     return super.submit(new RunnableWithPermitRelease(task), result);
   }
@@ -129,7 +129,7 @@ public class SemaphoredDelegatingExecutor extends
       queueingPermits.acquire();
     } catch (InterruptedException e) {
       Thread.currentThread().interrupt();
-      return Futures.immediateFailedCheckedFuture(e);
+      return Futures.immediateFailedFuture(e);
     }
     return super.submit(new RunnableWithPermitRelease(task));
   }
@@ -173,10 +173,10 @@ public class SemaphoredDelegatingExecutor extends
   public String toString() {
     final StringBuilder sb = new StringBuilder(
         "SemaphoredDelegatingExecutor{");
-    sb.append("permitCount=").append(getPermitCount());
-    sb.append(", available=").append(getAvailablePermits());
-    sb.append(", waiting=").append(getWaitingCount());
-    sb.append('}');
+    sb.append("permitCount=").append(getPermitCount())
+        .append(", available=").append(getAvailablePermits())
+        .append(", waiting=").append(getWaitingCount())
+        .append('}');
     return sb.toString();
   }
 
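
Futures.immediateFailedCheckedFuture is gone in Guava 27, so the code above switches to the unchecked immediateFailedFuture. A hedged standalone sketch of the same interrupt-handling shape; names are illustrative, not the Hadoop implementation:

    import com.google.common.util.concurrent.Futures;
    import com.google.common.util.concurrent.ListenableFuture;
    import java.util.concurrent.Semaphore;

    // Illustrative sketch only: return an already-failed future when the
    // calling thread is interrupted while waiting for a permit.
    public class FailedFutureSketch {
      static ListenableFuture<String> tryAcquire(Semaphore permits) {
        try {
          permits.acquire();
        } catch (InterruptedException e) {
          Thread.currentThread().interrupt();
          // replaces Futures.immediateFailedCheckedFuture(e)
          return Futures.immediateFailedFuture(e);
        }
        return Futures.immediateFuture("acquired");
      }

      public static void main(String[] args) {
        System.out.println(tryAcquire(new Semaphore(1)).isDone());
      }
    }
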
@@ -172,7 +172,7 @@ public class ZKUtil {
       return valInConf;
     }
     String path = valInConf.substring(1).trim();
-    return Files.toString(new File(path), Charsets.UTF_8).trim();
+    return Files.asCharSource(new File(path), Charsets.UTF_8).read().trim();
   }
 
   /**
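
Guava's Files.toString(File, Charset) is deprecated and on its way out, so the read path moves to Files.asCharSource(...).read(). A small self-contained sketch; the file name and contents are made up:

    import com.google.common.base.Charsets;
    import com.google.common.io.Files;
    import java.io.File;
    import java.io.IOException;

    // Illustrative sketch only: read a whole file into a String with Guava 27.
    public class ReadFileSketch {
      public static void main(String[] args) throws IOException {
        File f = File.createTempFile("zk-auth", ".txt");
        f.deleteOnExit();
        java.nio.file.Files.write(f.toPath(), "digest:user:pass\n".getBytes(Charsets.UTF_8));

        // was: Files.toString(f, Charsets.UTF_8).trim()
        String contents = Files.asCharSource(f, Charsets.UTF_8).read().trim();
        System.out.println(contents);
      }
    }
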
@@ -41,8 +41,8 @@ public class TestTableMapping {
   public void testResolve() throws IOException {
     File mapFile = File.createTempFile(getClass().getSimpleName() +
         ".testResolve", ".txt");
-    Files.write(hostName1 + " /rack1\n" +
-        hostName2 + "\t/rack2\n", mapFile, Charsets.UTF_8);
+    Files.asCharSink(mapFile, Charsets.UTF_8).write(
+        hostName1 + " /rack1\n" + hostName2 + "\t/rack2\n");
     mapFile.deleteOnExit();
     TableMapping mapping = new TableMapping();
 
@@ -64,8 +64,8 @@ public class TestTableMapping {
   public void testTableCaching() throws IOException {
     File mapFile = File.createTempFile(getClass().getSimpleName() +
         ".testTableCaching", ".txt");
-    Files.write(hostName1 + " /rack1\n" +
-        hostName2 + "\t/rack2\n", mapFile, Charsets.UTF_8);
+    Files.asCharSink(mapFile, Charsets.UTF_8).write(
+        hostName1 + " /rack1\n" + hostName2 + "\t/rack2\n");
     mapFile.deleteOnExit();
     TableMapping mapping = new TableMapping();
 
@@ -128,8 +128,8 @@ public class TestTableMapping {
   public void testClearingCachedMappings() throws IOException {
     File mapFile = File.createTempFile(getClass().getSimpleName() +
         ".testClearingCachedMappings", ".txt");
-    Files.write(hostName1 + " /rack1\n" +
-        hostName2 + "\t/rack2\n", mapFile, Charsets.UTF_8);
+    Files.asCharSink(mapFile, Charsets.UTF_8).write(
+        hostName1 + " /rack1\n" + hostName2 + "\t/rack2\n");
     mapFile.deleteOnExit();
 
     TableMapping mapping = new TableMapping();
@@ -147,7 +147,7 @@ public class TestTableMapping {
     assertEquals("/rack1", result.get(0));
     assertEquals("/rack2", result.get(1));
 
-    Files.write("", mapFile, Charsets.UTF_8);
+    Files.asCharSink(mapFile, Charsets.UTF_8).write("");
 
     mapping.reloadCachedMappings();
 
@@ -166,7 +166,7 @@ public class TestTableMapping {
   public void testBadFile() throws IOException {
     File mapFile = File.createTempFile(getClass().getSimpleName() +
         ".testBadFile", ".txt");
-    Files.write("bad contents", mapFile, Charsets.UTF_8);
+    Files.asCharSink(mapFile, Charsets.UTF_8).write("bad contents");
     mapFile.deleteOnExit();
     TableMapping mapping = new TableMapping();
 
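
On the write side, Files.write(CharSequence, File, Charset) is replaced throughout by Files.asCharSink(...).write(...). A hedged sketch, also showing the append mode the sink API offers; the file name and contents are made up:

    import com.google.common.base.Charsets;
    import com.google.common.io.FileWriteMode;
    import com.google.common.io.Files;
    import java.io.File;
    import java.io.IOException;

    // Illustrative sketch only: Guava 27 replacement for
    // Files.write(CharSequence, File, Charset).
    public class WriteFileSketch {
      public static void main(String[] args) throws IOException {
        File mapFile = File.createTempFile("rack-mapping", ".txt");
        mapFile.deleteOnExit();

        // Overwrite, as the TableMapping tests above do.
        Files.asCharSink(mapFile, Charsets.UTF_8).write("host1 /rack1\n");

        // Append mode is available through the same sink API.
        Files.asCharSink(mapFile, Charsets.UTF_8, FileWriteMode.APPEND)
            .write("host2\t/rack2\n");

        System.out.println(Files.asCharSource(mapFile, Charsets.UTF_8).read());
      }
    }
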
@@ -434,7 +434,8 @@ public class TestSecurityUtil {
     Configuration conf = new Configuration();
     File passwordTxtFile = File.createTempFile(
         getClass().getSimpleName() + ".testAuthAtPathNotation-", ".txt");
-    Files.write(ZK_AUTH_VALUE, passwordTxtFile, StandardCharsets.UTF_8);
+    Files.asCharSink(passwordTxtFile, StandardCharsets.UTF_8)
+        .write(ZK_AUTH_VALUE);
     try {
       conf.set(CommonConfigurationKeys.ZK_AUTH,
           "@" + passwordTxtFile.getAbsolutePath());
@@ -131,7 +131,7 @@ public class TestZKUtil {
     assertEquals("x", ZKUtil.resolveConfIndirection("x"));
 
     TEST_FILE.getParentFile().mkdirs();
-    Files.write("hello world", TEST_FILE, Charsets.UTF_8);
+    Files.asCharSink(TEST_FILE, Charsets.UTF_8).write("hello world");
     assertEquals("hello world", ZKUtil.resolveConfIndirection(
         "@" + TEST_FILE.getAbsolutePath()));
 
@@ -196,7 +196,7 @@ public class LocalResolver extends RouterResolver<String, String> {
       try {
         String nsId = nn.getNameserviceId();
         String rpcAddress = nn.getRpcAddress();
-        String hostname = HostAndPort.fromString(rpcAddress).getHostText();
+        String hostname = HostAndPort.fromString(rpcAddress).getHost();
         ret.put(hostname, nsId);
         if (hostname.equals(localHostname)) {
           ret.put(localIp, nsId);
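
HostAndPort#getHostText was renamed to getHost, and the old accessor is no longer available by Guava 27; that rename is all the LocalResolver change tracks. A standalone sketch; the address string is made up:

    import com.google.common.net.HostAndPort;

    // Illustrative sketch only: parsing an RPC address string with Guava 27.
    public class HostAndPortSketch {
      public static void main(String[] args) {
        HostAndPort hp = HostAndPort.fromString("namenode-0.example.com:8020");
        System.out.println(hp.getHost());  // was hp.getHostText()
        System.out.println(hp.getPort());  // safe here because a port is present
      }
    }
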
@@ -192,6 +192,12 @@
     <Bug pattern="NP_NULL_PARAM_DEREF" />
   </Match>
 
+  <!-- guava 27.0 update: @Nullable is not detected, however it's there -->
+  <Match>
+    <Class name="org.apache.hadoop.hdfs.qjournal.server.Journal" />
+    <Method name="getPersistedPaxosData" />
+    <Bug pattern="NP_NULL_PARAM_DEREF" />
+  </Match>
 
   <!-- guava 27.0 update: @Nullable is not detected, however it's there -->
   <Match>
@@ -446,7 +446,7 @@ public class IPCLoggerChannel implements AsyncLogger {
           public void onSuccess(Void t) {
             unreserveQueueSpace(data.length);
           }
-        });
+        }, MoreExecutors.directExecutor());
       }
     }
     return ret;
@@ -22,6 +22,7 @@ import java.util.Map.Entry;
 import java.util.concurrent.TimeoutException;
 import java.util.concurrent.TimeUnit;
 
+import com.google.common.util.concurrent.MoreExecutors;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.util.StopWatch;
 import org.apache.hadoop.util.Timer;
@@ -80,7 +81,7 @@ class QuorumCall<KEY, RESULT> {
         public void onSuccess(RESULT res) {
           qr.addResult(e.getKey(), res);
         }
-      });
+      }, MoreExecutors.directExecutor());
     }
     return qr;
   }
@@ -24,6 +24,7 @@ import com.google.common.collect.Sets;
 import com.google.common.util.concurrent.FutureCallback;
 import com.google.common.util.concurrent.Futures;
 import com.google.common.util.concurrent.ListenableFuture;
+import com.google.common.util.concurrent.MoreExecutors;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
@@ -224,12 +225,12 @@ public class DatasetVolumeChecker {
       Futures.addCallback(olf.get(),
           new ResultHandler(reference, healthyVolumes, failedVolumes,
               numVolumes, new Callback() {
                 @Override
                 public void call(Set<FsVolumeSpi> ignored1,
                                  Set<FsVolumeSpi> ignored2) {
                   latch.countDown();
                 }
-              }));
+              }), MoreExecutors.directExecutor());
     } else {
       IOUtils.cleanup(null, reference);
       if (numVolumes.decrementAndGet() == 0) {
@@ -182,7 +182,7 @@ public class ThrottledAsyncChecker<K, V> implements AsyncChecker<K, V> {
               t, timer.monotonicNow()));
         }
       }
-    });
+    }, MoreExecutors.directExecutor());
   }
 
   /**
@@ -20,6 +20,7 @@ package org.apache.hadoop.hdfs.server.datanode.checker;
 import com.google.common.util.concurrent.FutureCallback;
 import com.google.common.util.concurrent.Futures;
 import com.google.common.util.concurrent.ListenableFuture;
+import com.google.common.util.concurrent.MoreExecutors;
 import org.apache.hadoop.util.FakeTimer;
 import org.junit.Before;
 import org.junit.Rule;
@@ -101,7 +102,7 @@ public class TestThrottledAsyncCheckerTimeout {
         numCallbackInvocationsFailure.incrementAndGet();
         callbackResult.set(true);
       }
-    });
+    }, MoreExecutors.directExecutor());
 
     while (!callbackResult.get()) {
       // Wait for the callback
@@ -133,7 +134,8 @@ public class TestThrottledAsyncCheckerTimeout {
         .schedule(target, true);
 
     assertTrue(olf1.isPresent());
-    Futures.addCallback(olf1.get(), futureCallback);
+    Futures.addCallback(olf1.get(), futureCallback,
+        MoreExecutors.directExecutor());
 
     // Verify that timeout results in only 1 onFailure call and 0 onSuccess
     // calls.
@@ -149,7 +151,8 @@ public class TestThrottledAsyncCheckerTimeout {
         .schedule(target, true);
 
     assertTrue(olf2.isPresent());
-    Futures.addCallback(olf2.get(), futureCallback);
+    Futures.addCallback(olf2.get(), futureCallback,
+        MoreExecutors.directExecutor());
 
     // Verify that normal check (dummy) results in only 1 onSuccess call.
     // Number of times onFailure is invoked should remain the same i.e. 1.
@@ -187,7 +190,7 @@ public class TestThrottledAsyncCheckerTimeout {
         throwable[0] = t;
         callbackResult.set(true);
       }
-    });
+    }, MoreExecutors.directExecutor());
 
     while (!callbackResult.get()) {
       // Wait for the callback
@@ -163,7 +163,7 @@ public class TestDFSHAAdminMiniCluster {
     assertEquals(0, runTool("-ns", "minidfs-ns", "-failover", "nn2", "nn1"));
 
     // Fencer has not run yet, since none of the above required fencing
-    assertEquals("", Files.toString(tmpFile, Charsets.UTF_8));
+    assertEquals("", Files.asCharSource(tmpFile, Charsets.UTF_8).read());
 
     // Test failover with fencer and forcefence option
     assertEquals(0, runTool("-failover", "nn1", "nn2", "--forcefence"));
@@ -171,8 +171,8 @@ public class TestDFSHAAdminMiniCluster {
     // The fence script should run with the configuration from the target
     // node, rather than the configuration from the fencing node. Strip
     // out any trailing spaces and CR/LFs which may be present on Windows.
-    String fenceCommandOutput =Files.toString(tmpFile, Charsets.UTF_8).
-        replaceAll(" *[\r\n]+", "");
+    String fenceCommandOutput = Files.asCharSource(tmpFile, Charsets.UTF_8)
+        .read().replaceAll(" *[\r\n]+", "");
     assertEquals("minidfs-ns.nn1 " + nn1Port + " nn1", fenceCommandOutput);
     tmpFile.delete();
 
@@ -120,7 +120,8 @@ public class LocatedFileStatusFetcher {
       runningTasks.incrementAndGet();
       ListenableFuture<ProcessInitialInputPathCallable.Result> future = exec
          .submit(new ProcessInitialInputPathCallable(p, conf, inputFilter));
-      Futures.addCallback(future, processInitialInputPathCallback);
+      Futures.addCallback(future, processInitialInputPathCallback,
+          MoreExecutors.directExecutor());
     }
 
     runningTasks.decrementAndGet();
@@ -267,7 +268,8 @@ public class LocatedFileStatusFetcher {
             ListenableFuture<ProcessInputDirCallable.Result> future = exec
                 .submit(new ProcessInputDirCallable(result.fs, fileStatus,
                     recursive, inputFilter));
-            Futures.addCallback(future, processInputDirCallback);
+            Futures.addCallback(future, processInputDirCallback,
+                MoreExecutors.directExecutor());
           }
         }
         decrementRunningAndCheckCompletion();
@@ -353,7 +355,8 @@ public class LocatedFileStatusFetcher {
             ListenableFuture<ProcessInputDirCallable.Result> future = exec
                 .submit(new ProcessInputDirCallable(result.fs, matched,
                     recursive, inputFilter));
-            Futures.addCallback(future, processInputDirCallback);
+            Futures.addCallback(future, processInputDirCallback,
+                MoreExecutors.directExecutor());
           }
         }
         decrementRunningAndCheckCompletion();
@@ -88,7 +88,7 @@
     <spotbugs.version>3.1.0-RC1</spotbugs.version>
     <dnsjava.version>2.1.7</dnsjava.version>
 
-    <guava.version>11.0.2</guava.version>
+    <guava.version>27.0-jre</guava.version>
     <guice.version>4.0</guice.version>
     <joda-time.version>2.9.9</joda-time.version>
 
@@ -669,4 +669,39 @@
     <Bug pattern="EI_EXPOSE_REP" />
   </Match>
 
+  <!-- The called method signature is String emptyToNull(@Nullable String string) in guava 27, so this should be ignored -->
+  <Match>
+    <Class name="org.apache.hadoop.yarn.server.nodemanager.NodeHealthCheckerService"/>
+    <Method name="getHealthReport" />
+    <Bug pattern="NP_NULL_PARAM_DEREF"/>
+  </Match>
+
+  <!-- The variable is not used, but it's defined for the document model. -->
+  <Match>
+    <Class name="org.apache.hadoop.yarn.server.timelineservice.documentstore.collection.document.entity.TimelineEventSubDoc"/>
+    <Method name="setValid" />
+    <Bug pattern="URF_UNREAD_FIELD"/>
+  </Match>
+
+  <!-- The variable is not used, but it's defined for the document model. -->
+  <Match>
+    <Class name="org.apache.hadoop.yarn.server.timelineservice.documentstore.collection.document.entity.TimelineMetricSubDoc"/>
+    <Method name="setValid" />
+    <Bug pattern="URF_UNREAD_FIELD"/>
+  </Match>
+
+  <!-- The called method signature is public boolean set(@Nullable V value) in guava 27, so this should be ignored -->
+  <Match>
+    <Class name="org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore$UpdateAppTransition"/>
+    <Method name="transition" />
+    <Bug pattern="NP_NONNULL_PARAM_VIOLATION"/>
+  </Match>
+
+  <!-- The called method signature is public boolean set(@Nullable V value) in guava 27, so this should be ignored -->
+  <Match>
+    <Class name="org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler"/>
+    <Method name="updateApplicationPriority" />
+    <Bug pattern="NP_NONNULL_PARAM_VIOLATION"/>
+  </Match>
+
 </FindBugsFilter>
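
The first exclusion above exists because Strings.emptyToNull both accepts and returns @Nullable values in Guava 27, which spotbugs then flags at the call site. A small sketch of that behaviour; the class name is illustrative:

    import com.google.common.base.Strings;

    // Illustrative sketch only: emptyToNull may receive and return null,
    // which is what the NP_NULL_PARAM_DEREF exclusion works around.
    public class EmptyToNullSketch {
      public static void main(String[] args) {
        System.out.println(Strings.emptyToNull(""));         // null
        System.out.println(Strings.emptyToNull(null));       // null
        System.out.println(Strings.emptyToNull("healthy"));  // healthy
      }
    }
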
@@ -87,7 +87,7 @@ public class ZookeeperUtils {
   public static String buildHostsOnlyList(List<HostAndPort> hostAndPorts) {
     StringBuilder sb = new StringBuilder();
     for (HostAndPort hostAndPort : hostAndPorts) {
-      sb.append(hostAndPort.getHostText()).append(",");
+      sb.append(hostAndPort.getHost()).append(",");
     }
     if (sb.length() > 0) {
       sb.delete(sb.length() - 1, sb.length());
@@ -386,7 +386,8 @@ public class ServiceTestUtils {
       fs = new SliderFileSystem(conf);
       fs.setAppDir(new Path(serviceBasePath.toString()));
     } catch (IOException e) {
-      Throwables.propagate(e);
+      Throwables.throwIfUnchecked(e);
+      throw new RuntimeException(e);
     }
   }
 
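
Throwables.propagate is deprecated in Guava 27, and its documented replacement is the two-line pattern used above: rethrow unchecked throwables as-is, then wrap whatever is left. A hedged standalone sketch; the helper and class names are illustrative:

    import com.google.common.base.Throwables;
    import java.util.concurrent.Callable;

    // Illustrative sketch only: the recommended replacement for
    // Throwables.propagate(e).
    public class PropagateReplacementSketch {
      static <T> T call(Callable<T> body) {
        try {
          return body.call();
        } catch (Exception e) {
          Throwables.throwIfUnchecked(e);  // rethrows RuntimeException/Error unchanged
          throw new RuntimeException(e);   // wraps checked exceptions
        }
      }

      public static void main(String[] args) {
        System.out.println(call(() -> "ok"));
      }
    }
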
@@ -1172,7 +1172,8 @@ public class TestAMRestart extends ParameterizedSchedulerTestBase {
           return true;
         }
       } catch (Exception e) {
-        Throwables.propagate(e);
+        Throwables.throwIfUnchecked(e);
+        throw new RuntimeException(e);
       }
       return false;
     }, 2000, 200000);