HADOOP-14386. Rewind trunk from Guava 21.0 back to Guava 11.0.2.

parent c60164fb60
commit 543aac9f28
@@ -20,12 +20,9 @@ package org.apache.hadoop.fs.shell;
 import java.io.IOException;
 import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.Locale;
 import java.util.Map;
 import java.util.Map.Entry;
 
-import com.google.common.base.Enums;
-import com.google.common.base.Function;
 import com.google.common.base.Preconditions;
 
 import org.apache.hadoop.HadoopIllegalArgumentException;
@@ -66,8 +63,6 @@ class XAttrCommands extends FsCommand {
     " and values encoded as hexadecimal and base64 are prefixed with " +
     "0x and 0s, respectively.\n" +
     "<path>: The file or directory.\n";
-  private final static Function<String, XAttrCodec> enValueOfFunc =
-      Enums.stringConverter(XAttrCodec.class);
 
   private String name = null;
   private boolean dump = false;

@@ -79,7 +74,7 @@ class XAttrCommands extends FsCommand {
     String en = StringUtils.popOptionWithArgument("-e", args);
     if (en != null) {
       try {
-        encoding = enValueOfFunc.apply(StringUtils.toUpperCase(en));
+        encoding = XAttrCodec.valueOf(StringUtils.toUpperCase(en));
       } catch (IllegalArgumentException e) {
         throw new IllegalArgumentException(
             "Invalid/unsupported encoding option specified: " + en);
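Note: Enums.stringConverter, which backed enValueOfFunc, is not available in Guava 11.0.2, so the lookup falls back to the enum's own valueOf. A minimal JDK-only sketch of that pattern, using a hypothetical Codec enum rather than Hadoop's XAttrCodec:

import java.util.Locale;

public class EnumLookupSketch {
  // Hypothetical stand-in for XAttrCodec, used only for this sketch.
  enum Codec { TEXT, HEX, BASE64 }

  public static void main(String[] args) {
    String en = "hex";
    // Plain JDK lookup, as used after the revert; an unknown constant
    // throws IllegalArgumentException, which the command rewraps above.
    Codec codec = Codec.valueOf(en.toUpperCase(Locale.ENGLISH));
    System.out.println(codec); // HEX
  }
}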
@@ -65,7 +65,6 @@ import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.ComparisonChain;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
-import com.google.common.util.concurrent.Futures;
 
 /**
  * Data storage information file.

@@ -1109,7 +1108,14 @@ public class DataStorage extends Storage {
       }
       linkWorkers.shutdown();
       for (Future<Void> f : futures) {
-        Futures.getChecked(f, IOException.class);
+        try {
+          f.get();
+        } catch (InterruptedException e) {
+          Thread.currentThread().interrupt();
+          throw new IOException(e);
+        } catch (ExecutionException e) {
+          throw new IOException(e);
+        }
       }
     }
 
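Note: Futures.getChecked(f, IOException.class) comes from a newer Guava than 11.0.2, so the revert unwraps the Future by hand. A hedged sketch of the same unwrapping pattern as a standalone helper (the class and method names here are illustrative, not part of the commit):

import java.io.IOException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;

public final class FutureUnwrap {
  // Mirrors the inline try/catch above: block on the future and rethrow
  // any failure as an IOException.
  static <V> V getAsIOException(Future<V> f) throws IOException {
    try {
      return f.get();
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt(); // preserve the interrupt flag
      throw new IOException(e);
    } catch (ExecutionException e) {
      throw new IOException(e);
    }
  }
}

Restoring the interrupt flag before rethrowing keeps the caller's thread state intact, which is what the inline replacement above does as well.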
@@ -28,7 +28,6 @@ import java.util.EnumSet;
 import java.util.Iterator;
 import java.util.List;
 
-import com.google.common.base.MoreObjects;
 import com.google.common.collect.ComparisonChain;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;

@@ -366,8 +365,10 @@ final class AclTransformation {
     for (AclEntry entry: aclBuilder) {
       scopeFound.add(entry.getScope());
       if (entry.getType() == GROUP || entry.getName() != null) {
-        FsAction scopeUnionPerms = MoreObjects.firstNonNull(
-            unionPerms.get(entry.getScope()), FsAction.NONE);
+        FsAction scopeUnionPerms = unionPerms.get(entry.getScope());
+        if (scopeUnionPerms == null) {
+          scopeUnionPerms = FsAction.NONE;
+        }
         unionPerms.put(entry.getScope(),
             scopeUnionPerms.or(entry.getPermission()));
       }
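Note: MoreObjects.firstNonNull is likewise missing from Guava 11.0.2, hence the explicit null check. The same idea as a tiny generic helper, purely for illustration (not part of the commit):

public final class FirstNonNull {
  // Returns first if it is non-null, otherwise second.
  static <T> T firstNonNull(T first, T second) {
    return first != null ? first : second;
  }

  public static void main(String[] args) {
    System.out.println(firstNonNull(null, "NONE")); // NONE
  }
}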
@@ -40,7 +40,6 @@ import org.apache.hadoop.hdfs.server.protocol.RemoteEditLog;
 import org.apache.hadoop.hdfs.server.protocol.RemoteEditLogManifest;
 
 import com.google.common.base.Preconditions;
-import com.google.common.collect.ComparisonChain;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableListMultimap;
 import com.google.common.collect.Lists;

@@ -57,28 +56,18 @@ public class JournalSet implements JournalManager {
 
   static final Log LOG = LogFactory.getLog(FSEditLog.class);
 
+  // we want local logs to be ordered earlier in the collection, and true
+  // is considered larger than false, so reverse the comparator
   private static final Comparator<EditLogInputStream>
-    LOCAL_LOG_PREFERENCE_COMPARATOR = new Comparator<EditLogInputStream>() {
-    @Override
-    public int compare(EditLogInputStream elis1, EditLogInputStream elis2) {
-      // we want local logs to be ordered earlier in the collection, and true
-      // is considered larger than false, so we want to invert the booleans here
-      return ComparisonChain.start().compareFalseFirst(!elis1.isLocalLog(),
-          !elis2.isLocalLog()).result();
-    }
-  };
-
-  static final public Comparator<EditLogInputStream>
-    EDIT_LOG_INPUT_STREAM_COMPARATOR = new Comparator<EditLogInputStream>() {
-    @Override
-    public int compare(EditLogInputStream a, EditLogInputStream b) {
-      return ComparisonChain.start().
-        compare(a.getFirstTxId(), b.getFirstTxId()).
-        compare(b.getLastTxId(), a.getLastTxId()).
-        result();
-    }
-  };
+      LOCAL_LOG_PREFERENCE_COMPARATOR = Comparator
+          .comparing(EditLogInputStream::isLocalLog)
+          .reversed();
+
+  public static final Comparator<EditLogInputStream>
+      EDIT_LOG_INPUT_STREAM_COMPARATOR = Comparator
+          .comparing(EditLogInputStream::getFirstTxId)
+          .thenComparing(EditLogInputStream::getLastTxId);
 
   /**
    * Container for a JournalManager paired with its currently
   * active stream.
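Note: ComparisonChain.compareFalseFirst is one of the APIs that postdates Guava 11.0.2, which is presumably why both comparators are rewritten against plain java.util.Comparator. A small sketch checking that the replacement keeps local streams first, using a hypothetical stand-in class instead of EditLogInputStream:

import java.util.Arrays;
import java.util.Comparator;
import java.util.List;

public class LocalFirstComparatorSketch {
  // Hypothetical stand-in for EditLogInputStream with only the flag we sort on.
  static class LogStream {
    final String name;
    final boolean local;
    LogStream(String name, boolean local) { this.name = name; this.local = local; }
    boolean isLocalLog() { return local; }
  }

  public static void main(String[] args) {
    // false < true for Boolean comparison, so comparing(isLocalLog).reversed()
    // orders local (true) streams before remote (false) ones.
    Comparator<LogStream> localFirst =
        Comparator.comparing(LogStream::isLocalLog).reversed();
    List<LogStream> streams = Arrays.asList(
        new LogStream("remote", false), new LogStream("local", true));
    streams.sort(localFirst);
    System.out.println(streams.get(0).name); // local
  }
}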
@@ -0,0 +1,154 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hdfs.qjournal.client;

import java.util.Collection;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

/**
 * A very basic ExecutorService for running submitted Callables serially.
 * Many bits of functionality are not implemented.
 */
public class DirectExecutorService implements ExecutorService {

  private static class DirectFuture<V> implements Future<V> {
    private V result = null;
    private Exception ex = null;

    DirectFuture(Callable<V> c) {
      try {
        result = c.call();
      } catch (Exception e) {
        ex = e;
      }
    }

    @Override
    public boolean cancel(boolean mayInterruptIfRunning) {
      return false;
    }

    @Override
    public boolean isCancelled() {
      return false;
    }

    @Override
    public boolean isDone() {
      return true;
    }

    @Override
    public V get() throws InterruptedException, ExecutionException {
      if (ex != null) {
        throw new ExecutionException(ex);
      }
      return result;
    }

    @Override
    public V get(long timeout, TimeUnit unit)
        throws InterruptedException, ExecutionException, TimeoutException {
      return get();
    }
  }

  private boolean isShutdown = false;

  @Override
  synchronized public void shutdown() {
    isShutdown = true;
  }

  @Override
  public List<Runnable> shutdownNow() {
    throw new UnsupportedOperationException();
  }

  @Override
  public boolean isShutdown() {
    return isShutdown;
  }

  @Override
  synchronized public boolean isTerminated() {
    return isShutdown;
  }

  @Override
  public boolean awaitTermination(long timeout, TimeUnit unit)
      throws InterruptedException {
    throw new UnsupportedOperationException();
  }

  @Override
  synchronized public <T> Future<T> submit(Callable<T> task) {
    if (isShutdown) {
      throw new RejectedExecutionException("ExecutorService was shutdown");
    }
    return new DirectFuture<>(task);
  }

  @Override
  public <T> Future<T> submit(Runnable task, T result) {
    throw new UnsupportedOperationException();
  }

  @Override
  public Future<?> submit(Runnable task) {
    throw new UnsupportedOperationException();
  }

  @Override
  public <T> List<Future<T>> invokeAll(Collection<? extends Callable<T>> tasks)
      throws InterruptedException {
    throw new UnsupportedOperationException();
  }

  @Override
  public <T> List<Future<T>> invokeAll(Collection<? extends Callable<T>> tasks,
      long timeout, TimeUnit unit) throws InterruptedException {
    throw new UnsupportedOperationException();
  }

  @Override
  public <T> T invokeAny(Collection<? extends Callable<T>> tasks)
      throws InterruptedException, ExecutionException {
    throw new UnsupportedOperationException();
  }

  @Override
  public <T> T invokeAny(Collection<? extends Callable<T>> tasks, long timeout,
      TimeUnit unit)
      throws InterruptedException, ExecutionException, TimeoutException {
    throw new UnsupportedOperationException();
  }

  @Override
  synchronized public void execute(Runnable command) {
    command.run();
  }
}
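Note: the new DirectExecutorService runs each submitted Callable immediately on the calling thread, which is what the QJM tests below rely on for determinism. A quick usage sketch (standalone, assuming the class above is importable; only the Callable overload of submit is implemented):

import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;

public class DirectExecutorServiceDemo {
  public static void main(String[] args) throws Exception {
    ExecutorService executor = new DirectExecutorService();
    // The task runs inside submit(), on the calling thread.
    Callable<String> task = () -> Thread.currentThread().getName();
    Future<String> f = executor.submit(task);
    System.out.println(f.get());               // prints this (main) thread's name
    executor.shutdown();
    System.out.println(executor.isShutdown()); // true
  }
}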
@@ -65,7 +65,6 @@ import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
-import com.google.common.util.concurrent.MoreExecutors;
 
 
 public class TestQJMWithFaults {

@@ -402,7 +401,7 @@ public class TestQJMWithFaults {
 
       @Override
       protected ExecutorService createSingleThreadExecutor() {
-        return MoreExecutors.newDirectExecutorService();
+        return new DirectExecutorService();
       }
     }
 
@@ -66,7 +66,6 @@ import org.mockito.Mockito;
 import org.mockito.stubbing.Stubber;
 
 import com.google.common.collect.Lists;
-import com.google.common.util.concurrent.MoreExecutors;
 
 /**
  * Functional tests for QuorumJournalManager.

@@ -946,7 +945,7 @@ public class TestQuorumJournalManager {
       protected ExecutorService createSingleThreadExecutor() {
         // Don't parallelize calls to the quorum in the tests.
         // This makes the tests more deterministic.
-        return MoreExecutors.newDirectExecutorService();
+        return new DirectExecutorService();
       }
     };
 
@@ -90,7 +90,7 @@
     <findbugs.version>3.0.0</findbugs.version>
     <spotbugs.version>3.1.0-RC1</spotbugs.version>
 
-    <guava.version>21.0</guava.version>
+    <guava.version>11.0.2</guava.version>
     <guice.version>4.0</guice.version>
     <joda-time.version>2.9.4</joda-time.version>
 
@@ -51,7 +51,6 @@ import org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto;
 import org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto;
 import org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto;
 
-import com.google.common.base.CharMatcher;
 import com.google.protobuf.TextFormat;
 
 @Private

@@ -286,7 +285,7 @@ extends ApplicationSubmissionContext {
             "maximum allowed length of a tag is " +
             YarnConfiguration.APPLICATION_MAX_TAG_LENGTH);
       }
-      if (!CharMatcher.ascii().matchesAllOf(tag)) {
+      if (!org.apache.commons.lang3.StringUtils.isAsciiPrintable(tag)) {
        throw new IllegalArgumentException("A tag can only have ASCII " +
            "characters! Invalid tag - " + tag);
      }
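Note: the two checks are close but not identical, as far as I can tell: CharMatcher.ascii() accepts every character up to U+007F, including control characters, while commons-lang's isAsciiPrintable only accepts the printable range U+0020 to U+007E. A JDK-only sketch of that distinction (mirroring the checks rather than calling either library):

public class AsciiCheckSketch {
  // Roughly what CharMatcher.ascii().matchesAllOf(tag) accepted: code points <= 0x7F.
  static boolean isAscii(String s) {
    return s.chars().allMatch(c -> c <= 0x7F);
  }

  // Roughly what StringUtils.isAsciiPrintable(tag) accepts: 0x20..0x7E only.
  static boolean isAsciiPrintable(String s) {
    return s.chars().allMatch(c -> c >= 0x20 && c < 0x7F);
  }

  public static void main(String[] args) {
    String tag = "build\t2017";                // contains a tab
    System.out.println(isAscii(tag));          // true
    System.out.println(isAsciiPrintable(tag)); // false: tab is ASCII but not printable
  }
}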
@@ -26,6 +26,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.http.HttpServer2;

@@ -33,7 +34,6 @@ import org.apache.hadoop.yarn.webapp.view.RobotsTextPage;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.base.CharMatcher;
 import com.google.common.base.Splitter;
 import com.google.common.collect.Lists;
 import com.google.inject.Provides;

@@ -275,7 +275,7 @@ public abstract class WebApp extends ServletModule {
 
   static String getPrefix(String pathSpec) {
     int start = 0;
-    while (CharMatcher.whitespace().matches(pathSpec.charAt(start))) {
+    while (StringUtils.isAnyBlank(Character.toString(pathSpec.charAt(start)))) {
      ++start;
    }
    if (pathSpec.charAt(start) != '/') {

@@ -291,7 +291,7 @@ public abstract class WebApp extends ServletModule {
     char c;
     do {
       c = pathSpec.charAt(--ci);
-    } while (c == '/' || CharMatcher.whitespace().matches(c));
+    } while (c == '/' || StringUtils.isAnyBlank(Character.toString(c)));
     return pathSpec.substring(start, ci + 1);
   }
 
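Note: StringUtils.isAnyBlank ultimately relies on Character.isWhitespace, which draws the line slightly differently from Guava's CharMatcher.whitespace(); the non-breaking space U+00A0, for instance, is Unicode whitespace but not Java whitespace. A tiny JDK-only illustration of the Java-side definition:

public class WhitespaceDefinitions {
  public static void main(String[] args) {
    char nbsp = '\u00A0';              // non-breaking space
    char ideographicSpace = '\u3000';  // CJK ideographic space
    // Java's definition: NBSP is *not* whitespace, the ideographic space is.
    System.out.println(Character.isWhitespace(nbsp));             // false
    System.out.println(Character.isWhitespace(ideographicSpace)); // true
  }
}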
@@ -21,6 +21,8 @@ import java.util.Collections;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Future;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;

@@ -68,7 +70,6 @@ import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
 import org.apache.hadoop.yarn.server.utils.BuilderUtils;
 
 import com.google.common.annotations.VisibleForTesting;
-import com.google.common.util.concurrent.Futures;
 import com.google.common.util.concurrent.SettableFuture;
 
 /**

@@ -210,6 +211,17 @@ public class RMAppManager implements EventHandler<RMAppManagerEvent>,
       ApplicationSummary.logAppSummary(rmContext.getRMApps().get(appId));
   }
 
+  private static <V> V getChecked(Future<V> future) throws YarnException {
+    try {
+      return future.get();
+    } catch (InterruptedException e) {
+      Thread.currentThread().interrupt();
+      throw new YarnException(e);
+    } catch (ExecutionException e) {
+      throw new YarnException(e);
+    }
+  }
+
   protected synchronized int getCompletedAppsListSize() {
     return this.completedApps.size();
   }

@@ -641,7 +653,7 @@ public class RMAppManager implements EventHandler<RMAppManagerEvent>,
     this.rmContext.getStateStore()
         .updateApplicationStateSynchronously(appState, false, future);
 
-    Futures.getChecked(future, YarnException.class);
+    getChecked(future);
 
     // update in-memory
     ((RMAppImpl) app).updateApplicationTimeout(newExpireTime);

@@ -678,7 +690,7 @@ public class RMAppManager implements EventHandler<RMAppManagerEvent>,
       return;
     }
 
-    Futures.getChecked(future, YarnException.class);
+    getChecked(future);
 
     // update in-memory
     ((RMAppImpl) app).setApplicationPriority(appPriority);

@@ -761,7 +773,7 @@ public class RMAppManager implements EventHandler<RMAppManagerEvent>,
         false, future);
 
     try {
-      Futures.getChecked(future, YarnException.class);
+      getChecked(future);
     } catch (YarnException ex) {
       if (!toSuppressException) {
         throw ex;
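Note: the new private getChecked plays the role Futures.getChecked played, for the one exception type this class needs. For reference, a small standalone sketch of how a failed SettableFuture (which is available in Guava 11.0.2) surfaces its cause through Future.get(), which is exactly what the helper wraps into a YarnException:

import java.util.concurrent.ExecutionException;

import com.google.common.util.concurrent.SettableFuture;

public class SettableFutureSketch {
  public static void main(String[] args) throws Exception {
    SettableFuture<Void> future = SettableFuture.create();
    // The state store would normally complete this future; fail it here.
    future.setException(new IllegalStateException("store update failed"));
    try {
      future.get();
    } catch (ExecutionException e) {
      // getChecked(future) above would rethrow this wrapped in a YarnException.
      System.out.println(e.getCause().getMessage()); // store update failed
    }
  }
}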
@@ -58,7 +58,6 @@ import org.w3c.dom.NodeList;
 import org.w3c.dom.Text;
 import org.xml.sax.SAXException;
 
-import com.google.common.base.CharMatcher;
 import com.google.common.annotations.VisibleForTesting;
 
 @Public

@@ -465,7 +464,7 @@ public class AllocationFileLoaderService extends AbstractService {
       Set<String> reservableQueues,
       Set<String> nonPreemptableQueues)
       throws AllocationConfigurationException {
-    String queueName = CharMatcher.whitespace().trimFrom(
+    String queueName = FairSchedulerUtilities.trimQueueName(
         element.getAttribute("name"));
 
     if (queueName.contains(".")) {
@@ -0,0 +1,69 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair;

/**
 * Utility class for the Fair Scheduler.
 */
public final class FairSchedulerUtilities {

  /**
   * Table copied from Google Guava v19:
   * com/google/common/base/CharMatcher.java
   * <p>
   * Licensed under the Apache License Version 2.0.
   */
  static final String WHITESPACE_TABLE =
      "\u2002\u3000\r\u0085\u200A\u2005\u2000\u3000"
          + "\u2029\u000B\u3000\u2008\u2003\u205F\u3000\u1680"
          + "\u0009\u0020\u2006\u2001\u202F\u00A0\u000C\u2009"
          + "\u3000\u2004\u3000\u3000\u2028\n\u2007\u3000";

  private FairSchedulerUtilities() {
    // private constructor because this is a utility class.
  }

  private static boolean isWhitespace(char c) {
    for (int i = 0; i < WHITESPACE_TABLE.length(); i++) {
      if (WHITESPACE_TABLE.charAt(i) == c) {
        return true;
      }
    }
    return false;
  }

  public static String trimQueueName(String name) {
    if (name == null) {
      return null;
    }
    int start = 0;
    while (start < name.length()
        && isWhitespace(name.charAt(start))
        && start < name.length()) {
      start++;
    }
    int end = name.length() - 1;
    while (end >= 0
        && isWhitespace(name.charAt(end))
        && end > start) {
      end--;
    }
    return name.substring(start, end + 1);
  }

}
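Note: the copied whitespace table exists because queue names can be padded with Unicode spaces that String.trim() (which only strips characters up to U+0020) would leave in place, while Guava's CharMatcher.whitespace() removed them. A small sketch of the difference, assuming FairSchedulerUtilities is importable (for example from a test in the same package):

public class TrimQueueNameSketch {
  public static void main(String[] args) {
    String padded = "\u3000root.default\u00A0"; // ideographic space + NBSP padding
    System.out.println(padded.trim());                                // padding kept
    System.out.println(FairSchedulerUtilities.trimQueueName(padded)); // "root.default"
  }
}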
@@ -38,7 +38,6 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.policies.FifoPolicy;
 import org.xml.sax.SAXException;
 
-import com.google.common.base.CharMatcher;
 import com.google.common.annotations.VisibleForTesting;
 import java.util.Iterator;
 import java.util.Set;

@@ -533,8 +532,9 @@ public class QueueManager {
   @VisibleForTesting
   boolean isQueueNameValid(String node) {
     // use the same white space trim as in QueueMetrics() otherwise things fail
-    // guava uses a different definition for whitespace than java.
+    // This needs to trim additional Unicode whitespace characters beyond what
+    // the built-in JDK methods consider whitespace. See YARN-5272.
     return !node.isEmpty() &&
-        node.equals(CharMatcher.whitespace().trimFrom(node));
+        node.equals(FairSchedulerUtilities.trimQueueName(node));
   }
 }
@@ -0,0 +1,67 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair;

import org.junit.Test;

import static org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairSchedulerUtilities.trimQueueName;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

/**
 * Tests for {@link FairSchedulerUtilities}.
 */
public class TestFairSchedulerUtilities {

  @Test
  public void testTrimQueueNameEquals() throws Exception {
    final String[] equalsStrings = {
        // no spaces
        "a",
        // leading spaces
        " a",
        " \u3000a",
        "\u2002\u3000\r\u0085\u200A\u2005\u2000\u3000a",
        "\u2029\u000B\u3000\u2008\u2003\u205F\u3000\u1680a",
        "\u0009\u0020\u2006\u2001\u202F\u00A0\u000C\u2009a",
        "\u3000\u2004\u3000\u3000\u2028\n\u2007\u3000a",
        // trailing spaces
        "a\u200A",
        "a \u0085 ",
        // spaces on both sides
        " a ",
        " a\u00A0",
        "\u0009\u0020\u2006\u2001\u202F\u00A0\u000C\u2009a" +
            "\u3000\u2004\u3000\u3000\u2028\n\u2007\u3000",
    };
    for (String s : equalsStrings) {
      assertEquals("a", trimQueueName(s));
    }
  }

  @Test
  public void testTrimQueueNamesEmpty() throws Exception {
    assertNull(trimQueueName(null));
    final String spaces = "\u2002\u3000\r\u0085\u200A\u2005\u2000\u3000"
        + "\u2029\u000B\u3000\u2008\u2003\u205F\u3000\u1680"
        + "\u0009\u0020\u2006\u2001\u202F\u00A0\u000C\u2009"
        + "\u3000\u2004\u3000\u3000\u2028\n\u2007\u3000";
    assertTrue(trimQueueName(spaces).isEmpty());
  }
}