HDFS-12460. Make addErasureCodingPolicy an idempotent operation. Contributed by Sammi Chen
commit 0f9af246e8
parent e81596d06d
@@ -242,14 +242,15 @@ public final class ErasureCodingPolicyManager {
         policy.getSchema(), policy.getCellSize());
     for (ErasureCodingPolicy p : getPolicies()) {
       if (p.getName().equals(assignedNewName)) {
-        throw new HadoopIllegalArgumentException("The policy name " +
-            assignedNewName + " already exists");
+        LOG.info("The policy name " + assignedNewName + " already exists");
+        return p;
       }
       if (p.getSchema().equals(policy.getSchema()) &&
           p.getCellSize() == policy.getCellSize()) {
-        throw new HadoopIllegalArgumentException("A policy with same schema "
+        LOG.info("A policy with same schema "
             + policy.getSchema().toString() + " and cell size "
             + p.getCellSize() + " already exists");
+        return p;
       }
     }
     policy.setName(assignedNewName);
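With the hunk above, re-adding a policy whose name or schema and cell size already exist makes the NameNode log the duplicate and return the existing policy instead of throwing, so a replayed addErasureCodingPolicies call succeeds. A minimal client-side sketch of that behavior, assuming a DistributedFileSystem handle connected to a NameNode carrying this patch; the class name, method name, and RS(5,3) policy values are only illustrative:

import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.AddErasureCodingPolicyResponse;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
import org.apache.hadoop.io.erasurecode.ECSchema;

public class IdempotentAddEcPolicySketch {
  // 'fs' is assumed to point at a cluster that includes this change.
  static void addSamePolicyTwice(DistributedFileSystem fs) throws Exception {
    // Illustrative RS(5,3) policy with a 1 MB cell size.
    ErasureCodingPolicy policy =
        new ErasureCodingPolicy(new ECSchema("rs", 5, 3), 1024 * 1024);
    ErasureCodingPolicy[] policies = new ErasureCodingPolicy[]{policy};

    // First call registers the policy on the NameNode.
    AddErasureCodingPolicyResponse[] first = fs.addErasureCodingPolicies(policies);
    System.out.println("first add succeeded: " + first[0].isSucceed());

    // Repeating the identical call no longer fails with
    // HadoopIllegalArgumentException; the NameNode returns the
    // already-registered policy, so the retry also reports success.
    AddErasureCodingPolicyResponse[] second = fs.addErasureCodingPolicies(policies);
    System.out.println("retried add succeeded: " + second[0].isSucceed());
  }
}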
@@ -718,7 +718,7 @@ public class TestErasureCodingPolicies {
     policyArray = new ErasureCodingPolicy[]{policy0};
     responses = fs.addErasureCodingPolicies(policyArray);
     assertEquals(1, responses.length);
-    assertFalse(responses[0].isSucceed());
+    assertTrue(responses[0].isSucceed());
 
     // Test add policy successfully
     newPolicy =
@@ -436,7 +436,7 @@ public class TestNamenodeRetryCache {
 
     LightWeightCache<CacheEntry, CacheEntry> cacheSet =
         (LightWeightCache<CacheEntry, CacheEntry>) namesystem.getRetryCache().getCacheSet();
-    assertEquals("Retry cache size is wrong", 26, cacheSet.size());
+    assertEquals("Retry cache size is wrong", 34, cacheSet.size());
 
     Map<CacheEntry, CacheEntry> oldEntries =
         new HashMap<CacheEntry, CacheEntry>();
@@ -455,7 +455,7 @@ public class TestNamenodeRetryCache {
     assertTrue(namesystem.hasRetryCache());
     cacheSet = (LightWeightCache<CacheEntry, CacheEntry>) namesystem
         .getRetryCache().getCacheSet();
-    assertEquals("Retry cache size is wrong", 26, cacheSet.size());
+    assertEquals("Retry cache size is wrong", 34, cacheSet.size());
     iter = cacheSet.iterator();
     while (iter.hasNext()) {
       CacheEntry entry = iter.next();
@@ -166,7 +166,7 @@ public class TestRetryCacheWithHA {
     FSNamesystem fsn0 = cluster.getNamesystem(0);
     LightWeightCache<CacheEntry, CacheEntry> cacheSet =
         (LightWeightCache<CacheEntry, CacheEntry>) fsn0.getRetryCache().getCacheSet();
-    assertEquals("Retry cache size is wrong", 26, cacheSet.size());
+    assertEquals("Retry cache size is wrong", 34, cacheSet.size());
 
     Map<CacheEntry, CacheEntry> oldEntries =
         new HashMap<CacheEntry, CacheEntry>();
@@ -187,7 +187,7 @@ public class TestRetryCacheWithHA {
     FSNamesystem fsn1 = cluster.getNamesystem(1);
     cacheSet = (LightWeightCache<CacheEntry, CacheEntry>) fsn1
         .getRetryCache().getCacheSet();
-    assertEquals("Retry cache size is wrong", 26, cacheSet.size());
+    assertEquals("Retry cache size is wrong", 34, cacheSet.size());
     iter = cacheSet.iterator();
     while (iter.hasNext()) {
       CacheEntry entry = iter.next();