HDFS-11652. Improve ECSchema and ErasureCodingPolicy toString, hashCode, equals.

Author: Andrew Wang
Date:   2017-04-17 16:30:25 -07:00
Commit: c0ca785dbb
Parent: ad49098eb3

4 changed files with 180 additions and 37 deletions

ECSchema.java

@@ -21,6 +21,8 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 
+import org.apache.commons.lang.builder.EqualsBuilder;
+import org.apache.commons.lang.builder.HashCodeBuilder;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -199,34 +201,31 @@ public final class ECSchema {
   @Override
   public boolean equals(Object o) {
-    if (this == o) {
+    if (o == null) {
+      return false;
+    }
+    if (o == this) {
       return true;
     }
-    if (o == null || getClass() != o.getClass()) {
+    if (o.getClass() != getClass()) {
       return false;
     }
-    ECSchema ecSchema = (ECSchema) o;
-    if (numDataUnits != ecSchema.numDataUnits) {
-      return false;
-    }
-    if (numParityUnits != ecSchema.numParityUnits) {
-      return false;
-    }
-    if (!codecName.equals(ecSchema.codecName)) {
-      return false;
-    }
-    return extraOptions.equals(ecSchema.extraOptions);
+    ECSchema rhs = (ECSchema) o;
+    return new EqualsBuilder()
+        .append(codecName, rhs.codecName)
+        .append(extraOptions, rhs.extraOptions)
+        .append(numDataUnits, rhs.numDataUnits)
+        .append(numParityUnits, rhs.numParityUnits)
+        .isEquals();
   }
 
   @Override
   public int hashCode() {
-    int result = codecName.hashCode();
-    result = 31 * result + extraOptions.hashCode();
-    result = 31 * result + numDataUnits;
-    result = 31 * result + numParityUnits;
-    return result;
+    return new HashCodeBuilder(1273158869, 1555022101)
+        .append(codecName)
+        .append(extraOptions)
+        .append(numDataUnits)
+        .append(numParityUnits)
+        .toHashCode();
   }
 }
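For readers unfamiliar with the commons-lang builders, the sketch below (not part of the patch; the demo class name and the "rs"/6/3 values are illustrative) shows the behavior the builder-based equals/hashCode gives callers: ECSchema instances built independently from the same codec, unit counts, and options compare equal and can safely be used as hash keys. The four-argument constructor is the one exercised by the tests below.

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.io.erasurecode.ECSchema;

public class ECSchemaEqualityDemo {
  public static void main(String[] args) {
    // Two logically identical schemas created independently.
    ECSchema a = new ECSchema("rs", 6, 3, null);
    ECSchema b = new ECSchema("rs", 6, 3, null);

    // With the builder-based equals/hashCode they compare equal...
    System.out.println(a.equals(b));                  // true
    System.out.println(a.hashCode() == b.hashCode()); // true

    // ...so equal schemas collapse to a single key in hash-based maps.
    Map<ECSchema, String> labels = new HashMap<>();
    labels.put(a, "RS(6,3)");
    System.out.println(labels.get(b));                // RS(6,3)
  }
}

The two integers passed to HashCodeBuilder in the patch (1273158869 and 1555022101) are just arbitrary odd seed and multiplier constants; any odd pair would do.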

TestECSchema.java

@@ -22,13 +22,16 @@ import org.junit.Test;
 import org.junit.rules.Timeout;
 
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
 
 import java.util.HashMap;
 import java.util.Map;
+import java.util.TreeMap;
 
 public class TestECSchema {
 
   @Rule
   public Timeout globalTimeout = new Timeout(300000);
 
   @Test
   public void testGoodSchema() {
@@ -51,5 +54,45 @@ public class TestECSchema {
     assertEquals(numParityUnits, schema.getNumParityUnits());
     assertEquals(codec, schema.getCodecName());
     assertEquals(extraOptionValue, schema.getExtraOptions().get(extraOption));
+
+    Map<String, String> extraMap = new TreeMap<>();
+    extraMap.put(extraOption, extraOptionValue);
+    ECSchema sameSchema = new ECSchema(codec, numDataUnits, numParityUnits,
+        extraMap);
+    assertEquals("Different constructors not equal", sameSchema, schema);
+  }
+
+  @Test
+  public void testEqualsAndHashCode() {
+    Map<String, String> extraMap = new TreeMap<>();
+    extraMap.put("key", "value");
+
+    ECSchema[] schemas = new ECSchema[]{
+        new ECSchema("one", 1, 2, null),
+        new ECSchema("two", 1, 2, null),
+        new ECSchema("one", 2, 2, null),
+        new ECSchema("one", 1, 1, null),
+        new ECSchema("one", 1, 2, extraMap),
+    };
+
+    for (int i = 0; i < schemas.length; i++) {
+      final ECSchema ei = schemas[i];
+      // Check identity
+      ECSchema temp = new ECSchema(ei.getCodecName(), ei.getNumDataUnits(),
+          ei.getNumParityUnits(), ei.getExtraOptions());
+      assertEquals(ei, temp);
+      assertEquals(ei.hashCode(), temp.hashCode());
+      // Check against other schemas
+      for (int j = 0; j < schemas.length; j++) {
+        final ECSchema ej = schemas[j];
+        if (i == j) {
+          assertEquals(ei, ej);
+          assertEquals(ei.hashCode(), ej.hashCode());
+        } else {
+          assertNotEquals(ei, ej);
+          assertNotEquals(ei.hashCode(), ej.hashCode());
+        }
+      }
+    }
   }
 }

ErasureCodingPolicy.java

@@ -17,6 +17,9 @@
  */
 package org.apache.hadoop.hdfs.protocol;
 
+import com.google.common.base.Preconditions;
+import org.apache.commons.lang.builder.EqualsBuilder;
+import org.apache.commons.lang.builder.HashCodeBuilder;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.erasurecode.ECSchema;
@@ -35,6 +38,9 @@ public final class ErasureCodingPolicy {
 
   public ErasureCodingPolicy(String name, ECSchema schema,
       int cellSize, byte id) {
+    Preconditions.checkNotNull(name);
+    Preconditions.checkNotNull(schema);
+    Preconditions.checkArgument(cellSize > 0, "cellSize must be positive");
     this.name = name;
     this.schema = schema;
     this.cellSize = cellSize;
@@ -82,31 +88,40 @@ public final class ErasureCodingPolicy {
   @Override
   public boolean equals(Object o) {
-    if (this == o) {
+    if (o == null) {
+      return false;
+    }
+    if (o == this) {
       return true;
     }
-    if (o == null || getClass() != o.getClass()) {
+    if (o.getClass() != getClass()) {
       return false;
     }
-    ErasureCodingPolicy that = (ErasureCodingPolicy) o;
-    return that.getName().equals(name) &&
-        that.getCellSize() == cellSize &&
-        that.getSchema().equals(schema);
+    ErasureCodingPolicy rhs = (ErasureCodingPolicy) o;
+    return new EqualsBuilder()
+        .append(name, rhs.name)
+        .append(schema, rhs.schema)
+        .append(cellSize, rhs.cellSize)
+        .append(id, rhs.id)
+        .isEquals();
   }
 
   @Override
   public int hashCode() {
-    int result = name.hashCode();
-    result = 31 * result + schema.hashCode();
-    result = 31 * result + cellSize;
-    return result;
+    return new HashCodeBuilder(303855623, 582626729)
+        .append(name)
+        .append(schema)
+        .append(cellSize)
+        .append(id)
+        .toHashCode();
   }
 
   @Override
   public String toString() {
     return "ErasureCodingPolicy=[" + "Name=" + name + ", "
         + "Schema=[" + schema.toString() + "], "
-        + "CellSize=" + cellSize + " " + "]";
+        + "CellSize=" + cellSize + ", "
+        + "Id=" + id
+        + "]";
   }
 }
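A hedged usage sketch of the hardened constructor and extended toString (again not part of the patch; the demo class, policy name, and cell size are made up, while the constructor signature, the Preconditions checks, and the toString shape are taken from the diff above):

import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
import org.apache.hadoop.io.erasurecode.ECSchema;

public class ErasureCodingPolicyDemo {
  public static void main(String[] args) {
    ECSchema schema = new ECSchema("rs", 6, 3, null);

    // Valid arguments: name and schema non-null, cellSize positive.
    ErasureCodingPolicy policy =
        new ErasureCodingPolicy("RS-6-3-64k", schema, 64 * 1024, (byte) 1);

    // toString now also reports the id, roughly:
    //   ErasureCodingPolicy=[Name=RS-6-3-64k, Schema=[...], CellSize=65536, Id=1]
    System.out.println(policy);

    // A non-positive cellSize is rejected up front by the new Preconditions check.
    try {
      new ErasureCodingPolicy("broken", schema, -1, (byte) 2);
    } catch (IllegalArgumentException e) {
      System.out.println("rejected: " + e.getMessage());
    }
  }
}

Folding id into equals, hashCode, and toString means two policies that differ only in id no longer compare equal, which is exactly what the new testEqualsAndHashCode below verifies.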

TestErasureCodingPolicy.java (new file)

@@ -0,0 +1,86 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.protocol;
import org.apache.hadoop.io.erasurecode.ECSchema;
import org.apache.hadoop.test.GenericTestUtils;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.fail;
/**
* Test ErasureCodingPolicy.
*/
public class TestErasureCodingPolicy {
private static final ECSchema SCHEMA_1 = new ECSchema("one", 1, 2, null);
private static final ECSchema SCHEMA_2 = new ECSchema("two", 1, 2, null);
@Test
public void testInvalid() {
try {
new ErasureCodingPolicy(null, SCHEMA_1, 123, (byte) -1);
fail("Instantiated invalid ErasureCodingPolicy");
} catch (NullPointerException e) {
}
try {
new ErasureCodingPolicy("policy", null, 123, (byte) -1);
fail("Instantiated invalid ErasureCodingPolicy");
} catch (NullPointerException e) {
}
try {
new ErasureCodingPolicy("policy", SCHEMA_1, -1, (byte) -1);
fail("Instantiated invalid ErasureCodingPolicy");
} catch (IllegalArgumentException e) {
GenericTestUtils.assertExceptionContains("cellSize", e);
}
}
@Test
public void testEqualsAndHashCode() {
ErasureCodingPolicy[] policies = new ErasureCodingPolicy[]{
new ErasureCodingPolicy("one", SCHEMA_1, 321, (byte) 1),
new ErasureCodingPolicy("two", SCHEMA_1, 321, (byte) 1),
new ErasureCodingPolicy("one", SCHEMA_2, 321, (byte) 1),
new ErasureCodingPolicy("one", SCHEMA_1, 123, (byte) 1),
new ErasureCodingPolicy("one", SCHEMA_1, 321, (byte) 3),
};
for (int i = 0; i < policies.length; i++) {
final ErasureCodingPolicy ei = policies[i];
// Check identity
ErasureCodingPolicy temp = new ErasureCodingPolicy(ei.getName(), ei
.getSchema(), ei.getCellSize(), ei.getId());
assertEquals(ei, temp);
assertEquals(ei.hashCode(), temp.hashCode());
// Check against other policies
for (int j = 0; j < policies.length; j++) {
final ErasureCodingPolicy ej = policies[j];
if (i == j) {
assertEquals(ei, ej);
assertEquals(ei.hashCode(), ej.hashCode());
} else {
assertNotEquals(ei, ej);
assertNotEquals(ei.hashCode(), ej.hashCode());
}
}
}
}
}