HDFS-11652. Improve ECSchema and ErasureCodingPolicy toString, hashCode, equals.

Andrew Wang 2017-04-17 16:30:25 -07:00
parent ad49098eb3
commit c0ca785dbb
4 changed files with 180 additions and 37 deletions
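Both classes move their equals and hashCode implementations onto Apache Commons Lang's EqualsBuilder and HashCodeBuilder, which chain field-by-field comparisons and fold fields into a seeded hash. As a minimal sketch of that idiom, independent of this patch (the Point class and its fields are invented for illustration):

import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;

// Illustrative only: the same builder idiom that ECSchema and
// ErasureCodingPolicy adopt in the hunks below.
public final class Point {
  private final int x;
  private final int y;

  public Point(int x, int y) {
    this.x = x;
    this.y = y;
  }

  @Override
  public boolean equals(Object o) {
    if (o == null) {
      return false;
    }
    if (o == this) {
      return true;
    }
    if (o.getClass() != getClass()) {
      return false;
    }
    Point rhs = (Point) o;
    return new EqualsBuilder().append(x, rhs.x).append(y, rhs.y).isEquals();
  }

  @Override
  public int hashCode() {
    // HashCodeBuilder takes two arbitrary non-zero odd numbers; using a
    // distinct pair per class (as the patch does) keeps field-identical
    // instances of different classes from hashing alike.
    return new HashCodeBuilder(17, 37).append(x).append(y).toHashCode();
  }
}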

ECSchema.java

@@ -21,6 +21,8 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 
+import org.apache.commons.lang.builder.EqualsBuilder;
+import org.apache.commons.lang.builder.HashCodeBuilder;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -199,34 +201,31 @@ public final class ECSchema {
   @Override
   public boolean equals(Object o) {
-    if (this == o) {
+    if (o == null) {
+      return false;
+    }
+    if (o == this) {
       return true;
     }
-    if (o == null || getClass() != o.getClass()) {
+    if (o.getClass() != getClass()) {
       return false;
     }
-    ECSchema ecSchema = (ECSchema) o;
-    if (numDataUnits != ecSchema.numDataUnits) {
-      return false;
-    }
-    if (numParityUnits != ecSchema.numParityUnits) {
-      return false;
-    }
-    if (!codecName.equals(ecSchema.codecName)) {
-      return false;
-    }
-    return extraOptions.equals(ecSchema.extraOptions);
+    ECSchema rhs = (ECSchema) o;
+    return new EqualsBuilder()
+        .append(codecName, rhs.codecName)
+        .append(extraOptions, rhs.extraOptions)
+        .append(numDataUnits, rhs.numDataUnits)
+        .append(numParityUnits, rhs.numParityUnits)
+        .isEquals();
   }
 
   @Override
   public int hashCode() {
-    int result = codecName.hashCode();
-    result = 31 * result + extraOptions.hashCode();
-    result = 31 * result + numDataUnits;
-    result = 31 * result + numParityUnits;
-    return result;
+    return new HashCodeBuilder(1273158869, 1555022101)
+        .append(codecName)
+        .append(extraOptions)
+        .append(numDataUnits)
+        .append(numParityUnits)
+        .toHashCode();
   }
 }
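One practical consequence of the rewrite: field-identical ECSchema instances are now interchangeable keys in hash-based collections. A small sketch, assuming the four-argument constructor shown in the tests (the ECSchemaMapDemo driver and the "rs" values are illustrative, not part of the patch):

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.io.erasurecode.ECSchema;

// Illustrative driver, not part of the patch.
public class ECSchemaMapDemo {
  public static void main(String[] args) {
    Map<ECSchema, String> registry = new HashMap<>();
    registry.put(new ECSchema("rs", 6, 3, null), "RS-6-3");
    // A separately constructed but field-identical schema is equal and
    // hashes identically, so this lookup finds the entry above.
    System.out.println(registry.get(new ECSchema("rs", 6, 3, null)));
  }
}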

TestECSchema.java

@@ -22,8 +22,11 @@ import org.junit.Test;
 import org.junit.rules.Timeout;
 
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
 
 import java.util.HashMap;
 import java.util.Map;
+import java.util.TreeMap;
 
 public class TestECSchema {
@@ -51,5 +54,45 @@ public class TestECSchema {
     assertEquals(numParityUnits, schema.getNumParityUnits());
     assertEquals(codec, schema.getCodecName());
     assertEquals(extraOptionValue, schema.getExtraOptions().get(extraOption));
+
+    Map<String, String> extraMap = new TreeMap<>();
+    extraMap.put(extraOption, extraOptionValue);
+    ECSchema sameSchema = new ECSchema(codec, numDataUnits, numParityUnits,
+        extraMap);
+    assertEquals("Different constructors not equal", sameSchema, schema);
   }
+
+  @Test
+  public void testEqualsAndHashCode() {
+    Map<String, String> extraMap = new TreeMap<>();
+    extraMap.put("key", "value");
+    ECSchema[] schemas = new ECSchema[]{
+        new ECSchema("one", 1, 2, null),
+        new ECSchema("two", 1, 2, null),
+        new ECSchema("one", 2, 2, null),
+        new ECSchema("one", 1, 1, null),
+        new ECSchema("one", 1, 2, extraMap),
+    };
+    for (int i = 0; i < schemas.length; i++) {
+      final ECSchema ei = schemas[i];
+      // Check identity: an independently constructed copy must be equal
+      // and must hash identically.
+      ECSchema temp = new ECSchema(ei.getCodecName(), ei.getNumDataUnits(),
+          ei.getNumParityUnits(), ei.getExtraOptions());
+      assertEquals(ei, temp);
+      assertEquals(ei.hashCode(), temp.hashCode());
+      // Check against other schemas: each test vector differs in exactly
+      // one field, and the vectors are chosen so hash codes differ too.
+      for (int j = 0; j < schemas.length; j++) {
+        final ECSchema ej = schemas[j];
+        if (i == j) {
+          assertEquals(ei, ej);
+          assertEquals(ei.hashCode(), ej.hashCode());
+        } else {
+          assertNotEquals(ei, ej);
+          assertNotEquals(ei.hashCode(), ej.hashCode());
+        }
+      }
+    }
+  }
 }
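The "Different constructors not equal" assertion above leans on the fact that java.util.Map defines equality over entry sets, not implementations, so a TreeMap-backed extraOptions compares equal to a HashMap-backed one. A standalone illustration of that contract (MapEqualityDemo is invented for illustration):

import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;

// Illustrative only: Map equals/hashCode are defined over entries, so the
// map implementation backing extraOptions does not affect ECSchema equality.
public class MapEqualityDemo {
  public static void main(String[] args) {
    Map<String, String> a = new HashMap<>();
    Map<String, String> b = new TreeMap<>();
    a.put("k", "v");
    b.put("k", "v");
    System.out.println(a.equals(b) && a.hashCode() == b.hashCode()); // true
  }
}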

ErasureCodingPolicy.java

@@ -17,6 +17,9 @@
  */
 package org.apache.hadoop.hdfs.protocol;
 
+import com.google.common.base.Preconditions;
+import org.apache.commons.lang.builder.EqualsBuilder;
+import org.apache.commons.lang.builder.HashCodeBuilder;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.erasurecode.ECSchema;
@@ -35,6 +38,9 @@ public final class ErasureCodingPolicy {
   public ErasureCodingPolicy(String name, ECSchema schema,
       int cellSize, byte id) {
+    Preconditions.checkNotNull(name);
+    Preconditions.checkNotNull(schema);
+    Preconditions.checkArgument(cellSize > 0, "cellSize must be positive");
     this.name = name;
     this.schema = schema;
     this.cellSize = cellSize;
@@ -82,31 +88,40 @@ public final class ErasureCodingPolicy {
   @Override
   public boolean equals(Object o) {
-    if (this == o) {
-      return true;
-    }
-    if (o == null || getClass() != o.getClass()) {
+    if (o == null) {
       return false;
     }
-    ErasureCodingPolicy that = (ErasureCodingPolicy) o;
-    return that.getName().equals(name) &&
-        that.getCellSize() == cellSize &&
-        that.getSchema().equals(schema);
+    if (o == this) {
+      return true;
+    }
+    if (o.getClass() != getClass()) {
+      return false;
+    }
+    ErasureCodingPolicy rhs = (ErasureCodingPolicy) o;
+    return new EqualsBuilder()
+        .append(name, rhs.name)
+        .append(schema, rhs.schema)
+        .append(cellSize, rhs.cellSize)
+        .append(id, rhs.id)
+        .isEquals();
   }
 
   @Override
   public int hashCode() {
-    int result = name.hashCode();
-    result = 31 * result + schema.hashCode();
-    result = 31 * result + cellSize;
-    return result;
+    return new HashCodeBuilder(303855623, 582626729)
+        .append(name)
+        .append(schema)
+        .append(cellSize)
+        .append(id)
+        .toHashCode();
   }
 
   @Override
   public String toString() {
     return "ErasureCodingPolicy=[" + "Name=" + name + ", "
         + "Schema=[" + schema.toString() + "], "
-        + "CellSize=" + cellSize + " " + "]";
+        + "CellSize=" + cellSize + ", "
+        + "Id=" + id
+        + "]";
   }
 }
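For reference, the revised toString appends the policy id and drops the stray trailing space. Assuming the illustrative RS-6-3 values below (not from the patch), the output takes this shape, with the Schema=[...] portion supplied by ECSchema.toString():

import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
import org.apache.hadoop.io.erasurecode.ECSchema;

// Illustrative driver, not part of the patch.
public class PolicyToStringDemo {
  public static void main(String[] args) {
    ErasureCodingPolicy policy = new ErasureCodingPolicy(
        "RS-6-3-64k", new ECSchema("rs", 6, 3, null), 64 * 1024, (byte) 1);
    System.out.println(policy);
    // ErasureCodingPolicy=[Name=RS-6-3-64k, Schema=[...], CellSize=65536, Id=1]
  }
}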

TestErasureCodingPolicy.java

@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.protocol;
+
+import org.apache.hadoop.io.erasurecode.ECSchema;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.fail;
+
+/**
+ * Test ErasureCodingPolicy.
+ */
+public class TestErasureCodingPolicy {
+
+  private static final ECSchema SCHEMA_1 = new ECSchema("one", 1, 2, null);
+  private static final ECSchema SCHEMA_2 = new ECSchema("two", 1, 2, null);
+
+  @Test
+  public void testInvalid() {
+    try {
+      new ErasureCodingPolicy(null, SCHEMA_1, 123, (byte) -1);
+      fail("Instantiated invalid ErasureCodingPolicy");
+    } catch (NullPointerException e) {
+      // Expected: name must not be null.
+    }
+    try {
+      new ErasureCodingPolicy("policy", null, 123, (byte) -1);
+      fail("Instantiated invalid ErasureCodingPolicy");
+    } catch (NullPointerException e) {
+      // Expected: schema must not be null.
+    }
+    try {
+      new ErasureCodingPolicy("policy", SCHEMA_1, -1, (byte) -1);
+      fail("Instantiated invalid ErasureCodingPolicy");
+    } catch (IllegalArgumentException e) {
+      GenericTestUtils.assertExceptionContains("cellSize", e);
+    }
+  }
+
+  @Test
+  public void testEqualsAndHashCode() {
+    ErasureCodingPolicy[] policies = new ErasureCodingPolicy[]{
+        new ErasureCodingPolicy("one", SCHEMA_1, 321, (byte) 1),
+        new ErasureCodingPolicy("two", SCHEMA_1, 321, (byte) 1),
+        new ErasureCodingPolicy("one", SCHEMA_2, 321, (byte) 1),
+        new ErasureCodingPolicy("one", SCHEMA_1, 123, (byte) 1),
+        new ErasureCodingPolicy("one", SCHEMA_1, 321, (byte) 3),
+    };
+    for (int i = 0; i < policies.length; i++) {
+      final ErasureCodingPolicy ei = policies[i];
+      // Check identity: an independently constructed copy must be equal
+      // and must hash identically.
+      ErasureCodingPolicy temp = new ErasureCodingPolicy(ei.getName(),
+          ei.getSchema(), ei.getCellSize(), ei.getId());
+      assertEquals(ei, temp);
+      assertEquals(ei.hashCode(), temp.hashCode());
+      // Check against other policies: each test vector differs in exactly
+      // one field, and the vectors are chosen so hash codes differ too.
+      for (int j = 0; j < policies.length; j++) {
+        final ErasureCodingPolicy ej = policies[j];
+        if (i == j) {
+          assertEquals(ei, ej);
+          assertEquals(ei.hashCode(), ej.hashCode());
+        } else {
+          assertNotEquals(ei, ej);
+          assertNotEquals(ei.hashCode(), ej.hashCode());
+        }
+      }
+    }
+  }
+}