HBASE-11463 (findbugs) HE: Class defines equals() and uses Object.hashCode()
parent 80f1271ee5
commit 6da1a485fc
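For context, the findbugs HE warning flags classes that override equals() but inherit Object.hashCode(), which breaks the contract that equal objects must report equal hash codes and makes instances unreliable as keys in HashMap or HashSet. The hunks below fix this by deriving hashCode() from the same fields that equals() compares. The following is a minimal illustrative sketch of that pattern, not part of the commit; the RegionKey class and its field are hypothetical stand-ins:

// Hypothetical example of the pattern applied throughout this commit:
// hashCode() uses exactly the state that equals() compares, so two
// equal instances always land in the same hash bucket.
public final class RegionKey {
  private final String regionName;

  public RegionKey(String regionName) {
    this.regionName = regionName;
  }

  @Override
  public boolean equals(Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof RegionKey)) {
      return false;
    }
    return regionName.equals(((RegionKey) obj).regionName);
  }

  @Override
  public int hashCode() {
    // Same field as equals(); java.util.Objects.hash(...) works as well for multiple fields.
    return regionName.hashCode();
  }

  public static void main(String[] args) {
    RegionKey a = new RegionKey("r1");
    RegionKey b = new RegionKey("r1");
    // Equal objects must have equal hash codes.
    System.out.println(a.equals(b) && a.hashCode() == b.hashCode()); // prints: true
  }
}

Each production class below follows this shape; the accompanying new tests assert both hashCode() and equals() consistency.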
@@ -157,6 +157,11 @@ public class MetricsRegionSourceImpl implements MetricsRegionSource {
         .compareTo(impl.regionWrapper.getRegionName());
   }
 
+  @Override
+  public int hashCode() {
+    return this.regionWrapper.getRegionName().hashCode();
+  }
+
   @Override
   public boolean equals(Object obj) {
     if (obj == this) return true;
@@ -22,12 +22,13 @@ import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
 import static org.junit.Assert.assertTrue;
 
 public class TestMetricsRegionSourceImpl {
 
   @Test
-  public void testCompareTo() throws Exception {
+  public void testCompareToHashCodeEquals() throws Exception {
     MetricsRegionServerSourceFactory fact = CompatibilitySingletonFactory.getInstance(MetricsRegionServerSourceFactory.class);
 
     MetricsRegionSource one = fact.createRegion(new RegionWrapperStub("TEST"));
@@ -35,6 +36,8 @@ public class TestMetricsRegionSourceImpl {
     MetricsRegionSource two = fact.createRegion(new RegionWrapperStub("TWO"));
 
     assertEquals(0, one.compareTo(oneClone));
+    assertEquals(one.hashCode(), oneClone.hashCode());
+    assertNotEquals(one, two);
 
     assertTrue( one.compareTo(two) < 0);
     assertTrue( two.compareTo(one) > 0);
@@ -101,6 +101,11 @@ public class LruCachedBlock implements HeapSize, Comparable<LruCachedBlock> {
     return this.accessTime < that.accessTime ? 1 : -1;
   }
 
+  @Override
+  public int hashCode() {
+    return (int)(accessTime ^ (accessTime >>> 32));
+  }
+
   @Override
   public boolean equals(Object obj) {
     if (this == obj) {
@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.mapred;
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
+import java.util.Arrays;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -138,4 +139,13 @@ public class TableSplit implements InputSplit, Comparable<TableSplit> {
       Bytes.equals(m_endRow, other.m_endRow) &&
       m_regionLocation.equals(other.m_regionLocation);
   }
+
+  @Override
+  public int hashCode() {
+    int result = m_tableName != null ? m_tableName.hashCode() : 0;
+    result = 31 * result + Arrays.hashCode(m_startRow);
+    result = 31 * result + Arrays.hashCode(m_endRow);
+    result = 31 * result + (m_regionLocation != null ? m_regionLocation.hashCode() : 0);
+    return result;
+  }
 }
@@ -116,6 +116,11 @@ public class RegionPlan implements Comparable<RegionPlan> {
     return getRegionName().compareTo(o.getRegionName());
   }
 
+  @Override
+  public int hashCode() {
+    return getRegionName().hashCode();
+  }
+
   @Override
   public boolean equals(Object obj) {
     if (this == obj) {
@@ -50,6 +50,13 @@ class ServerAndLoad implements Comparable<ServerAndLoad>, Serializable {
     return diff != 0 ? diff : this.sn.compareTo(other.getServerName());
   }
 
+  @Override
+  public int hashCode() {
+    int result = load;
+    result = 31 * result + ((sn == null) ? 0 : sn.hashCode());
+    return result;
+  }
+
   @Override
   public boolean equals(Object o) {
     if (o instanceof ServerAndLoad) {
@@ -733,6 +733,11 @@ class MemStoreFlusher implements FlushRequester {
     }
 
+    @Override
+    public int hashCode() {
+      return (int) getDelay(TimeUnit.MILLISECONDS);
+    }
+
     @Override
     public boolean equals(Object obj) {
       if (this == obj) {
         return true;
@@ -19,9 +19,11 @@
 package org.apache.hadoop.hbase.security.token;
 
 import javax.crypto.SecretKey;
 
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
+import java.util.Arrays;
+
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.Writable;
@@ -62,6 +64,14 @@ public class AuthenticationKey implements Writable {
     return secret;
   }
 
+  @Override
+  public int hashCode() {
+    int result = id;
+    result = 31 * result + (int) (expirationDate ^ (expirationDate >>> 32));
+    result = 31 * result + ((secret == null) ? 0 : Arrays.hashCode(secret.getEncoded()));
+    return result;
+  }
+
   @Override
   public boolean equals(Object obj) {
     if (obj == null || !(obj instanceof AuthenticationKey)) {
@@ -0,0 +1,57 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;

import org.apache.hadoop.hbase.SmallTests;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;

@Category(SmallTests.class)
public class TestLruCachedBlock {

  LruCachedBlock block;
  LruCachedBlock blockEqual;
  LruCachedBlock blockNotEqual;

  @Before
  public void setUp() throws Exception {
    BlockCacheKey cacheKey = new BlockCacheKey("name", 0);
    BlockCacheKey otherKey = new BlockCacheKey("name2", 1);

    Cacheable cacheable = Mockito.mock(Cacheable.class);
    Cacheable otherCacheable = Mockito.mock(Cacheable.class);

    block = new LruCachedBlock(cacheKey, cacheable, 0);
    blockEqual = new LruCachedBlock(otherKey, otherCacheable, 0);
    blockNotEqual = new LruCachedBlock(cacheKey, cacheable, 1);
  }

  @Test
  public void testEquality() {
    assertEquals(block.hashCode(), blockEqual.hashCode());
    assertNotEquals(block.hashCode(), blockNotEqual.hashCode());

    assertEquals(block, blockEqual);
    assertNotEquals(block, blockNotEqual);
  }
}
@@ -18,7 +18,8 @@
  */
 package org.apache.hadoop.hbase.mapred;
 
-import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
 import static org.junit.Assert.assertTrue;
 
 import org.apache.hadoop.hbase.SmallTests;
@@ -60,29 +61,31 @@ public class TestSplitTable {
   @Test
   @SuppressWarnings("deprecation")
   public void testSplitTableEquals() {
-    assertFalse(new TableSplit(Bytes.toBytes("tableA"), Bytes.toBytes("aaa"),
-        Bytes.toBytes("ddd"), "locationA").equals(new TableSplit(Bytes
-        .toBytes("tableB"), Bytes.toBytes("aaa"), Bytes.toBytes("ddd"),
-        "locationA")));
+    byte[] tableA = Bytes.toBytes("tableA");
+    byte[] aaa = Bytes.toBytes("aaa");
+    byte[] ddd = Bytes.toBytes("ddd");
+    String locationA = "locationA";
 
-    assertFalse(new TableSplit(Bytes.toBytes("tableA"), Bytes.toBytes("aaa"),
-        Bytes.toBytes("ddd"), "locationA").equals(new TableSplit(Bytes
-        .toBytes("tableA"), Bytes.toBytes("bbb"), Bytes.toBytes("ddd"),
-        "locationA")));
+    TableSplit tablesplit = new TableSplit(tableA, aaa, ddd, locationA);
 
-    assertFalse(new TableSplit(Bytes.toBytes("tableA"), Bytes.toBytes("aaa"),
-        Bytes.toBytes("ddd"), "locationA").equals(new TableSplit(Bytes
-        .toBytes("tableA"), Bytes.toBytes("aaa"), Bytes.toBytes("eee"),
-        "locationA")));
+    TableSplit tableB = new TableSplit(Bytes.toBytes("tableB"), aaa, ddd, locationA);
+    assertNotEquals(tablesplit.hashCode(), tableB.hashCode());
+    assertNotEquals(tablesplit, tableB);
 
-    assertFalse(new TableSplit(Bytes.toBytes("tableA"), Bytes.toBytes("aaa"),
-        Bytes.toBytes("ddd"), "locationA").equals(new TableSplit(Bytes
-        .toBytes("tableA"), Bytes.toBytes("aaa"), Bytes.toBytes("ddd"),
-        "locationB")));
+    TableSplit startBbb = new TableSplit(tableA, Bytes.toBytes("bbb"), ddd, locationA);
+    assertNotEquals(tablesplit.hashCode(), startBbb.hashCode());
+    assertNotEquals(tablesplit, startBbb);
 
-    assertTrue(new TableSplit(Bytes.toBytes("tableA"), Bytes.toBytes("aaa"),
-        Bytes.toBytes("ddd"), "locationA").equals(new TableSplit(Bytes
-        .toBytes("tableA"), Bytes.toBytes("aaa"), Bytes.toBytes("ddd"),
-        "locationA")));
+    TableSplit endEee = new TableSplit(tableA, aaa, Bytes.toBytes("eee"), locationA);
+    assertNotEquals(tablesplit.hashCode(), endEee.hashCode());
+    assertNotEquals(tablesplit, endEee);
 
+    TableSplit locationB = new TableSplit(tableA, aaa, ddd, "locationB");
+    assertNotEquals(tablesplit.hashCode(), locationB.hashCode());
+    assertNotEquals(tablesplit, locationB);
 
+    TableSplit same = new TableSplit(tableA, aaa, ddd, locationA);
+    assertEquals(tablesplit.hashCode(), same.hashCode());
+    assertEquals(tablesplit, same);
   }
 }
@@ -0,0 +1,52 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.master;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;

import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.TableName;
import org.junit.Test;
import org.junit.experimental.categories.Category;

@Category(SmallTests.class)
public class TestRegionPlan {
  @Test
  public void test() {
    HRegionInfo hri = new HRegionInfo(TableName.valueOf("table"));
    ServerName source = ServerName.valueOf("source", 1234, 2345);
    ServerName dest = ServerName.valueOf("dest", 1234, 2345);

    // Identity equality
    RegionPlan plan = new RegionPlan(hri, source, dest);
    assertEquals(plan.hashCode(), new RegionPlan(hri, source, dest).hashCode());
    assertEquals(plan, new RegionPlan(hri, source, dest));

    // Source and destination not used for equality
    assertEquals(plan.hashCode(), new RegionPlan(hri, dest, source).hashCode());
    assertEquals(plan, new RegionPlan(hri, dest, source));

    // HRI is used for equality
    HRegionInfo other = new HRegionInfo(TableName.valueOf("other"));
    assertNotEquals(plan.hashCode(), new RegionPlan(other, source, dest).hashCode());
    assertNotEquals(plan, new RegionPlan(other, source, dest));
  }
}
@@ -0,0 +1,48 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.master.balancer;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;

import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.SmallTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;

@Category(SmallTests.class)
public class TestServerAndLoad {

  @Test
  public void test() {
    ServerName server = ServerName.valueOf("host", 12345, 112244);
    int startcode = 12;

    ServerAndLoad sal = new ServerAndLoad(server, startcode);
    assertEquals(sal.hashCode(), new ServerAndLoad(server, startcode).hashCode());
    assertEquals(sal, new ServerAndLoad(server, startcode));

    assertNotEquals(sal.hashCode(), new ServerAndLoad(server, startcode + 1).hashCode());
    assertNotEquals(sal, new ServerAndLoad(server, startcode + 1));

    ServerName other = ServerName.valueOf("other", 12345, 112244);
    assertNotEquals(sal.hashCode(), new ServerAndLoad(other, startcode).hashCode());
    assertNotEquals(sal, new ServerAndLoad(other, startcode));
  }

}
@@ -0,0 +1,47 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable
 * law or agreed to in writing, software distributed under the License is distributed on an "AS IS"
 * BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License
 * for the specific language governing permissions and limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import static org.junit.Assert.*;

import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.regionserver.MemStoreFlusher.FlushRegionEntry;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.ManualEnvironmentEdge;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;

@Category(SmallTests.class)
public class TestFlushRegionEntry {
  @Before
  public void setUp() throws Exception {
    ManualEnvironmentEdge edge = new ManualEnvironmentEdge();
    edge.setValue(12345);
    EnvironmentEdgeManager.injectEdge(edge);
  }

  @Test
  public void test() {
    FlushRegionEntry entry = new FlushRegionEntry(Mockito.mock(HRegion.class));
    FlushRegionEntry other = new FlushRegionEntry(Mockito.mock(HRegion.class));

    assertEquals(entry.hashCode(), other.hashCode());
    assertEquals(entry, other);
  }

  @After
  public void teardown() {
    EnvironmentEdgeManager.reset();
  }
}
@@ -0,0 +1,60 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.security.token;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;

import java.io.UnsupportedEncodingException;

import javax.crypto.SecretKey;

import org.apache.hadoop.hbase.SmallTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;

@Category(SmallTests.class)
public class TestAuthenticationKey {

  @Test
  public void test() throws UnsupportedEncodingException {
    SecretKey secret = Mockito.mock(SecretKey.class);
    Mockito.when(secret.getEncoded()).thenReturn("secret".getBytes("UTF-8"));

    AuthenticationKey key = new AuthenticationKey(0, 1234, secret);
    assertEquals(key.hashCode(), new AuthenticationKey(0, 1234, secret).hashCode());
    assertEquals(key, new AuthenticationKey(0, 1234, secret));

    AuthenticationKey otherID = new AuthenticationKey(1, 1234, secret);
    assertNotEquals(key.hashCode(), otherID.hashCode());
    assertNotEquals(key, otherID);

    AuthenticationKey otherExpiry = new AuthenticationKey(0, 8765, secret);
    assertNotEquals(key.hashCode(), otherExpiry.hashCode());
    assertNotEquals(key, otherExpiry);

    SecretKey other = Mockito.mock(SecretKey.class);
    Mockito.when(other.getEncoded()).thenReturn("other".getBytes("UTF-8"));

    AuthenticationKey otherSecret = new AuthenticationKey(0, 1234, other);
    assertNotEquals(key.hashCode(), otherSecret.hashCode());
    assertNotEquals(key, otherSecret);
  }

}