Remove unused DoubleObjectPagedHashMap.

Adrien Grand 2015-01-13 10:49:32 +01:00
parent be1610ba63
commit d583080f20
2 changed files with 0 additions and 258 deletions

DoubleObjectPagedHashMap.java

@@ -1,199 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.util;
import com.google.common.collect.UnmodifiableIterator;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.lease.Releasables;
import java.util.Iterator;
import java.util.NoSuchElementException;
/**
* A hash table from native doubles to objects. This implementation resolves collisions
* using open-addressing and does not support null values. This class is not thread-safe.
*/
public class DoubleObjectPagedHashMap<T> extends AbstractPagedHashMap implements Iterable<DoubleObjectPagedHashMap.Cursor<T>> {
private DoubleArray keys;
private ObjectArray<T> values;
public DoubleObjectPagedHashMap(BigArrays bigArrays) {
this(16, bigArrays);
}
public DoubleObjectPagedHashMap(long capacity, BigArrays bigArrays) {
this(capacity, DEFAULT_MAX_LOAD_FACTOR, bigArrays);
}
public DoubleObjectPagedHashMap(long capacity, float maxLoadFactor, BigArrays bigArrays) {
super(capacity, maxLoadFactor, bigArrays);
keys = bigArrays.newDoubleArray(capacity(), false);
values = bigArrays.newObjectArray(capacity());
}
/**
* Get the value associated with <code>key</code>, or null if <code>key</code>
* is not present in the hash table.
*/
public T get(double key) {
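// Linear probing: since null values are rejected, a null value marks an
// empty bucket and ends the search.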
for (long i = slot(hash(key), mask); ; i = nextSlot(i, mask)) {
final T value = values.get(i);
if (value == null) {
return null;
} else if (keys.get(i) == key) {
return value;
}
}
}
/**
* Associate <code>key</code> with <code>value</code> in this hash table and
* return the value previously associated with <code>key</code>, or null if
* this is a new entry.
*/
public T put(double key, T value) {
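// Grow before inserting so that set() below is guaranteed to find either
// the key or a free slot.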
if (size >= maxSize) {
assert size == maxSize;
grow();
}
assert size < maxSize;
return set(key, value);
}
/**
* Remove the entry for <code>key</code> from the hash table and return the
* associated value, or null if there was no entry for this key.
*/
public T remove(double key) {
for (long i = slot(hash(key), mask); ; i = nextSlot(i, mask)) {
final T previous = values.set(i, null);
if (previous == null) {
return null;
} else if (keys.get(i) == key) {
--size;
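// Re-insert the entries that follow in the probe chain so that a later
// lookup never stops early at the hole we just created.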
for (long j = nextSlot(i, mask); used(j); j = nextSlot(j, mask)) {
removeAndAdd(j);
}
return previous;
} else {
// repair and continue
values.set(i, previous);
}
}
}
private T set(double key, T value) {
if (value == null) {
throw new IllegalArgumentException("Null values are not supported");
}
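// Optimistically write into each probed slot: a free slot means a new
// entry, the same key means an update; otherwise restore the previous
// value and keep probing.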
for (long i = slot(hash(key), mask); ; i = nextSlot(i, mask)) {
final T previous = values.set(i, value);
if (previous == null) {
// slot was free
keys.set(i, key);
++size;
return null;
} else if (key == keys.get(i)) {
// we just updated the value
return previous;
} else {
// not the right key, repair and continue
values.set(i, previous);
}
}
}
@Override
public Iterator<Cursor<T>> iterator() {
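// A single Cursor instance is reused for all elements: hasNext() lazily
// advances it to the next used bucket, so callers must not retain
// returned cursors across iterations.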
return new UnmodifiableIterator<Cursor<T>>() {
boolean cached;
final Cursor<T> cursor;
{
cursor = new Cursor<>();
cursor.index = -1;
cached = false;
}
@Override
public boolean hasNext() {
if (!cached) {
while (true) {
++cursor.index;
if (cursor.index >= capacity()) {
break;
} else if (used(cursor.index)) {
cursor.key = keys.get(cursor.index);
cursor.value = values.get(cursor.index);
break;
}
}
cached = true;
}
return cursor.index < capacity();
}
@Override
public Cursor<T> next() {
if (!hasNext()) {
throw new NoSuchElementException();
}
cached = false;
return cursor;
}
};
}
@Override
public void close() throws ElasticsearchException {
Releasables.close(keys, values);
}
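// Called by the base class when the table grows: resize the backing
// arrays; existing entries are then rehashed via removeAndAdd.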
@Override
protected void resize(long capacity) {
keys = bigArrays.resize(keys, capacity);
values = bigArrays.resize(values, capacity);
}
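// A bucket is in use iff it holds a non-null value, which is why set()
// rejects null values.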
@Override
protected boolean used(long bucket) {
return values.get(bucket) != null;
}
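// Rehashing hook: remove the entry from its current bucket and re-insert
// it where its hash now points; the --size here is balanced by the
// ++size in set().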
@Override
protected void removeAndAdd(long index) {
final double key = keys.get(index);
final T value = values.set(index, null);
--size;
final T removed = set(key, value);
assert removed == null;
}
public static final class Cursor<T> {
public long index;
public double key;
public T value;
}
}

DoubleObjectHashMapTests.java

@@ -1,59 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.util;
import com.carrotsearch.hppc.DoubleObjectOpenHashMap;
import org.elasticsearch.test.ElasticsearchSingleNodeTest;
import org.junit.Test;
public class DoubleObjectHashMapTests extends ElasticsearchSingleNodeTest {
@Test
public void duel() {
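// Duel test: apply an identical random sequence of puts and removes to an
// HPPC reference map and to the paged map, then verify both agree.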
final DoubleObjectOpenHashMap<Object> map1 = new DoubleObjectOpenHashMap<>();
final DoubleObjectPagedHashMap<Object> map2 = new DoubleObjectPagedHashMap<>(randomInt(42), 0.6f + randomFloat() * 0.39f, BigArraysTests.randombigArrays());
final int maxKey = randomIntBetween(1, 10000);
final int iters = scaledRandomIntBetween(10000, 100000);
for (int i = 0; i < iters; ++i) {
final boolean put = randomBoolean();
final int iters2 = randomIntBetween(1, 100);
for (int j = 0; j < iters2; ++j) {
final double key = randomInt(maxKey);
if (put) {
final Object value = new Object();
assertSame(map1.put(key, value), map2.put(key, value));
} else {
assertSame(map1.remove(key), map2.remove(key));
}
assertEquals(map1.size(), map2.size());
}
}
for (int i = 0; i <= maxKey; ++i) {
assertSame(map1.get(i), map2.get(i));
}
final DoubleObjectOpenHashMap<Object> copy = new DoubleObjectOpenHashMap<>();
for (DoubleObjectPagedHashMap.Cursor<Object> cursor : map2) {
copy.put(cursor.key, cursor.value);
}
map2.close();
assertEquals(map1, copy);
}
}
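
For context, a minimal sketch of how the removed class was typically used. BigArrays.NON_RECYCLING_INSTANCE and the sample keys and values are illustrative assumptions, not taken from this commit:

import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.DoubleObjectPagedHashMap;

public class Example {
    public static void main(String[] args) {
        // A non-recycling BigArrays instance keeps the sketch self-contained
        // (assumed available, as in other Elasticsearch utility tests).
        DoubleObjectPagedHashMap<String> map =
                new DoubleObjectPagedHashMap<>(BigArrays.NON_RECYCLING_INSTANCE);
        try {
            map.put(1.5, "first");          // returns null: new entry
            map.put(1.5, "second");         // returns "first": value updated
            String missing = map.get(2.5);  // returns null: key never inserted
            // The same Cursor instance is reused on every iteration step.
            for (DoubleObjectPagedHashMap.Cursor<String> cursor : map) {
                System.out.println(cursor.key + " -> " + cursor.value);
            }
        } finally {
            map.close();                    // release the underlying pages
        }
    }
}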