Merge pull request #544 from metamx/java-8-fix-tests

Make tests pass with Java 8
This commit is contained in:
fjy 2014-05-19 18:11:52 -06:00
commit 1b6e816c28
4 changed files with 63 additions and 19 deletions

View File

@ -19,8 +19,11 @@
package io.druid.timeline; package io.druid.timeline;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.Ordering; import com.google.common.collect.Ordering;
import com.google.common.collect.Sets;
import com.metamx.common.Pair; import com.metamx.common.Pair;
import io.druid.timeline.partition.ImmutablePartitionHolder; import io.druid.timeline.partition.ImmutablePartitionHolder;
import io.druid.timeline.partition.IntegerPartitionChunk; import io.druid.timeline.partition.IntegerPartitionChunk;
@ -38,6 +41,7 @@ import org.junit.Test;
import java.util.Arrays; import java.util.Arrays;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.Set;
/** /**
*/ */
@ -1104,13 +1108,13 @@ public class VersionedIntervalTimelineTest
add("2011-01-01/2011-01-20", "3", 5); add("2011-01-01/2011-01-20", "3", 5);
assertValues( assertValues(
Arrays.asList( Sets.newHashSet(
createExpected("2011-01-02/2011-01-08", "2", 3), createExpected("2011-01-02/2011-01-08", "2", 3),
createExpected("2011-01-10/2011-01-16", "2", 4), createExpected("2011-01-10/2011-01-16", "2", 4),
createExpected("2011-01-03/2011-01-06", "1", 1), createExpected("2011-01-03/2011-01-06", "1", 1),
createExpected("2011-01-09/2011-01-12", "1", 2) createExpected("2011-01-09/2011-01-12", "1", 2)
), ),
timeline.findOvershadowed() Sets.newHashSet(timeline.findOvershadowed())
); );
} }
@ -1128,12 +1132,12 @@ public class VersionedIntervalTimelineTest
add("2011-01-01/2011-01-10", "3", 4); add("2011-01-01/2011-01-10", "3", 4);
assertValues( assertValues(
Arrays.asList( Sets.newHashSet(
createExpected("2011-01-01/2011-01-05", "2", 1), createExpected("2011-01-01/2011-01-05", "2", 1),
createExpected("2011-01-05/2011-01-10", "2", 2), createExpected("2011-01-05/2011-01-10", "2", 2),
createExpected("2011-01-01/2011-01-10", "1", 3) createExpected("2011-01-01/2011-01-10", "1", 3)
), ),
timeline.findOvershadowed() Sets.newHashSet(timeline.findOvershadowed())
); );
} }
@ -1151,11 +1155,11 @@ public class VersionedIntervalTimelineTest
add("2011-01-01/2011-01-10", "3", 4); add("2011-01-01/2011-01-10", "3", 4);
assertValues( assertValues(
Arrays.asList( Sets.newHashSet(
createExpected("2011-01-03/2011-01-12", "1", 3), createExpected("2011-01-03/2011-01-12", "1", 3),
createExpected("2011-01-01/2011-01-05", "2", 1) createExpected("2011-01-01/2011-01-05", "2", 1)
), ),
timeline.findOvershadowed() Sets.newHashSet(timeline.findOvershadowed())
); );
} }
@ -1342,12 +1346,12 @@ public class VersionedIntervalTimelineTest
add("2011-04-01/2011-04-12", "2", 1); add("2011-04-01/2011-04-12", "2", 1);
assertValues( assertValues(
Arrays.asList( Sets.newHashSet(
createExpected("2011-04-01/2011-04-03", "1", 2), createExpected("2011-04-01/2011-04-03", "1", 2),
createExpected("2011-04-03/2011-04-06", "1", 3), createExpected("2011-04-03/2011-04-06", "1", 3),
createExpected("2011-04-09/2011-04-12", "1", 4) createExpected("2011-04-09/2011-04-12", "1", 4)
), ),
timeline.findOvershadowed() Sets.newHashSet(timeline.findOvershadowed())
); );
} }
@ -1444,11 +1448,11 @@ public class VersionedIntervalTimelineTest
add("2011-04-03/2011-04-06", "1", 3); add("2011-04-03/2011-04-06", "1", 3);
assertValues( assertValues(
Arrays.asList( Sets.newHashSet(
createExpected("2011-04-03/2011-04-06", "1", 3), createExpected("2011-04-03/2011-04-06", "1", 3),
createExpected("2011-04-09/2011-04-12", "1", 3) createExpected("2011-04-09/2011-04-12", "1", 3)
), ),
timeline.findOvershadowed() Sets.newHashSet(timeline.findOvershadowed())
); );
} }
@ -1462,11 +1466,11 @@ public class VersionedIntervalTimelineTest
add("2011-04-01/2011-04-09", "2", 3); add("2011-04-01/2011-04-09", "2", 3);
assertValues( assertValues(
Arrays.asList( Sets.newHashSet(
createExpected("2011-04-01/2011-04-09", "2", 3), createExpected("2011-04-01/2011-04-09", "2", 3),
createExpected("2011-04-01/2011-04-09", "1", 1) createExpected("2011-04-01/2011-04-09", "1", 1)
), ),
timeline.findOvershadowed() Sets.newHashSet(timeline.findOvershadowed())
); );
} }
@ -1481,11 +1485,11 @@ public class VersionedIntervalTimelineTest
add("2011-04-01/2011-04-09", "9", 4); add("2011-04-01/2011-04-09", "9", 4);
assertValues( assertValues(
Arrays.asList( Sets.newHashSet(
createExpected("2011-04-01/2011-04-09", "2", 3), createExpected("2011-04-01/2011-04-09", "2", 3),
createExpected("2011-04-01/2011-04-09", "1", 1) createExpected("2011-04-01/2011-04-09", "1", 1)
), ),
timeline.findOvershadowed() Sets.newHashSet(timeline.findOvershadowed())
); );
} }
@ -1559,6 +1563,33 @@ public class VersionedIntervalTimelineTest
} }
} }
/**
 * Asserts that the timeline holders in {@code actual} describe exactly the
 * (interval, (version, partitionHolder)) tuples in {@code expected}.
 * Comparison is order-insensitive, which is why both sides are sets.
 */
private void assertValues(
    Set<Pair<Interval, Pair<String, PartitionHolder<Integer>>>> expected,
    Set<TimelineObjectHolder<String, Integer>> actual
)
{
  // Fail fast with a clearer message if the cardinalities already disagree.
  Assert.assertEquals("Sizes did not match.", expected.size(), actual.size());

  // Flatten each holder into the same tuple shape as the expected entries
  // so the two sets can be compared directly.
  final Set<Pair<Interval, Pair<String, PartitionHolder<Integer>>>> flattened = Sets.newHashSet();
  for (TimelineObjectHolder<String, Integer> holder : actual) {
    flattened.add(
        new Pair<>(holder.getInterval(), new Pair<>(holder.getVersion(), holder.getObject()))
    );
  }

  Assert.assertEquals(expected, flattened);
}
private VersionedIntervalTimeline<String, Integer> makeStringIntegerTimeline() private VersionedIntervalTimeline<String, Integer> makeStringIntegerTimeline()
{ {
return new VersionedIntervalTimeline<String, Integer>(Ordering.<String>natural()); return new VersionedIntervalTimeline<String, Integer>(Ordering.<String>natural());

View File

@ -22,6 +22,7 @@ package io.druid.client.cache;
import com.metamx.common.logger.Logger; import com.metamx.common.logger.Logger;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.util.Collections;
import java.util.LinkedHashMap; import java.util.LinkedHashMap;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
@ -110,13 +111,13 @@ class ByteCountingLRUMap extends LinkedHashMap<ByteBuffer, byte[]>
} }
/** /**
* We want keySet().iterator().remove() to account for object removal * Don't allow key removal using the underlying keySet iterator
* The underlying Map calls this.remove(key) so we do not need to override this * All removal operations must use ByteCountingLRUMap.remove()
*/ */
@Override @Override
public Set<ByteBuffer> keySet() public Set<ByteBuffer> keySet()
{ {
return super.keySet(); return Collections.unmodifiableSet(super.keySet());
} }
@Override @Override

View File

@ -19,12 +19,14 @@
package io.druid.client.cache; package io.druid.client.cache;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps; import com.google.common.collect.Maps;
import com.google.common.primitives.Ints; import com.google.common.primitives.Ints;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.util.Collections; import java.util.Collections;
import java.util.Iterator; import java.util.Iterator;
import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicLong;
@ -121,6 +123,7 @@ public class MapCache implements Cache
} }
synchronized (clearLock) { synchronized (clearLock) {
Iterator<ByteBuffer> iter = baseMap.keySet().iterator(); Iterator<ByteBuffer> iter = baseMap.keySet().iterator();
List<ByteBuffer> toRemove = Lists.newLinkedList();
while (iter.hasNext()) { while (iter.hasNext()) {
ByteBuffer next = iter.next(); ByteBuffer next = iter.next();
@ -128,9 +131,12 @@ public class MapCache implements Cache
&& next.get(1) == idBytes[1] && next.get(1) == idBytes[1]
&& next.get(2) == idBytes[2] && next.get(2) == idBytes[2]
&& next.get(3) == idBytes[3]) { && next.get(3) == idBytes[3]) {
iter.remove(); toRemove.add(next);
} }
} }
for(ByteBuffer key : toRemove) {
baseMap.remove(key);
}
} }
} }

View File

@ -19,12 +19,14 @@
package io.druid.client.cache; package io.druid.client.cache;
import com.google.common.collect.Lists;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.util.Iterator; import java.util.Iterator;
import java.util.List;
/** /**
*/ */
@ -68,12 +70,16 @@ public class ByteCountingLRUMapTest
Assert.assertEquals(oneByte, ByteBuffer.wrap(map.get(twoByte))); Assert.assertEquals(oneByte, ByteBuffer.wrap(map.get(twoByte)));
Iterator<ByteBuffer> it = map.keySet().iterator(); Iterator<ByteBuffer> it = map.keySet().iterator();
List<ByteBuffer> toRemove = Lists.newLinkedList();
while(it.hasNext()) { while(it.hasNext()) {
ByteBuffer buf = it.next(); ByteBuffer buf = it.next();
if(buf.remaining() == 10) { if(buf.remaining() == 10) {
it.remove(); toRemove.add(buf);
} }
} }
for(ByteBuffer buf : toRemove) {
map.remove(buf);
}
assertMapValues(1, 3, 2); assertMapValues(1, 3, 2);
map.remove(twoByte); map.remove(twoByte);