From 5792e670f2ef9854feeafef6c9dc5f8a70bc4ef9 Mon Sep 17 00:00:00 2001
From: Ben Manes <ben.manes@gmail.com>
Date: Mon, 22 Feb 2016 01:52:37 -0800
Subject: [PATCH] Avoid estimateSize in trySplit tests

Spliterator.estimateSize() is only an estimate, so asserting that a
spliterator's estimate plus its trySplit() half's estimate equals the
map's size fails for the SINGLETON and PARTIAL populations (the FIXME'd
ConcurrentHashMap bug resulting in two empty spliterators). Counting the
elements actually traversed by forEachRemaining() is exact, which lets
the trySplit tests run against every population. Also adds
NullPointerException tests for the spliterators' forEachRemaining and
tryAdvance, and re-enables testKeySetSpliterator.
---
 .../caffeine/cache/BoundedLocalCache.java     |  4 +-
 .../benmanes/caffeine/cache/AsMapTest.java    | 92 +++++++++++++++----
 .../java/jsr166/ConcurrentHashMap8Test.java   |  6 +-
 3 files changed, 78 insertions(+), 24 deletions(-)

diff --git a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/BoundedLocalCache.java b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/BoundedLocalCache.java
index 313aeb27e4..3f53835358 100644
--- a/caffeine/src/main/java/com/github/benmanes/caffeine/cache/BoundedLocalCache.java
+++ b/caffeine/src/main/java/com/github/benmanes/caffeine/cache/BoundedLocalCache.java
@@ -2549,7 +2549,7 @@ public void forEachRemaining(Consumer<? super Entry<K, V>> action) {
         K key = node.getKey();
         V value = node.getValue();
         if ((key != null) && (value != null) && !cache.hasExpired(node, now) && node.isAlive()) {
-          action.accept(new WriteThroughEntry(cache, key, value));
+          action.accept(new WriteThroughEntry<>(cache, key, value));
         }
       };
       spliterator.forEachRemaining(consumer);
@@ -2564,7 +2564,7 @@ public boolean tryAdvance(Consumer<? super Entry<K, V>> action) {
         K key = node.getKey();
         V value = node.getValue();
         if ((key != null) && (value != null) && !cache.hasExpired(node, now) && node.isAlive()) {
-          action.accept(new WriteThroughEntry(cache, key, value));
+          action.accept(new WriteThroughEntry<>(cache, key, value));
           advanced[0] = true;
         }
       };
diff --git a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/AsMapTest.java b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/AsMapTest.java
index c4dd9f73f8..ae5ac3d515 100644
--- a/caffeine/src/test/java/com/github/benmanes/caffeine/cache/AsMapTest.java
+++ b/caffeine/src/test/java/com/github/benmanes/caffeine/cache/AsMapTest.java
@@ -1571,6 +1571,14 @@ public void keyIterator_writerFails(Map<Integer, Integer> map, CacheContext cont
     }
   }
 
+  @CacheSpec
+  @CheckNoWriter @CheckNoStats
+  @Test(dataProvider = "caches", expectedExceptions = NullPointerException.class)
+  public void keySpliterator_forEachRemaining_null(
+      Map<Integer, Integer> map, CacheContext context) {
+    map.keySet().spliterator().forEachRemaining(null);
+  }
+
   @CacheSpec
   @CheckNoWriter @CheckNoStats
   @Test(dataProvider = "caches")
@@ -1580,6 +1588,14 @@ public void keySpliterator_forEachRemaining(Map<Integer, Integer> map, CacheCont
     assertThat(count[0], is(map.size()));
   }
 
+  @CacheSpec
+  @CheckNoWriter @CheckNoStats
+  @Test(dataProvider = "caches", expectedExceptions = NullPointerException.class)
+  public void keySpliterator_tryAdvance_null(
+      Map<Integer, Integer> map, CacheContext context) {
+    map.keySet().spliterator().tryAdvance(null);
+  }
+
   @CacheSpec
   @CheckNoWriter @CheckNoStats
   @Test(dataProvider = "caches")
@@ -1593,19 +1609,21 @@ public void keySpliterator_tryAdvance(Map<Integer, Integer> map, CacheContext co
     assertThat(count[0], is(map.size()));
   }
 
-  // FIXME: ConcurrentHashMap bug for SINGLETON and PARTIAL resulting in two empty spliterators
+  @CacheSpec
   @CheckNoWriter @CheckNoStats
   @Test(dataProvider = "caches")
-  @CacheSpec(population = {Population.EMPTY, Population.FULL})
   public void keySpliterator_trySplit(Map<Integer, Integer> map, CacheContext context) {
     Spliterator<Integer> spliterator = map.keySet().spliterator();
     Spliterator<Integer> other = MoreObjects.firstNonNull(
         spliterator.trySplit(), Spliterators.emptySpliterator());
-    int size = (int) (spliterator.estimateSize() + other.estimateSize());
-    assertThat(size, is(map.size()));
+
+    int[] count = new int[1];
+    spliterator.forEachRemaining(key -> count[0]++);
+    other.forEachRemaining(key -> count[0]++);
+    assertThat(count[0], is(map.size()));
   }
 
-  @CacheSpec(population = Population.SINGLETON)
+  @CacheSpec
   @CheckNoWriter @CheckNoStats
   @Test(dataProvider = "caches")
   public void keySpliterator_estimateSize(Map<Integer, Integer> map, CacheContext context) {
@@ -1768,6 +1786,14 @@ public void valueIterator_writerFails(Map<Integer, Integer> map, CacheContext co
     }
   }
 
+  @CacheSpec
+  @CheckNoWriter @CheckNoStats
+  @Test(dataProvider = "caches", expectedExceptions = NullPointerException.class)
+  public void valueSpliterator_forEachRemaining_null(
+      Map<Integer, Integer> map, CacheContext context) {
+    map.values().spliterator().forEachRemaining(null);
+  }
+
   @CacheSpec
   @CheckNoWriter @CheckNoStats
   @Test(dataProvider = "caches")
@@ -1777,6 +1803,14 @@ public void valueSpliterator_forEachRemaining(Map<Integer, Integer> map, CacheCo
     assertThat(count[0], is(map.size()));
   }
 
+  @CacheSpec
+  @CheckNoWriter @CheckNoStats
+  @Test(dataProvider = "caches", expectedExceptions = NullPointerException.class)
+  public void valueSpliterator_tryAdvance_null(
+      Map<Integer, Integer> map, CacheContext context) {
+    map.values().spliterator().tryAdvance(null);
+  }
+
   @CacheSpec
   @CheckNoWriter @CheckNoStats
   @Test(dataProvider = "caches")
@@ -1790,19 +1824,21 @@ public void valueSpliterator_tryAdvance(Map<Integer, Integer> map, CacheContext
     assertThat(count[0], is(map.size()));
   }
 
-  // FIXME: ConcurrentHashMap bug for SINGLETON and PARTIAL resulting in two empty spliterators
+  @CacheSpec
   @CheckNoWriter @CheckNoStats
   @Test(dataProvider = "caches")
-  @CacheSpec(population = {Population.EMPTY, Population.FULL})
   public void valueSpliterator_trySplit(Map<Integer, Integer> map, CacheContext context) {
     Spliterator<Integer> spliterator = map.values().spliterator();
     Spliterator<Integer> other = MoreObjects.firstNonNull(
         spliterator.trySplit(), Spliterators.emptySpliterator());
-    int size = (int) (spliterator.estimateSize() + other.estimateSize());
-    assertThat(size, is(map.size()));
+
+    int[] count = new int[1];
+    spliterator.forEachRemaining(value -> count[0]++);
+    other.forEachRemaining(value -> count[0]++);
+    assertThat(count[0], is(map.size()));
   }
 
-  @CacheSpec(population = Population.SINGLETON)
+  @CacheSpec
   @CheckNoWriter @CheckNoStats
   @Test(dataProvider = "caches")
   public void valueSpliterator_estimateSize(Map<Integer, Integer> map, CacheContext context) {
@@ -1969,10 +2005,18 @@ public void entryIterator_writerFails(Map<Integer, Integer> map, CacheContext co
     }
   }
 
+  @CacheSpec
+  @CheckNoWriter @CheckNoStats
+  @Test(dataProvider = "caches", expectedExceptions = NullPointerException.class)
+  public void entrySpliterator_forEachRemaining_null(
+      Map<Integer, Integer> map, CacheContext context) {
+    map.entrySet().spliterator().forEachRemaining(null);
+  }
+
   @CacheSpec
   @CheckNoWriter @CheckNoStats
   @Test(dataProvider = "caches")
-  public void entrySetSpliterator_forEachRemaining(
+  public void entrySpliterator_forEachRemaining(
       Map<Integer, Integer> map, CacheContext context) {
     int[] count = new int[1];
     map.entrySet().spliterator().forEachRemaining(entry -> {
@@ -1984,10 +2028,18 @@ public void entrySetSpliterator_forEachRemaining(
     assertThat(count[0], is(map.size()));
   }
 
+  @CacheSpec
+  @CheckNoWriter @CheckNoStats
+  @Test(dataProvider = "caches", expectedExceptions = NullPointerException.class)
+  public void entrySpliterator_tryAdvance_null(
+      Map<Integer, Integer> map, CacheContext context) {
+    map.entrySet().spliterator().tryAdvance(null);
+  }
+
   @CacheSpec
   @CheckNoWriter @CheckNoStats
   @Test(dataProvider = "caches")
-  public void entrySetSpliterator_tryAdvance(Map<Integer, Integer> map, CacheContext context) {
+  public void entrySpliterator_tryAdvance(Map<Integer, Integer> map, CacheContext context) {
     Spliterator<Entry<Integer, Integer>> spliterator = map.entrySet().spliterator();
     int[] count = new int[1];
     boolean advanced;
@@ -2002,22 +2054,24 @@ public void entrySetSpliterator_tryAdvance(Map<Integer, Integer> map, CacheConte
     assertThat(count[0], is(map.size()));
   }
 
-  // FIXME: ConcurrentHashMap bug for SINGLETON and PARTIAL resulting in two empty spliterators
+  @CacheSpec
   @CheckNoWriter @CheckNoStats
   @Test(dataProvider = "caches")
-  @CacheSpec(population = {Population.EMPTY, Population.FULL})
-  public void entrySetSpliterator_trySplit(Map<Integer, Integer> map, CacheContext context) {
+  public void entrySpliterator_trySplit(Map<Integer, Integer> map, CacheContext context) {
     Spliterator<Entry<Integer, Integer>> spliterator = map.entrySet().spliterator();
     Spliterator<Entry<Integer, Integer>> other = MoreObjects.firstNonNull(
         spliterator.trySplit(), Spliterators.emptySpliterator());
-    int size = (int) (spliterator.estimateSize() + other.estimateSize());
-    assertThat(size, is(map.size()));
+
+    int[] count = new int[1];
+    spliterator.forEachRemaining(entry -> count[0]++);
+    other.forEachRemaining(entry -> count[0]++);
+    assertThat(count[0], is(map.size()));
   }
 
-  @CacheSpec(population = Population.SINGLETON)
+  @CacheSpec
   @CheckNoWriter @CheckNoStats
   @Test(dataProvider = "caches")
-  public void entrySetSpliterator_estimateSize(Map<Integer, Integer> map, CacheContext context) {
+  public void entrySpliterator_estimateSize(Map<Integer, Integer> map, CacheContext context) {
     Spliterator<Entry<Integer, Integer>> spliterator = map.entrySet().spliterator();
     assertThat((int) spliterator.estimateSize(), is(map.size()));
   }
diff --git a/guava/src/test/java/jsr166/ConcurrentHashMap8Test.java b/guava/src/test/java/jsr166/ConcurrentHashMap8Test.java
index 682ec36eda..b7acdb1124 100644
--- a/guava/src/test/java/jsr166/ConcurrentHashMap8Test.java
+++ b/guava/src/test/java/jsr166/ConcurrentHashMap8Test.java
@@ -19,11 +19,11 @@
 import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.atomic.LongAdder;
 
+import com.github.benmanes.caffeine.cache.Caffeine;
+
 import junit.framework.Test;
 import junit.framework.TestSuite;
 
-import com.github.benmanes.caffeine.cache.Caffeine;
-
 @SuppressWarnings({"rawtypes", "unchecked"})
 public class ConcurrentHashMap8Test extends JSR166TestCase {
     public static void main(String[] args) {
@@ -306,7 +306,7 @@ void checkSpliteratorCharacteristics(Spliterator<?> sp,
     /**
      * KeySetView.spliterator returns spliterator over the elements in this set
      */
-    public void disabled_testKeySetSpliterator() {
+    public void testKeySetSpliterator() {
         LongAdder adder = new LongAdder();
         ConcurrentMap map = map5();
         Set set = map.keySet();
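
For illustration, the counting pattern used above is robust for any Map
implementation because it never relies on estimateSize(). A minimal
standalone sketch of the idea (the class name and single-entry map
contents are illustrative, not part of the patch):

import java.util.Map;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.concurrent.ConcurrentHashMap;

public class TrySplitCountExample {
  public static void main(String[] args) {
    Map<Integer, Integer> map = new ConcurrentHashMap<>();
    map.put(1, 1); // a single entry mirrors the SINGLETON population

    Spliterator<Integer> spliterator = map.keySet().spliterator();
    // trySplit() may return null when the source cannot be split further
    Spliterator<Integer> other = spliterator.trySplit();
    if (other == null) {
      other = Spliterators.emptySpliterator();
    }

    // estimateSize() is only an estimate, so the two halves' estimates may
    // not sum to map.size(); counting the traversed elements is exact.
    int[] count = new int[1];
    spliterator.forEachRemaining(key -> count[0]++);
    other.forEachRemaining(key -> count[0]++);
    System.out.println(count[0] == map.size()); // true
  }
}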