Skip to content

Commit

Permalink
Avoid estimateSize in trySplit tests
Browse files Browse the repository at this point in the history
  • Loading branch information
ben-manes committed Feb 22, 2016
1 parent da23464 commit 5792e67
Show file tree
Hide file tree
Showing 3 changed files with 78 additions and 24 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -2549,7 +2549,7 @@ public void forEachRemaining(Consumer<? super Entry<K, V>> action) {
K key = node.getKey();
V value = node.getValue();
if ((key != null) && (value != null) && !cache.hasExpired(node, now) && node.isAlive()) {
action.accept(new WriteThroughEntry<K, V>(cache, key, value));
action.accept(new WriteThroughEntry<>(cache, key, value));
}
};
spliterator.forEachRemaining(consumer);
Expand All @@ -2564,7 +2564,7 @@ public boolean tryAdvance(Consumer<? super Entry<K, V>> action) {
K key = node.getKey();
V value = node.getValue();
if ((key != null) && (value != null) && !cache.hasExpired(node, now) && node.isAlive()) {
action.accept(new WriteThroughEntry<K, V>(cache, key, value));
action.accept(new WriteThroughEntry<>(cache, key, value));
advanced[0] = true;
}
};
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1571,6 +1571,14 @@ public void keyIterator_writerFails(Map<Integer, Integer> map, CacheContext cont
}
}

@CacheSpec
@CheckNoWriter @CheckNoStats
@Test(dataProvider = "caches", expectedExceptions = NullPointerException.class)
public void keySpliterator_forEachRemaining_null(
Map<Integer, Integer> map, CacheContext context) {
map.keySet().spliterator().forEachRemaining(null);
}

@CacheSpec
@CheckNoWriter @CheckNoStats
@Test(dataProvider = "caches")
Expand All @@ -1580,6 +1588,14 @@ public void keySpliterator_forEachRemaining(Map<Integer, Integer> map, CacheCont
assertThat(count[0], is(map.size()));
}

@CacheSpec
@CheckNoWriter @CheckNoStats
@Test(dataProvider = "caches", expectedExceptions = NullPointerException.class)
public void keySpliterator_tryAdvance_null(
Map<Integer, Integer> map, CacheContext context) {
map.keySet().spliterator().tryAdvance(null);
}

@CacheSpec
@CheckNoWriter @CheckNoStats
@Test(dataProvider = "caches")
Expand All @@ -1593,19 +1609,21 @@ public void keySpliterator_tryAdvance(Map<Integer, Integer> map, CacheContext co
assertThat(count[0], is(map.size()));
}

// FIXME: ConcurrentHashMap bug for SINGLETON and PARTIAL resulting in two empty spliterators
@CacheSpec
@CheckNoWriter @CheckNoStats
@Test(dataProvider = "caches")
@CacheSpec(population = {Population.EMPTY, Population.FULL})
public void keySpliterator_trySplit(Map<Integer, Integer> map, CacheContext context) {
Spliterator<Integer> spliterator = map.keySet().spliterator();
Spliterator<Integer> other = MoreObjects.firstNonNull(
spliterator.trySplit(), Spliterators.emptySpliterator());
int size = (int) (spliterator.estimateSize() + other.estimateSize());
assertThat(size, is(map.size()));

int[] count = new int[1];
spliterator.forEachRemaining(key -> count[0]++);
other.forEachRemaining(key -> count[0]++);
assertThat(count[0], is(map.size()));
}

@CacheSpec(population = Population.SINGLETON)
@CacheSpec
@CheckNoWriter @CheckNoStats
@Test(dataProvider = "caches")
public void keySpliterator_estimateSize(Map<Integer, Integer> map, CacheContext context) {
Expand Down Expand Up @@ -1768,6 +1786,14 @@ public void valueIterator_writerFails(Map<Integer, Integer> map, CacheContext co
}
}

@CacheSpec
@CheckNoWriter @CheckNoStats
@Test(dataProvider = "caches", expectedExceptions = NullPointerException.class)
public void valueSpliterator_forEachRemaining_null(
Map<Integer, Integer> map, CacheContext context) {
map.values().spliterator().forEachRemaining(null);
}

@CacheSpec
@CheckNoWriter @CheckNoStats
@Test(dataProvider = "caches")
Expand All @@ -1777,6 +1803,14 @@ public void valueSpliterator_forEachRemaining(Map<Integer, Integer> map, CacheCo
assertThat(count[0], is(map.size()));
}

@CacheSpec
@CheckNoWriter @CheckNoStats
@Test(dataProvider = "caches", expectedExceptions = NullPointerException.class)
public void valueSpliterator_tryAdvance_null(
Map<Integer, Integer> map, CacheContext context) {
map.values().spliterator().tryAdvance(null);
}

@CacheSpec
@CheckNoWriter @CheckNoStats
@Test(dataProvider = "caches")
Expand All @@ -1790,19 +1824,21 @@ public void valueSpliterator_tryAdvance(Map<Integer, Integer> map, CacheContext
assertThat(count[0], is(map.size()));
}

// FIXME: ConcurrentHashMap bug for SINGLETON and PARTIAL resulting in two empty spliterators
@CacheSpec
@CheckNoWriter @CheckNoStats
@Test(dataProvider = "caches")
@CacheSpec(population = {Population.EMPTY, Population.FULL})
public void valueSpliterator_trySplit(Map<Integer, Integer> map, CacheContext context) {
Spliterator<Integer> spliterator = map.values().spliterator();
Spliterator<Integer> other = MoreObjects.firstNonNull(
spliterator.trySplit(), Spliterators.emptySpliterator());
int size = (int) (spliterator.estimateSize() + other.estimateSize());
assertThat(size, is(map.size()));

int[] count = new int[1];
spliterator.forEachRemaining(value -> count[0]++);
other.forEachRemaining(value -> count[0]++);
assertThat(count[0], is(map.size()));
}

@CacheSpec(population = Population.SINGLETON)
@CacheSpec
@CheckNoWriter @CheckNoStats
@Test(dataProvider = "caches")
public void valueSpliterator_estimateSize(Map<Integer, Integer> map, CacheContext context) {
Expand Down Expand Up @@ -1969,10 +2005,18 @@ public void entryIterator_writerFails(Map<Integer, Integer> map, CacheContext co
}
}

@CacheSpec
@CheckNoWriter @CheckNoStats
@Test(dataProvider = "caches", expectedExceptions = NullPointerException.class)
public void entrySpliterator_forEachRemaining_null(
Map<Integer, Integer> map, CacheContext context) {
map.entrySet().spliterator().forEachRemaining(null);
}

@CacheSpec
@CheckNoWriter @CheckNoStats
@Test(dataProvider = "caches")
public void entrySetSpliterator_forEachRemaining(
public void entrySpliterator_forEachRemaining(
Map<Integer, Integer> map, CacheContext context) {
int[] count = new int[1];
map.entrySet().spliterator().forEachRemaining(entry -> {
Expand All @@ -1984,10 +2028,18 @@ public void entrySetSpliterator_forEachRemaining(
assertThat(count[0], is(map.size()));
}

@CacheSpec
@CheckNoWriter @CheckNoStats
@Test(dataProvider = "caches", expectedExceptions = NullPointerException.class)
public void entrySpliterator_tryAdvance_null(
Map<Integer, Integer> map, CacheContext context) {
map.entrySet().spliterator().tryAdvance(null);
}

@CacheSpec
@CheckNoWriter @CheckNoStats
@Test(dataProvider = "caches")
public void entrySetSpliterator_tryAdvance(Map<Integer, Integer> map, CacheContext context) {
public void entrySpliterator_tryAdvance(Map<Integer, Integer> map, CacheContext context) {
Spliterator<Entry<Integer, Integer>> spliterator = map.entrySet().spliterator();
int[] count = new int[1];
boolean advanced;
Expand All @@ -2002,22 +2054,24 @@ public void entrySetSpliterator_tryAdvance(Map<Integer, Integer> map, CacheConte
assertThat(count[0], is(map.size()));
}

// FIXME: ConcurrentHashMap bug for SINGLETON and PARTIAL resulting in two empty spliterators
@CacheSpec
@CheckNoWriter @CheckNoStats
@Test(dataProvider = "caches")
@CacheSpec(population = {Population.EMPTY, Population.FULL})
public void entrySetSpliterator_trySplit(Map<Integer, Integer> map, CacheContext context) {
public void entrySpliterator_trySplit(Map<Integer, Integer> map, CacheContext context) {
Spliterator<Entry<Integer, Integer>> spliterator = map.entrySet().spliterator();
Spliterator<Entry<Integer, Integer>> other = MoreObjects.firstNonNull(
spliterator.trySplit(), Spliterators.emptySpliterator());
int size = (int) (spliterator.estimateSize() + other.estimateSize());
assertThat(size, is(map.size()));

int[] count = new int[1];
spliterator.forEachRemaining(entry -> count[0]++);
other.forEachRemaining(entry -> count[0]++);
assertThat(count[0], is(map.size()));
}

@CacheSpec(population = Population.SINGLETON)
@CacheSpec
@CheckNoWriter @CheckNoStats
@Test(dataProvider = "caches")
public void entrySetSpliterator_estimateSize(Map<Integer, Integer> map, CacheContext context) {
public void entrySpliterator_estimateSize(Map<Integer, Integer> map, CacheContext context) {
Spliterator<Entry<Integer, Integer>> spliterator = map.entrySet().spliterator();
assertThat((int) spliterator.estimateSize(), is(map.size()));
}
Expand Down
6 changes: 3 additions & 3 deletions guava/src/test/java/jsr166/ConcurrentHashMap8Test.java
Original file line number Diff line number Diff line change
Expand Up @@ -19,11 +19,11 @@
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.LongAdder;

import com.github.benmanes.caffeine.cache.Caffeine;

import junit.framework.Test;
import junit.framework.TestSuite;

import com.github.benmanes.caffeine.cache.Caffeine;

@SuppressWarnings({"rawtypes", "unchecked"})
public class ConcurrentHashMap8Test extends JSR166TestCase {
public static void main(String[] args) {
Expand Down Expand Up @@ -306,7 +306,7 @@ void checkSpliteratorCharacteristics(Spliterator<?> sp,
/**
* KeySetView.spliterator returns spliterator over the elements in this set
*/
public void disabled_testKeySetSpliterator() {
public void testKeySetSpliterator() {
LongAdder adder = new LongAdder();
ConcurrentMap map = map5();
Set set = map.keySet();
Expand Down

0 comments on commit 5792e67

Please sign in to comment.