@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class) public void testSimpleGroupBy(String name, TestData.OfRef<Integer> data) throws ReflectiveOperationException { Function<Integer, Integer> classifier = i -> i % 3; // Single-level groupBy exerciseMapTabulation(data, groupingBy(classifier), new GroupedMapAssertion<>(classifier, HashMap.class, new ListAssertion<>())); exerciseMapTabulation(data, groupingByConcurrent(classifier), new GroupedMapAssertion<>(classifier, ConcurrentHashMap.class, new ListAssertion<>())); // With explicit constructors exerciseMapTabulation(data, groupingBy(classifier, TreeMap::new, toCollection(HashSet::new)), new GroupedMapAssertion<>(classifier, TreeMap.class, new CollectionAssertion<Integer>(HashSet.class, false))); exerciseMapTabulation(data, groupingByConcurrent(classifier, ConcurrentSkipListMap::new, toCollection(HashSet::new)), new GroupedMapAssertion<>(classifier, ConcurrentSkipListMap.class, new CollectionAssertion<Integer>(HashSet.class, false))); }
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class) public void testTwoLevelPartition(String name, TestData.OfRef<Integer> data) throws ReflectiveOperationException { Predicate<Integer> classifier = i -> i % 3 == 0; Predicate<Integer> classifier2 = i -> i % 7 == 0; // Two level partition exerciseMapTabulation(data, partitioningBy(classifier, partitioningBy(classifier2)), new PartitionAssertion<>(classifier, new PartitionAssertion(classifier2, new ListAssertion<>()))); // Two level partition with reduce exerciseMapTabulation(data, partitioningBy(classifier, reducing(0, Integer::sum)), new PartitionAssertion<>(classifier, new ReduceAssertion<>(0, LambdaTestHelpers.identity(), Integer::sum))); }
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class) public void testOps(String name, TestData.OfRef<Integer> data) { // @@@ More things to test here: // - Every value in data is present in right bucket // - Total number of values equals size of data for (MapperData<Integer, ?> md : getMapperData(data)) { Collector<Integer, ?, Map<Object, List<Integer>>> tab = Collectors.groupingBy(md.m); Map<Object, List<Integer>> result = withData(data) .terminal(s -> s, s -> s.collect(tab)) .resultAsserter((act, exp, ord, par) -> { if (par & !ord) { GroupByOpTest.assertMultiMapEquals(act, exp); } else { GroupByOpTest.assertObjectEquals(act, exp); } }) .exercise(); assertEquals(result.keySet().size(), md.expectedSize); } }
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class,
      groups = { "serialization-hostile" })
public void testSkipOps(String name, TestData.OfRef<Integer> data) {
    for (int s : sizes(data.size())) {
        setContext("skip", s);

        // A single skip(s).
        testSliceMulti(data, sliceSize(data.size(), s),
                       st -> st.skip(s),
                       st -> st.skip(s),
                       st -> st.skip(s),
                       st -> st.skip(s));

        // Two stacked skips: skip(s) then skip(s / 2).
        testSliceMulti(data, sliceSize(sliceSize(data.size(), s), s / 2),
                       st -> st.skip(s).skip(s / 2),
                       st -> st.skip(s).skip(s / 2),
                       st -> st.skip(s).skip(s / 2),
                       st -> st.skip(s).skip(s / 2));
    }
}
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class,
      groups = { "serialization-hostile" })
public void testSkipLimitOps(String name, TestData.OfRef<Integer> data) {
    // The same size ladder drives both the skip and the limit dimension.
    List<Integer> sliceBounds = sizes(data.size());
    for (int s : sliceBounds) {
        setContext("skip", s);
        for (int l : sliceBounds) {
            setContext("limit", l);
            // skip(s) followed by limit(l).
            testSliceMulti(data, sliceSize(sliceSize(data.size(), s), 0, l),
                           st -> st.skip(s).limit(l),
                           st -> st.skip(s).limit(l),
                           st -> st.skip(s).limit(l),
                           st -> st.skip(s).limit(l));
        }
    }
}
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class) public void testSimpleGroupingBy(String name, TestData.OfRef<Integer> data) throws ReflectiveOperationException { Function<Integer, Integer> classifier = i -> i % 3; // Single-level groupBy exerciseMapCollection(data, groupingBy(classifier), new GroupingByAssertion<>(classifier, HashMap.class, new ToListAssertion<>())); exerciseMapCollection(data, groupingByConcurrent(classifier), new GroupingByAssertion<>(classifier, ConcurrentHashMap.class, new ToListAssertion<>())); // With explicit constructors exerciseMapCollection(data, groupingBy(classifier, TreeMap::new, toCollection(HashSet::new)), new GroupingByAssertion<>(classifier, TreeMap.class, new ToCollectionAssertion<Integer>(HashSet.class, false))); exerciseMapCollection(data, groupingByConcurrent(classifier, ConcurrentSkipListMap::new, toCollection(HashSet::new)), new GroupingByAssertion<>(classifier, ConcurrentSkipListMap.class, new ToCollectionAssertion<Integer>(HashSet.class, false))); }
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class)
public void testGroupingByWithFlatMapping(String name, TestData.OfRef<Integer> data) throws ReflectiveOperationException {
    Function<Integer, Integer> classifier = i -> i % 3;
    Function<Integer, Stream<Integer>> nullMapper = i -> null;
    Function<Integer, Stream<Integer>> emptyMapper = i -> Stream.empty();
    Function<Integer, Stream<Integer>> doublingMapper = i -> Stream.of(i, i);

    // flatMapping treats a null stream from the mapper the same as an empty
    // stream, so the null-returning mapper is deliberately asserted against
    // the empty-stream model (this is not a copy/paste mistake).
    exerciseMapCollection(data, groupingBy(classifier, flatMapping(nullMapper, toList())),
                          new GroupingByAssertion<>(classifier, HashMap.class,
                                                    new FlatMappingAssertion<>(emptyMapper,
                                                                               new ToListAssertion<>())));
    exerciseMapCollection(data, groupingBy(classifier, flatMapping(emptyMapper, toList())),
                          new GroupingByAssertion<>(classifier, HashMap.class,
                                                    new FlatMappingAssertion<>(emptyMapper,
                                                                               new ToListAssertion<>())));
    exerciseMapCollection(data, groupingBy(classifier, flatMapping(doublingMapper, toList())),
                          new GroupingByAssertion<>(classifier, HashMap.class,
                                                    new FlatMappingAssertion<>(doublingMapper,
                                                                               new ToListAssertion<>())));
}
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class) public void testTwoLevelPartitioningBy(String name, TestData.OfRef<Integer> data) throws ReflectiveOperationException { Predicate<Integer> classifier = i -> i % 3 == 0; Predicate<Integer> classifier2 = i -> i % 7 == 0; // Two level partition exerciseMapCollection(data, partitioningBy(classifier, partitioningBy(classifier2)), new PartitioningByAssertion<>(classifier, new PartitioningByAssertion(classifier2, new ToListAssertion<>()))); // Two level partition with reduce exerciseMapCollection(data, partitioningBy(classifier, reducing(0, Integer::sum)), new PartitioningByAssertion<>(classifier, new ReducingAssertion<>(0, LambdaTestHelpers.identity(), Integer::sum))); }
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class,
      groups = { "serialization-hostile" })
public void testTakeWhileOps(String name, TestData.OfRef<Integer> data) {
    for (int size : sizes(data.size())) {
        setContext("takeWhile", size);

        // A single takeWhile below the cut-off.
        testWhileMulti(data,
                       whileResultAsserter(data, WhileOp.Take, e -> e < size),
                       s -> s.takeWhile(e -> e < size),
                       s -> s.takeWhile(e -> e < size),
                       s -> s.takeWhile(e -> e < size),
                       s -> s.takeWhile(e -> e < size));

        // Composed takeWhiles: the tighter predicate (size / 2) dominates, so
        // the expected result is that of takeWhile(e -> e < size / 2) alone.
        testWhileMulti(data,
                       whileResultAsserter(data, WhileOp.Take, e -> e < size / 2),
                       s -> s.takeWhile(e -> e < size).takeWhile(e -> e < size / 2),
                       s -> s.takeWhile(e -> e < size).takeWhile(e -> e < size / 2),
                       s -> s.takeWhile(e -> e < size).takeWhile(e -> e < size / 2),
                       s -> s.takeWhile(e -> e < size).takeWhile(e -> e < size / 2));
    }
}
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class,
      groups = { "serialization-hostile" })
public void testDropWhileOps(String name, TestData.OfRef<Integer> data) {
    for (int size : sizes(data.size())) {
        setContext("dropWhile", size);

        // A single dropWhile below the cut-off.
        testWhileMulti(data,
                       whileResultAsserter(data, WhileOp.Drop, e -> e < size),
                       s -> s.dropWhile(e -> e < size),
                       s -> s.dropWhile(e -> e < size),
                       s -> s.dropWhile(e -> e < size),
                       s -> s.dropWhile(e -> e < size));

        // Composed dropWhiles: the looser predicate (size) dominates, so the
        // expected result is that of dropWhile(e -> e < size) alone.
        testWhileMulti(data,
                       whileResultAsserter(data, WhileOp.Drop, e -> e < size),
                       s -> s.dropWhile(e -> e < size / 2).dropWhile(e -> e < size),
                       s -> s.dropWhile(e -> e < size / 2).dropWhile(e -> e < size),
                       s -> s.dropWhile(e -> e < size / 2).dropWhile(e -> e < size),
                       s -> s.dropWhile(e -> e < size / 2).dropWhile(e -> e < size));
    }
}