import
java
.
util
.
AbstractMap
;
import
java
.
util
.
AbstractSet
;
import
java
.
util
.
ArrayList
;
import
java
.
util
.
Arrays
;
import
java
.
util
.
Collection
;
import
java
.
util
.
Collections
;
import
java
.
util
.
Comparator
;
import
java
.
util
.
DoubleSummaryStatistics
;
import
java
.
util
.
EnumSet
;
import
java
.
util
.
HashMap
;
import
java
.
util
.
HashSet
;
import
java
.
util
.
IntSummaryStatistics
;
import
java
.
util
.
Iterator
;
import
java
.
util
.
List
;
import
java
.
util
.
LongSummaryStatistics
;
import
java
.
util
.
Map
;
import
java
.
util
.
Objects
;
import
java
.
util
.
Optional
;
import
java
.
util
.
Set
;
import
java
.
util
.
StringJoiner
;
import
java
.
util
.
concurrent
.
ConcurrentHashMap
;
import
java
.
util
.
concurrent
.
ConcurrentMap
;
import
java
.
util
.
function
.
BiConsumer
;
import
java
.
util
.
function
.
BiFunction
;
import
java
.
util
.
function
.
BinaryOperator
;
import
java
.
util
.
function
.
Consumer
;
import
java
.
util
.
function
.
Function
;
import
java
.
util
.
function
.
Predicate
;
import
java
.
util
.
function
.
Supplier
;
import
java
.
util
.
function
.
ToDoubleFunction
;
import
java
.
util
.
function
.
ToIntFunction
;
import
java
.
util
.
function
.
ToLongFunction
;
* Implementations of {@link Collector} that implement various useful reduction
* operations, such as accumulating elements into collections, summarizing
* elements according to various criteria, etc.
* <p>The following are examples of using the predefined collectors to perform
* common mutable reduction tasks:
* // Accumulate names into a List
* List<String> list = people.stream().map(Person::getName).collect(Collectors.toList());
* // Accumulate names into a TreeSet
* Set<String> set = people.stream().map(Person::getName).collect(Collectors.toCollection(TreeSet::new));
* // Convert elements to strings and concatenate them, separated by commas
* String joined = things.stream()
* .collect(Collectors.joining(", "));
* // Compute sum of salaries of employee
* int total = employees.stream()
* .collect(Collectors.summingInt(Employee::getSalary)));
 * // Group employees by department
 * Map<Department, List<Employee>> byDept
 *     = employees.stream()
 *                .collect(Collectors.groupingBy(Employee::getDepartment));
 * // Compute sum of salaries by department
 * Map<Department, Integer> totalByDept
 *     = employees.stream()
 *                .collect(Collectors.groupingBy(Employee::getDepartment,
 *                                               Collectors.summingInt(Employee::getSalary)));
 * // Partition students into passing and failing
 * Map<Boolean, List<Student>> passingFailing =
 *     students.stream()
 *             .collect(Collectors.partitioningBy(s -> s.getGrade() >= PASS_THRESHOLD));
public
final
class
Collectors
{
// Shared, immutable characteristics sets handed out by the factory methods
// below.  Each is wrapped in Collections.unmodifiableSet so it can be
// returned directly from Collector.characteristics() without defensive copies.

// CONCURRENT + UNORDERED + IDENTITY_FINISH
static final Set<Collector.Characteristics> CH_CONCURRENT_ID
        = Collections.unmodifiableSet(EnumSet.of(Collector.Characteristics.CONCURRENT,
                                                 Collector.Characteristics.UNORDERED,
                                                 Collector.Characteristics.IDENTITY_FINISH));

// CONCURRENT + UNORDERED (finisher is not an identity)
static final Set<Collector.Characteristics> CH_CONCURRENT_NOID
        = Collections.unmodifiableSet(EnumSet.of(Collector.Characteristics.CONCURRENT,
                                                 Collector.Characteristics.UNORDERED));

// IDENTITY_FINISH only
static final Set<Collector.Characteristics> CH_ID
        = Collections.unmodifiableSet(EnumSet.of(Collector.Characteristics.IDENTITY_FINISH));

// UNORDERED + IDENTITY_FINISH
static final Set<Collector.Characteristics> CH_UNORDERED_ID
        = Collections.unmodifiableSet(EnumSet.of(Collector.Characteristics.UNORDERED,
                                                 Collector.Characteristics.IDENTITY_FINISH));

// No characteristics at all
static final Set<Collector.Characteristics> CH_NOID = Collections.emptySet();
// Suppresses the default constructor: Collectors is a static utility class
// and must never be instantiated.
private Collectors() { }
* Returns a merge function, suitable for use in
* {@link Map#merge(Object, Object, BiFunction) Map.merge()} or
* {@link #toMap(Function, Function, BinaryOperator) toMap()}, which always
* throws {@code IllegalStateException}. This can be used to enforce the
* assumption that the elements being collected are distinct.
* @param <T> the type of input arguments to the merge function
* @return a merge function which always throw {@code IllegalStateException}
private
static
<
T
>
BinaryOperator
<
T
>
throwingMerger
() {
return
(
u
,
v
) -> {
throw
new
IllegalStateException
(
String
.
format
(
"Duplicate key %s"
,
u
)); };
@
SuppressWarnings
(
"unchecked"
)
private
static
<
I
,
R
>
Function
<
I
,
R
>
castingIdentity
() {
* Simple implementation class for {@code Collector}.
* @param <T> the type of elements to be collected
* @param <R> the type of the result
static
class
CollectorImpl
<
T
,
A
,
R
>
implements
Collector
<
T
,
A
,
R
> {
private
final
Supplier
<
A
>
supplier
;
private
final
BiConsumer
<
A
,
T
>
accumulator
;
private
final
BinaryOperator
<
A
>
combiner
;
private
final
Function
<
A
,
R
>
finisher
;
private
final
Set
<
Characteristics
>
characteristics
;
CollectorImpl
(
Supplier
<
A
>
supplier
,
BiConsumer
<
A
,
T
>
accumulator
,
BinaryOperator
<
A
>
combiner
,
Function
<
A
,
R
>
finisher
,
Set
<
Characteristics
>
characteristics
) {
this
.
supplier
=
supplier
;
this
.
accumulator
=
accumulator
;
this
.
combiner
=
combiner
;
this
.
finisher
=
finisher
;
this
.
characteristics
=
characteristics
;
CollectorImpl
(
Supplier
<
A
>
supplier
,
BiConsumer
<
A
,
T
>
accumulator
,
BinaryOperator
<
A
>
combiner
,
Set
<
Characteristics
>
characteristics
) {
this
(
supplier
,
accumulator
,
combiner
,
castingIdentity
(),
characteristics
);
public
BiConsumer
<
A
,
T
>
accumulator
() {
public
Supplier
<
A
>
supplier
() {
public
BinaryOperator
<
A
>
combiner
() {
public
Function
<
A
,
R
>
finisher
() {
public
Set
<
Characteristics
>
characteristics
() {
* Returns a {@code Collector} that accumulates the input elements into a
* new {@code Collection}, in encounter order. The {@code Collection} is
* created by the provided factory.
* @param <T> the type of the input elements
* @param <C> the type of the resulting {@code Collection}
* @param collectionFactory a {@code Supplier} which returns a new, empty
* {@code Collection} of the appropriate type
* @return a {@code Collector} which collects all the input elements into a
* {@code Collection}, in encounter order
public
static
<
T
,
C
extends
Collection
<
T
>>
Collector
<
T
, ?,
C
>
toCollection
(
Supplier
<
C
>
collectionFactory
) {
return
new
CollectorImpl
<>(
collectionFactory
,
Collection
<
T
>::
add
,
(
r1
,
r2
) -> {
r1
.
addAll
(
r2
);
return
r1
; },
* Returns a {@code Collector} that accumulates the input elements into a
* new {@code List}. There are no guarantees on the type, mutability,
* serializability, or thread-safety of the {@code List} returned; if more
* control over the returned {@code List} is required, use {@link #toCollection(Supplier)}.
* @param <T> the type of the input elements
* @return a {@code Collector} which collects all the input elements into a
* {@code List}, in encounter order
Collector
<
T
, ?,
List
<
T
>>
toList
() {
return
new
CollectorImpl
<>((
Supplier
<
List
<
T
>>)
ArrayList
::
new
,
List
::
add
,
(
left
,
right
) -> {
left
.
addAll
(
right
);
return
left
; },
* Returns a {@code Collector} that accumulates the input elements into a
* new {@code Set}. There are no guarantees on the type, mutability,
* serializability, or thread-safety of the {@code Set} returned; if more
* control over the returned {@code Set} is required, use
* {@link #toCollection(Supplier)}.
* <p>This is an {@link Collector.Characteristics#UNORDERED unordered}
* @param <T> the type of the input elements
* @return a {@code Collector} which collects all the input elements into a
Collector
<
T
, ?,
Set
<
T
>>
toSet
() {
return
new
CollectorImpl
<>((
Supplier
<
Set
<
T
>>)
HashSet
::
new
,
Set
::
add
,
(
left
,
right
) -> {
left
.
addAll
(
right
);
return
left
; },
* Returns a {@code Collector} that concatenates the input elements into a
* {@code String}, in encounter order.
* @return a {@code Collector} that concatenates the input elements into a
* {@code String}, in encounter order
public
static
Collector
<
CharSequence
, ?,
String
>
joining
() {
return
new
CollectorImpl
<
CharSequence
,
StringBuilder
,
String
>(
StringBuilder
::
new
,
StringBuilder
::
append
,
(
r1
,
r2
) -> {
r1
.
append
(
r2
);
return
r1
; },
StringBuilder
::
toString
,
CH_NOID
);
* Returns a {@code Collector} that concatenates the input elements,
* separated by the specified delimiter, in encounter order.
* @param delimiter the delimiter to be used between each element
* @return A {@code Collector} which concatenates CharSequence elements,
* separated by the specified delimiter, in encounter order
public
static
Collector
<
CharSequence
, ?,
String
>
joining
(
CharSequence
delimiter
) {
return
joining
(
delimiter
,
""
,
""
);
* Returns a {@code Collector} that concatenates the input elements,
* separated by the specified delimiter, with the specified prefix and
* suffix, in encounter order.
* @param delimiter the delimiter to be used between each element
* @param prefix the sequence of characters to be used at the beginning
* @param suffix the sequence of characters to be used at the end
* @return A {@code Collector} which concatenates CharSequence elements,
* separated by the specified delimiter, in encounter order
public
static
Collector
<
CharSequence
, ?,
String
>
joining
(
CharSequence
delimiter
,
return
new
CollectorImpl
<>(
() ->
new
StringJoiner
(
delimiter
,
prefix
,
suffix
),
StringJoiner
::
add
,
StringJoiner
::
merge
,
StringJoiner
::
toString
,
CH_NOID
);
* {@code BinaryOperator<Map>} that merges the contents of its right
* argument into its left argument, using the provided merge function to
* @param <K> type of the map keys
* @param <V> type of the map values
* @param <M> type of the map
* @param mergeFunction A merge function suitable for
* {@link Map#merge(Object, Object, BiFunction) Map.merge()}
* @return a merge function for two maps
private
static
<
K
,
V
,
M
extends
Map
<
K
,
V
>>
BinaryOperator
<
M
>
mapMerger
(
BinaryOperator
<
V
>
mergeFunction
) {
for
(
Map
.
Entry
<
K
,
V
>
e
:
m2
.
entrySet
())
m1
.
merge
(
e
.
getKey
(),
e
.
getValue
(),
mergeFunction
);
* Adapts a {@code Collector} accepting elements of type {@code U} to one
* accepting elements of type {@code T} by applying a mapping function to
* each input element before accumulation.
* The {@code mapping()} collectors are most useful when used in a
* multi-level reduction, such as downstream of a {@code groupingBy} or
* {@code partitioningBy}. For example, given a stream of
* {@code Person}, to accumulate the set of last names in each city:
* Map<City, Set<String>> lastNamesByCity
* = people.stream().collect(groupingBy(Person::getCity,
* mapping(Person::getLastName, toSet())));
* @param <T> the type of the input elements
* @param <U> type of elements accepted by downstream collector
* @param <A> intermediate accumulation type of the downstream collector
* @param <R> result type of collector
* @param mapper a function to be applied to the input elements
* @param downstream a collector which will accept mapped values
* @return a collector which applies the mapping function to the input
* elements and provides the mapped results to the downstream collector
public
static
<
T
,
U
,
A
,
R
>
Collector
<
T
, ?,
R
>
mapping
(
Function
<?
super
T
, ?
extends
U
>
mapper
,
Collector
<?
super
U
,
A
,
R
>
downstream
) {
BiConsumer
<
A
, ?
super
U
>
downstreamAccumulator
=
downstream
.
accumulator
();
return
new
CollectorImpl
<>(
downstream
.
supplier
(),
(
r
,
t
) ->
downstreamAccumulator
.
accept
(
r
,
mapper
.
apply
(
t
)),
downstream
.
combiner
(),
downstream
.
finisher
(),
downstream
.
characteristics
());
* Adapts a {@code Collector} to perform an additional finishing
* transformation. For example, one could adapt the {@link #toList()}
* collector to always produce an immutable list with:
* = people.stream().collect(collectingAndThen(toList(), Collections::unmodifiableList));
* @param <T> the type of the input elements
* @param <A> intermediate accumulation type of the downstream collector
* @param <R> result type of the downstream collector
* @param <RR> result type of the resulting collector
* @param downstream a collector
* @param finisher a function to be applied to the final result of the downstream collector
* @return a collector which performs the action of the downstream collector,
* followed by an additional finishing step
public
static
<
T
,
A
,
R
,
RR
>
Collector
<
T
,
A
,
RR
>
collectingAndThen
(
Collector
<
T
,
A
,
R
>
downstream
,
Function
<
R
,
RR
>
finisher
) {
Set
<
Collector
.
Characteristics
>
characteristics
=
downstream
.
characteristics
();
if
(
characteristics
.
contains
(
Collector
.
Characteristics
.
IDENTITY_FINISH
)) {
if
(
characteristics
.
size
() ==
1
)
characteristics
=
Collectors
.
CH_NOID
;
characteristics
=
EnumSet
.
copyOf
(
characteristics
);
characteristics
.
remove
(
Collector
.
Characteristics
.
IDENTITY_FINISH
);
characteristics
=
Collections
.
unmodifiableSet
(
characteristics
);
return
new
CollectorImpl
<>(
downstream
.
supplier
(),
downstream
.
accumulator
(),
downstream
.
combiner
(),
downstream
.
finisher
().
andThen
(
finisher
),
* Returns a {@code Collector} accepting elements of type {@code T} that
* counts the number of input elements. If no elements are present, the
* This produces a result equivalent to:
* reducing(0L, e -> 1L, Long::sum)
* @param <T> the type of the input elements
* @return a {@code Collector} that counts the input elements
public
static
<
T
>
Collector
<
T
, ?,
Long
>
return
reducing
(
0L
,
e
->
1L
,
Long
::
sum
);
* Returns a {@code Collector} that produces the minimal element according
* to a given {@code Comparator}, described as an {@code Optional<T>}.
* This produces a result equivalent to:
* reducing(BinaryOperator.minBy(comparator))
* @param <T> the type of the input elements
* @param comparator a {@code Comparator} for comparing elements
* @return a {@code Collector} that produces the minimal value
public
static
<
T
>
Collector
<
T
, ?,
Optional
<
T
>>
minBy
(
Comparator
<?
super
T
>
comparator
) {
return
reducing
(
BinaryOperator
.
minBy
(
comparator
));
* Returns a {@code Collector} that produces the maximal element according
* to a given {@code Comparator}, described as an {@code Optional<T>}.
* This produces a result equivalent to:
* reducing(BinaryOperator.maxBy(comparator))
* @param <T> the type of the input elements
* @param comparator a {@code Comparator} for comparing elements
* @return a {@code Collector} that produces the maximal value
public
static
<
T
>
Collector
<
T
, ?,
Optional
<
T
>>
maxBy
(
Comparator
<?
super
T
>
comparator
) {
return
reducing
(
BinaryOperator
.
maxBy
(
comparator
));
* Returns a {@code Collector} that produces the sum of a integer-valued
* function applied to the input elements. If no elements are present,
* @param <T> the type of the input elements
* @param mapper a function extracting the property to be summed
* @return a {@code Collector} that produces the sum of a derived property
public
static
<
T
>
Collector
<
T
, ?,
Integer
>
summingInt
(
ToIntFunction
<?
super
T
>
mapper
) {
return
new
CollectorImpl
<>(
(
a
,
t
) -> {
a
[
0
] +=
mapper
.
applyAsInt
(
t
); },
(
a
,
b
) -> {
a
[
0
] +=
b
[
0
];
return
a
; },
* Returns a {@code Collector} that produces the sum of a long-valued
* function applied to the input elements. If no elements are present,
* @param <T> the type of the input elements
* @param mapper a function extracting the property to be summed
* @return a {@code Collector} that produces the sum of a derived property
public
static
<
T
>
Collector
<
T
, ?,
Long
>
summingLong
(
ToLongFunction
<?
super
T
>
mapper
) {
return
new
CollectorImpl
<>(
(
a
,
t
) -> {
a
[
0
] +=
mapper
.
applyAsLong
(
t
); },
(
a
,
b
) -> {
a
[
0
] +=
b
[
0
];
return
a
; },
* Returns a {@code Collector} that produces the sum of a double-valued
* function applied to the input elements. If no elements are present,
* <p>The sum returned can vary depending upon the order in which
* values are recorded, due to accumulated rounding error in
* addition of values of differing magnitudes. Values sorted by increasing
* absolute magnitude tend to yield more accurate results. If any recorded
* value is a {@code NaN} or the sum is at any point a {@code NaN} then the
* sum will be {@code NaN}.
* @param <T> the type of the input elements
* @param mapper a function extracting the property to be summed
* @return a {@code Collector} that produces the sum of a derived property
public
static
<
T
>
Collector
<
T
, ?,
Double
>
summingDouble
(
ToDoubleFunction
<?
super
T
>
mapper
) {
* In the arrays allocated for the collect operation, index 0
* holds the high-order bits of the running sum, index 1 holds
* the low-order bits of the sum computed via compensated
* summation, and index 2 holds the simple sum used to compute
* the proper result if the stream contains infinite values of
return
new
CollectorImpl
<>(
(
a
,
t
) -> {
sumWithCompensation
(
a
,
mapper
.
applyAsDouble
(
t
));
a
[
2
] +=
mapper
.
applyAsDouble
(
t
);},
(
a
,
b
) -> {
sumWithCompensation
(
a
,
b
[
0
]);
return
sumWithCompensation
(
a
,
b
[
1
]); },
a
->
computeFinalSum
(
a
),
* Incorporate a new double value using Kahan summation /
* compensation summation.
* High-order bits of the sum are in intermediateSum[0], low-order
* bits of the sum are in intermediateSum[1], any additional
* elements are application-specific.
* @param intermediateSum the high-order and low-order words of the intermediate sum
* @param value the name value to be included in the running sum
static
double
[]
sumWithCompensation
(
double
[]
intermediateSum
,
double
value
) {
double
tmp
=
value
-
intermediateSum
[
1
];
double
sum
=
intermediateSum
[
0
];
double
velvel
=
sum
+
tmp
;
// Little wolf of rounding error
intermediateSum
[
1
] = (
velvel
-
sum
) -
tmp
;
intermediateSum
[
0
] =
velvel
;
* If the compensated sum is spuriously NaN from accumulating one
* or more same-signed infinite values, return the
* correctly-signed infinity stored in the simple sum.
static
double
computeFinalSum
(
double
[]
summands
) {
// Better error bounds to add both terms as the final sum
double
tmp
=
summands
[
0
] +
summands
[
1
];
double
simpleSum
=
summands
[
summands
.
length
-
1
];
if
(
Double
.
isNaN
(
tmp
) &&
Double
.
isInfinite
(
simpleSum
))
* Returns a {@code Collector} that produces the arithmetic mean of an integer-valued
* function applied to the input elements. If no elements are present,
* @param <T> the type of the input elements
* @param mapper a function extracting the property to be summed
* @return a {@code Collector} that produces the sum of a derived property
public
static
<
T
>
Collector
<
T
, ?,
Double
>
averagingInt
(
ToIntFunction
<?
super
T
>
mapper
) {
return
new
CollectorImpl
<>(
(
a
,
t
) -> {
a
[
0
] +=
mapper
.
applyAsInt
(
t
);
a
[
1
]++; },
(
a
,
b
) -> {
a
[
0
] +=
b
[
0
];
a
[
1
] +=
b
[
1
];
return
a
; },
a
-> (
a
[
1
] ==
0
) ?
0.0d
: (
double
)
a
[
0
] /
a
[
1
],
CH_NOID
);
* Returns a {@code Collector} that produces the arithmetic mean of a long-valued
* function applied to the input elements. If no elements are present,
* @param <T> the type of the input elements
* @param mapper a function extracting the property to be summed
* @return a {@code Collector} that produces the sum of a derived property
public
static
<
T
>
Collector
<
T
, ?,
Double
>
averagingLong
(
ToLongFunction
<?
super
T
>
mapper
) {
return
new
CollectorImpl
<>(
(
a
,
t
) -> {
a
[
0
] +=
mapper
.
applyAsLong
(
t
);
a
[
1
]++; },
(
a
,
b
) -> {
a
[
0
] +=
b
[
0
];
a
[
1
] +=
b
[
1
];
return
a
; },
a
-> (
a
[
1
] ==
0
) ?
0.0d
: (
double
)
a
[
0
] /
a
[
1
],
CH_NOID
);
* Returns a {@code Collector} that produces the arithmetic mean of a double-valued
* function applied to the input elements. If no elements are present,
* <p>The average returned can vary depending upon the order in which
* values are recorded, due to accumulated rounding error in
* addition of values of differing magnitudes. Values sorted by increasing
* absolute magnitude tend to yield more accurate results. If any recorded
* value is a {@code NaN} or the sum is at any point a {@code NaN} then the
* average will be {@code NaN}.
* @implNote The {@code double} format can represent all
* consecutive integers in the range -2<sup>53</sup> to
* 2<sup>53</sup>. If the pipeline has more than 2<sup>53</sup>
* values, the divisor in the average computation will saturate at
* 2<sup>53</sup>, leading to additional numerical errors.
* @param <T> the type of the input elements
* @param mapper a function extracting the property to be summed
* @return a {@code Collector} that produces the sum of a derived property
public
static
<
T
>
Collector
<
T
, ?,
Double
>
averagingDouble
(
ToDoubleFunction
<?
super
T
>
mapper
) {
* In the arrays allocated for the collect operation, index 0
* holds the high-order bits of the running sum, index 1 holds
* the low-order bits of the sum computed via compensated
* summation, and index 2 holds the number of values seen.
return
new
CollectorImpl
<>(
(
a
,
t
) -> {
sumWithCompensation
(
a
,
mapper
.
applyAsDouble
(
t
));
a
[
2
]++;
a
[
3
]+=
mapper
.
applyAsDouble
(
t
);},
(
a
,
b
) -> {
sumWithCompensation
(
a
,
b
[
0
]);
sumWithCompensation
(
a
,
b
[
1
]);
a
[
2
] +=
b
[
2
];
a
[
3
] +=
b
[
3
];
return
a
; },
a
-> (
a
[
2
] ==
0
) ?
0.0d
: (
computeFinalSum
(
a
) /
a
[
2
]),
* Returns a {@code Collector} which performs a reduction of its
* input elements under a specified {@code BinaryOperator} using the
* The {@code reducing()} collectors are most useful when used in a
* multi-level reduction, downstream of {@code groupingBy} or
* {@code partitioningBy}. To perform a simple reduction on a stream,
* use {@link Stream#reduce(Object, BinaryOperator)}} instead.
* @param <T> element type for the input and output of the reduction
* @param identity the identity value for the reduction (also, the value
* that is returned when there are no input elements)
* @param op a {@code BinaryOperator<T>} used to reduce the input elements
* @return a {@code Collector} which implements the reduction operation
* @see #reducing(BinaryOperator)
* @see #reducing(Object, Function, BinaryOperator)
public
static
<
T
>
Collector
<
T
, ?,
T
>
reducing
(
T
identity
,
BinaryOperator
<
T
>
op
) {
return
new
CollectorImpl
<>(
boxSupplier
(
identity
),
(
a
,
t
) -> {
a
[
0
] =
op
.
apply
(
a
[
0
],
t
); },
(
a
,
b
) -> {
a
[
0
] =
op
.
apply
(
a
[
0
],
b
[
0
]);
return
a
; },
@
SuppressWarnings
(
"unchecked"
)
private
static
<
T
>
Supplier
<
T
[]>
boxSupplier
(
T
identity
) {
return
() -> (
T
[])
new
Object
[] {
identity
};
* Returns a {@code Collector} which performs a reduction of its
* input elements under a specified {@code BinaryOperator}. The result
* is described as an {@code Optional<T>}.
* The {@code reducing()} collectors are most useful when used in a
* multi-level reduction, downstream of {@code groupingBy} or
* {@code partitioningBy}. To perform a simple reduction on a stream,
* use {@link Stream#reduce(BinaryOperator)} instead.
* <p>For example, given a stream of {@code Person}, to calculate tallest
* Comparator<Person> byHeight = Comparator.comparing(Person::getHeight);
* Map<City, Person> tallestByCity
* = people.stream().collect(groupingBy(Person::getCity, reducing(BinaryOperator.maxBy(byHeight))));
* @param <T> element type for the input and output of the reduction
* @param op a {@code BinaryOperator<T>} used to reduce the input elements
* @return a {@code Collector} which implements the reduction operation
* @see #reducing(Object, BinaryOperator)
* @see #reducing(Object, Function, BinaryOperator)
public
static
<
T
>
Collector
<
T
, ?,
Optional
<
T
>>
reducing
(
BinaryOperator
<
T
>
op
) {
class
OptionalBox
implements
Consumer
<
T
> {
boolean
present
=
false
;
public
void
accept
(
T
t
) {
value
=
op
.
apply
(
value
,
t
);
return
new
CollectorImpl
<
T
,
OptionalBox
,
Optional
<
T
>>(
OptionalBox
::
new
,
OptionalBox
::
accept
,
(
a
,
b
) -> {
if
(
b
.
present
)
a
.
accept
(
b
.
value
);
return
a
; },
a
->
Optional
.
ofNullable
(
a
.
value
),
CH_NOID
);
* Returns a {@code Collector} which performs a reduction of its
* input elements under a specified mapping function and
* {@code BinaryOperator}. This is a generalization of
* {@link #reducing(Object, BinaryOperator)} which allows a transformation
* of the elements before reduction.
* The {@code reducing()} collectors are most useful when used in a
* multi-level reduction, downstream of {@code groupingBy} or
* {@code partitioningBy}. To perform a simple map-reduce on a stream,
* use {@link Stream#map(Function)} and {@link Stream#reduce(Object, BinaryOperator)}
* <p>For example, given a stream of {@code Person}, to calculate the longest
* last name of residents in each city:
* Comparator<String> byLength = Comparator.comparing(String::length);
* Map<City, String> longestLastNameByCity
* = people.stream().collect(groupingBy(Person::getCity,
* reducing(Person::getLastName, BinaryOperator.maxBy(byLength))));
* @param <T> the type of the input elements
* @param <U> the type of the mapped values
* @param identity the identity value for the reduction (also, the value
* that is returned when there are no input elements)
* @param mapper a mapping function to apply to each input value
* @param op a {@code BinaryOperator<U>} used to reduce the mapped values
* @return a {@code Collector} implementing the map-reduce operation
* @see #reducing(Object, BinaryOperator)
* @see #reducing(BinaryOperator)
Collector
<
T
, ?,
U
>
reducing
(
U
identity
,
Function
<?
super
T
, ?
extends
U
>
mapper
,
BinaryOperator
<
U
>
op
) {
return
new
CollectorImpl
<>(
boxSupplier
(
identity
),
(
a
,
t
) -> {
a
[
0
] =
op
.
apply
(
a
[
0
],
mapper
.
apply
(
t
)); },
(
a
,
b
) -> {
a
[
0
] =
op
.
apply
(
a
[
0
],
b
[
0
]);
return
a
; },
* Returns a {@code Collector} implementing a "group by" operation on
* input elements of type {@code T}, grouping elements according to a
* classification function, and returning the results in a {@code Map}.
* <p>The classification function maps elements to some key type {@code K}.
* The collector produces a {@code Map<K, List<T>>} whose keys are the
* values resulting from applying the classification function to the input
* elements, and whose corresponding values are {@code List}s containing the
* input elements which map to the associated key under the classification
* <p>There are no guarantees on the type, mutability, serializability, or
* thread-safety of the {@code Map} or {@code List} objects returned.
* This produces a result similar to:
* groupingBy(classifier, toList());
* The returned {@code Collector} is not concurrent. For parallel stream
* pipelines, the {@code combiner} function operates by merging the keys
* from one map into another, which can be an expensive operation. If
* preservation of the order in which elements appear in the resulting {@code Map}
* collector is not required, using {@link #groupingByConcurrent(Function)}
* may offer better parallel performance.
* @param <T> the type of the input elements
* @param <K> the type of the keys
* @param classifier the classifier function mapping input elements to keys
* @return a {@code Collector} implementing the group-by operation
* @see #groupingBy(Function, Collector)
* @see #groupingBy(Function, Supplier, Collector)
* @see #groupingByConcurrent(Function)
public
static
<
T
,
K
>
Collector
<
T
, ?,
Map
<
K
,
List
<
T
>>>
groupingBy
(
Function
<?
super
T
, ?
extends
K
>
classifier
) {
return
groupingBy
(
classifier
,
toList
());
* Returns a {@code Collector} implementing a cascaded "group by" operation
* on input elements of type {@code T}, grouping elements according to a
* classification function, and then performing a reduction operation on
* the values associated with a given key using the specified downstream
* <p>The classification function maps elements to some key type {@code K}.
* The downstream collector operates on elements of type {@code T} and
* produces a result of type {@code D}. The resulting collector produces a
* <p>There are no guarantees on the type, mutability,
* serializability, or thread-safety of the {@code Map} returned.
* <p>For example, to compute the set of last names of people in each city:
* Map<City, Set<String>> namesByCity
* = people.stream().collect(groupingBy(Person::getCity,
* mapping(Person::getLastName, toSet())));
* The returned {@code Collector} is not concurrent. For parallel stream
* pipelines, the {@code combiner} function operates by merging the keys
* from one map into another, which can be an expensive operation. If
* preservation of the order in which elements are presented to the downstream
* collector is not required, using {@link #groupingByConcurrent(Function, Collector)}
* may offer better parallel performance.
* @param <T> the type of the input elements
* @param <K> the type of the keys
* @param <A> the intermediate accumulation type of the downstream collector
* @param <D> the result type of the downstream reduction
* @param classifier a classifier function mapping input elements to keys
* @param downstream a {@code Collector} implementing the downstream reduction
* @return a {@code Collector} implementing the cascaded group-by operation
* @see #groupingBy(Function)
* @see #groupingBy(Function, Supplier, Collector)
* @see #groupingByConcurrent(Function, Collector)
public
static
<
T
,
K
,
A
,
D
>
Collector
<
T
, ?,
Map
<
K
,
D
>>
groupingBy
(
Function
<?
super
T
, ?
extends
K
>
classifier
,
Collector
<?
super
T
,
A
,
D
>
downstream
) {
return
groupingBy
(
classifier
,
HashMap
::
new
,
downstream
);
* Returns a {@code Collector} implementing a cascaded "group by" operation
* on input elements of type {@code T}, grouping elements according to a
* classification function, and then performing a reduction operation on
* the values associated with a given key using the specified downstream
* {@code Collector}. The {@code Map} produced by the Collector is created
* with the supplied factory function.
* <p>The classification function maps elements to some key type {@code K}.
* The downstream collector operates on elements of type {@code T} and
* produces a result of type {@code D}. The resulting collector produces a
* <p>For example, to compute the set of last names of people in each city,
* where the city names are sorted:
* Map<City, Set<String>> namesByCity
* = people.stream().collect(groupingBy(Person::getCity, TreeMap::new,
* mapping(Person::getLastName, toSet())));
* The returned {@code Collector} is not concurrent. For parallel stream
* pipelines, the {@code combiner} function operates by merging the keys
* from one map into another, which can be an expensive operation. If
* preservation of the order in which elements are presented to the downstream
* collector is not required, using {@link #groupingByConcurrent(Function, Supplier, Collector)}
* may offer better parallel performance.
* @param <T> the type of the input elements
* @param <K> the type of the keys
* @param <A> the intermediate accumulation type of the downstream collector
* @param <D> the result type of the downstream reduction
* @param <M> the type of the resulting {@code Map}
* @param classifier a classifier function mapping input elements to keys
* @param downstream a {@code Collector} implementing the downstream reduction
* @param mapFactory a function which, when called, produces a new empty
* {@code Map} of the desired type
* @return a {@code Collector} implementing the cascaded group-by operation
* @see #groupingBy(Function, Collector)
* @see #groupingBy(Function)
* @see #groupingByConcurrent(Function, Supplier, Collector)
public
static
<
T
,
K
,
D
,
A
,
M
extends
Map
<
K
,
D
>>
Collector
<
T
, ?,
M
>
groupingBy
(
Function
<?
super
T
, ?
extends
K
>
classifier
,
Supplier
<
M
>
mapFactory
,
Collector
<?
super
T
,
A
,
D
>
downstream
) {
Supplier
<
A
>
downstreamSupplier
=
downstream
.
supplier
();
BiConsumer
<
A
, ?
super
T
>
downstreamAccumulator
=
downstream
.
accumulator
();
BiConsumer
<
Map
<
K
,
A
>,
T
>
accumulator
= (
m
,
t
) -> {
K
key
=
Objects
.
requireNonNull
(
classifier
.
apply
(
t
),
"element cannot be mapped to a null key"
);
A
container
=
m
.
computeIfAbsent
(
key
,
k
->
downstreamSupplier
.
get
());
downstreamAccumulator
.
accept
(
container
,
t
);
BinaryOperator
<
Map
<
K
,
A
>>
merger
=
Collectors
.<
K
,
A
,
Map
<
K
,
A
>>
mapMerger
(
downstream
.
combiner
());
@
SuppressWarnings
(
"unchecked"
)
Supplier
<
Map
<
K
,
A
>>
mangledFactory
= (
Supplier
<
Map
<
K
,
A
>>)
mapFactory
;
if
(
downstream
.
characteristics
().
contains
(
Collector
.
Characteristics
.
IDENTITY_FINISH
)) {
return
new
CollectorImpl
<>(
mangledFactory
,
accumulator
,
merger
,
CH_ID
);
@
SuppressWarnings
(
"unchecked"
)
Function
<
A
,
A
>
downstreamFinisher
= (
Function
<
A
,
A
>)
downstream
.
finisher
();
Function
<
Map
<
K
,
A
>,
M
>
finisher
=
intermediate
-> {
intermediate
.
replaceAll
((
k
,
v
) ->
downstreamFinisher
.
apply
(
v
));
@
SuppressWarnings
(
"unchecked"
)
M
castResult
= (
M
)
intermediate
;
return
new
CollectorImpl
<>(
mangledFactory
,
accumulator
,
merger
,
finisher
,
CH_NOID
);
* Returns a concurrent {@code Collector} implementing a "group by"
* operation on input elements of type {@code T}, grouping elements
* according to a classification function.
* <p>This is a {@link Collector.Characteristics#CONCURRENT concurrent} and
* {@link Collector.Characteristics#UNORDERED unordered} Collector.
* <p>The classification function maps elements to some key type {@code K}.
* The collector produces a {@code ConcurrentMap<K, List<T>>} whose keys are the
* values resulting from applying the classification function to the input
* elements, and whose corresponding values are {@code List}s containing the
* input elements which map to the associated key under the classification
* <p>There are no guarantees on the type, mutability, or serializability
* of the {@code Map} or {@code List} objects returned, or of the
* thread-safety of the {@code List} objects returned.
* This produces a result similar to:
* groupingByConcurrent(classifier, toList());
* @param <T> the type of the input elements
* @param <K> the type of the keys
* @param classifier a classifier function mapping input elements to keys
* @return a concurrent, unordered {@code Collector} implementing the group-by operation
* @see #groupingBy(Function)
* @see #groupingByConcurrent(Function, Collector)
* @see #groupingByConcurrent(Function, Supplier, Collector)
Collector
<
T
, ?,
ConcurrentMap
<
K
,
List
<
T
>>>
groupingByConcurrent
(
Function
<?
super
T
, ?
extends
K
>
classifier
) {
return
groupingByConcurrent
(
classifier
,
ConcurrentHashMap
::
new
,
toList
());
* Returns a concurrent {@code Collector} implementing a cascaded "group by"
* operation on input elements of type {@code T}, grouping elements
* according to a classification function, and then performing a reduction
* operation on the values associated with a given key using the specified
* downstream {@code Collector}.
* <p>This is a {@link Collector.Characteristics#CONCURRENT concurrent} and
* {@link Collector.Characteristics#UNORDERED unordered} Collector.
* <p>The classification function maps elements to some key type {@code K}.
* The downstream collector operates on elements of type {@code T} and
* produces a result of type {@code D}. The resulting collector produces a
* <p>For example, to compute the set of last names of people in each city,
* where the city names are sorted:
* ConcurrentMap<City, Set<String>> namesByCity
* = people.stream().collect(groupingByConcurrent(Person::getCity,
* mapping(Person::getLastName, toSet())));
* @param <T> the type of the input elements
* @param <K> the type of the keys
* @param <A> the intermediate accumulation type of the downstream collector
* @param <D> the result type of the downstream reduction
* @param classifier a classifier function mapping input elements to keys
* @param downstream a {@code Collector} implementing the downstream reduction
* @return a concurrent, unordered {@code Collector} implementing the cascaded group-by operation