Example usage for java.util.stream Collectors.groupingBy

List of usage examples for java.util.stream Collectors.groupingBy

Introduction

On this page you can find example usage for java.util.stream Collectors.groupingBy.

Prototype

public static <T, K> Collector<T, ?, Map<K, List<T>>> groupingBy(Function<? super T, ? extends K> classifier) 

Document

Returns a Collector implementing a "group by" operation on input elements of type T, grouping elements according to a classification function, and returning the results in a Map.
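
As a quick illustration of the signature above, here is a minimal, self-contained sketch; the word list and the length classifier are invented for this example and are not taken from any of the source files below.

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class GroupingByExample {
    public static void main(String[] args) {
        // Illustrative input; any List<T> works.
        List<String> words = Arrays.asList("apple", "banana", "cherry", "date", "fig");

        // Classify each element by word length; the result type is Map<K, List<T>>,
        // here Map<Integer, List<String>>.
        Map<Integer, List<String>> byLength = words.stream()
                .collect(Collectors.groupingBy(String::length));

        // Prints something like {3=[fig], 4=[date], 5=[apple], 6=[banana, cherry]}
        // (the iteration order of the returned Map is unspecified).
        System.out.println(byLength);
    }
}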

Usage

From source file:com.ikanow.aleph2.data_import_manager.analytics.utils.TestAnalyticTriggerCrudUtils.java

@Test
public void test_storeOrUpdateTriggerStage_updateActivation() throws InterruptedException {
    assertEquals(0, _test_crud.countObjects().join().intValue());

    final DataBucketBean bucket = buildBucket("/test/store/trigger", true);

    // Save a bucket
    {
        final Stream<AnalyticTriggerStateBean> test_stream = AnalyticTriggerBeanUtils
                .generateTriggerStateStream(bucket, false, Optional.empty());
        final List<AnalyticTriggerStateBean> test_list = test_stream.collect(Collectors.toList());

        System.out.println("Resources = \n" + test_list.stream()
                .map(t -> BeanTemplateUtils.toJson(t).toString()).collect(Collectors.joining("\n")));

        assertEquals(8L, test_list.size()); //(8 not 7 cos haven't dedup'd yet)

        // 4 internal dependencies
        assertEquals(4L, test_list.stream().filter(t -> null != t.job_name()).count());
        // 4 external dependencies
        assertEquals(4L, test_list.stream().filter(t -> null == t.job_name()).count());

        final Map<Tuple2<String, String>, List<AnalyticTriggerStateBean>> grouped_triggers = test_list.stream()
                .collect(Collectors.groupingBy(t -> Tuples._2T(t.bucket_name(), null)));

        AnalyticTriggerCrudUtils.storeOrUpdateTriggerStage(bucket, _test_crud, grouped_triggers).join();

        assertEquals(7L, _test_crud.countObjects().join().intValue());

        // Time is relative (default bucket check freq == 2 minutes), so all the triggers should have been set for "now"
        assertEquals(7L,
                _test_crud
                        .countObjectsBySpec(CrudUtils.allOf(AnalyticTriggerStateBean.class)
                                .rangeBelow(AnalyticTriggerStateBean::next_check, new Date(), false))
                        .join().intValue());

    }

    //DEBUG
    //this.printTriggerDatabase();

    // Sleep to change times
    Thread.sleep(100L);

    // 2) Modify and update
    final DataBucketBean mod_bucket = BeanTemplateUtils.clone(bucket)
            .with(DataBucketBean::analytic_thread,
                    BeanTemplateUtils.clone(bucket.analytic_thread())
                            .with(AnalyticThreadBean::jobs, bucket.analytic_thread().jobs().stream()
                                    .map(j -> BeanTemplateUtils.clone(j)
                                            .with(AnalyticThreadJobBean::name, "test_" + j.name()).done())
                                    .collect(Collectors.toList()))
                            .done())
            .done();
    {

        final Stream<AnalyticTriggerStateBean> test_stream = AnalyticTriggerBeanUtils
                .generateTriggerStateStream(mod_bucket, false, Optional.empty());
        final List<AnalyticTriggerStateBean> test_list = test_stream.collect(Collectors.toList());

        final Map<Tuple2<String, String>, List<AnalyticTriggerStateBean>> grouped_triggers = test_list.stream()
                .collect(Collectors.groupingBy(t -> Tuples._2T(t.bucket_name(), null)));

        AnalyticTriggerCrudUtils.storeOrUpdateTriggerStage(bucket, _test_crud, grouped_triggers).join();

        //DEBUG
        //this.printTriggerDatabase();

        assertEquals(7L, _test_crud.countObjects().join().intValue());

        assertEquals(4L,
                Optionals
                        .streamOf(_test_crud.getObjectsBySpec(CrudUtils.allOf(AnalyticTriggerStateBean.class))
                                .join().iterator(), false)
                        .filter(t -> null != t.job_name()).filter(t -> t.job_name().startsWith("test_"))
                        .count());
    }

    // 3) Since we're here might as well try activating...
    {
        final Stream<AnalyticTriggerStateBean> test_stream = Optionals.streamOf(
                _test_crud.getObjectsBySpec(CrudUtils.allOf(AnalyticTriggerStateBean.class)).join().iterator(),
                false);

        AnalyticTriggerCrudUtils.updateTriggerStatuses(_test_crud, test_stream, new Date(), Optional.of(true))
                .join();

        assertEquals(7L, _test_crud.countObjects().join().intValue());
        assertEquals(7L,
                Optionals
                        .streamOf(_test_crud.getObjectsBySpec(CrudUtils.allOf(AnalyticTriggerStateBean.class))
                                .join().iterator(), false)
                        .filter(t -> t.is_job_active())
                        .filter(t -> 100 != Optional.ofNullable(t.last_resource_size()).orElse(-1L)).count());
    }
    // 4) ... and then de-activating...
    {
        final Stream<AnalyticTriggerStateBean> test_stream = Optionals.streamOf(
                _test_crud.getObjectsBySpec(CrudUtils.allOf(AnalyticTriggerStateBean.class)).join().iterator(),
                false);

        AnalyticTriggerCrudUtils.updateTriggerStatuses(_test_crud, test_stream, new Date(), Optional.of(false))
                .join();

        assertEquals(7L, _test_crud.countObjects().join().intValue());
        assertEquals(7L,
                Optionals
                        .streamOf(_test_crud.getObjectsBySpec(CrudUtils.allOf(AnalyticTriggerStateBean.class))
                                .join().iterator(), false)
                        .filter(t -> !t.is_job_active())
                        .filter(t -> 100 != Optional.ofNullable(t.last_resource_size()).orElse(-1L)).count());
    }
    // 5) ... finally re-activate 
    {
        final Stream<AnalyticTriggerStateBean> test_stream = Optionals
                .streamOf(_test_crud.getObjectsBySpec(CrudUtils.allOf(AnalyticTriggerStateBean.class)).join()
                        .iterator(), false)
                .map(t -> BeanTemplateUtils.clone(t).with(AnalyticTriggerStateBean::curr_resource_size, 100L)
                        .done());

        AnalyticTriggerCrudUtils.updateTriggerStatuses(_test_crud, test_stream, new Date(), Optional.of(true))
                .join();

        assertEquals(7L, _test_crud.countObjects().join().intValue());
        assertEquals(7L,
                Optionals
                        .streamOf(_test_crud.getObjectsBySpec(CrudUtils.allOf(AnalyticTriggerStateBean.class))
                                .join().iterator(), false)
                        .filter(t -> t.is_job_active()).filter(t -> 100 == t.last_resource_size()).count());

    }
}

From source file:gov.ca.cwds.cals.service.ComplaintsService.java

@UnitOfWork(CMS)
protected List<ComplaintDto> getCountyLicenseCaseComplaints(String facilityNumber) {
    Map<LocalDate, List<CountyLicenseCaseComplaintInfo>> complaintsAggregation = countyLicenseCaseComplaintInfoDao
            .loadCountyLicenseCaseComplaintsByLicenseNumber(facilityNumber).stream()
            .collect(Collectors.groupingBy(CountyLicenseCaseComplaintInfo::getComplaintDate));
    List<ComplaintDto> complaints = new ArrayList<>(complaintsAggregation.size());
    for (Map.Entry<LocalDate, List<CountyLicenseCaseComplaintInfo>> entry : complaintsAggregation.entrySet()) {
        ComplaintDto complaintDto = new ComplaintDto();
        complaintDto.setComplaintDate(LocalDateTime.of(entry.getKey(), LocalTime.NOON));
        List<AllegationDto> allegations = entry.getValue().stream()
                .map(CountyLicenseCaseComplaintInfo::toAllegationDto).collect(Collectors.toList());
        complaintDto.setAllegations(allegations);
        complaints.add(complaintDto);
    }
    return complaints;
}

From source file:com.hack23.cia.web.impl.ui.application.views.common.chartfactory.impl.DocumentChartDataManagerImpl.java

/**
 * Adds the document history by org data.
 *
 * @param dataSeries
 *            the data series
 * @param series
 *            the series
 * @param itemList
 *            the item list
 */
private static void addDocumentHistoryByOrgData(final DataSeries dataSeries, final Series series,
        final List<ViewRiksdagenOrgDocumentDailySummary> itemList) {
    final Map<String, List<ViewRiksdagenOrgDocumentDailySummary>> map = itemList.parallelStream()
            .filter(t -> t != null)
            .collect(Collectors.groupingBy(t -> StringUtils.defaultIfBlank(t.getDocumentType(), NO_INFO)));

    for (final Entry<String, List<ViewRiksdagenOrgDocumentDailySummary>> entry : map.entrySet()) {

        series.addSeries(new XYseries().setLabel(entry.getKey()));

        dataSeries.newSeries();
        if (entry.getValue() != null) {
            for (final ViewRiksdagenOrgDocumentDailySummary item : entry.getValue()) {
                if (item != null) {
                    dataSeries.add(item.getEmbeddedId().getPublicDate(), item.getTotal());
                }
            }
        } else {
            LOGGER.info(LOG_MSG_MISSING_DATA_FOR_KEY, entry);
        }

    }
}

From source file:edu.mit.lib.mama.Mama.java

private static List<String> findItems(Handle hdl, String qfield, String value, String[] rfields) {
    String queryBase = "select lmv.* from metadatavalue lmv, metadatavalue rmv where "
            + "lmv.item_id = rmv.item_id and rmv.metadata_field_id = ? and rmv.text_value = ? ";
    Query<Map<String, Object>> query;
    if (null == rfields) { // just return default field
        query = hdl.createQuery(queryBase + "and lmv.metadata_field_id = ?").bind(2,
                findFieldId(hdl, URI_FIELD));
    } else { // filter out fields we can't resolve
        String inList = Arrays.asList(rfields).stream().map(f -> String.valueOf(findFieldId(hdl, f)))
                .filter(id -> !"-1".equals(id)).collect(Collectors.joining(","));
        query = hdl.createQuery(queryBase + "and lmv.metadata_field_id in (" + inList + ")");
    }
    List<Mdv> rs = query.bind(0, findFieldId(hdl, qfield)).bind(1, value).map(new MdvMapper()).list();
    // group the list by Item, then construct a JSON object with each item's properties
    return rs.stream().collect(Collectors.groupingBy(Mdv::getItemId)).values().stream().map(p -> jsonObject(p))
            .collect(Collectors.toList());
}

From source file:alfio.controller.api.admin.ExtensionApiController.java

@RequestMapping(value = "/setting/organization/{orgShortName}/event/{shortName}", method = RequestMethod.GET)
public Map<Integer, List<ExtensionParameterMetadataAndValue>> getParametersFor(
        @PathVariable("orgShortName") String orgShortName, @PathVariable("shortName") String eventShortName,
        Principal principal) {

    Organization org = organizationRepository.findByName(orgShortName).orElseThrow(IllegalStateException::new);
    ensureOrganization(principal, org);
    Event event = eventRepository.findByShortName(eventShortName);
    ensureEventInOrganization(org, event);
    String pattern = String.format("-%d-%d", org.getId(), event.getId());
    return extensionService.getConfigurationParametersFor(pattern, pattern, "EVENT").stream()
            .collect(Collectors.groupingBy(ExtensionParameterMetadataAndValue::getExtensionId));
}

From source file:org.obiba.mica.search.CoverageQueryExecutor.java

/**
 * For a {@link Taxonomy}, report the number of hits and optionally the
 * number of hits for each bucket.
 */
private void addTaxonomyCoverage(List<MicaSearch.TaxonomyCoverageDto> coverages, Taxonomy taxonomy,
        Map<String, Map<String, Integer>> aggsMap, @Nullable List<BucketResult> bucketResults,
        Map<String, Map<String, MicaSearch.TermsAggregationResultDto>> aggTermsTitlesMap) {
    if (taxonomy.hasVocabularies()) {
        MicaSearch.TaxonomyCoverageDto.Builder taxoBuilder = MicaSearch.TaxonomyCoverageDto.newBuilder();
        taxoBuilder.setTaxonomy(dtos.asDto(taxonomy, getLocale()));
        List<Integer> hits = Lists.newArrayList();
        String namespace = taxonomy.getName().equals("Default") ? null : taxonomy.getName();
        Map<String, List<BucketResult>> bucketResultsByVocabulary = bucketResults == null ? Maps.newHashMap()
                : bucketResults.stream().collect(Collectors.groupingBy(BucketResult::getVocabulary));

        taxonomy.getVocabularies().stream().filter(vocabulary -> applyFilter(taxonomy, vocabulary))
                .forEach(vocabulary -> hits.add(addVocabularyCoverage(taxoBuilder, taxonomy, vocabulary,
                        aggsMap.get(AttributeKey.getMapKey(vocabulary.getName(), namespace)),
                        bucketResults == null ? null : bucketResultsByVocabulary.get(vocabulary.getName()),
                        aggTermsTitlesMap)));

        taxoBuilder.setHits(hits.isEmpty() ? 0 : hits.stream().mapToInt(x -> x).sum());
        // compute the sum of the hits for all vocabularies per bucket
        if (bucketResults != null) {
            Map<String, List<BucketResult>> bucketResultsByBucketField = bucketResults.stream()
                    .collect(Collectors.groupingBy(BucketResult::getBucketField));

            bucketResultsByBucketField.keySet().forEach(field -> {
                Map<String, List<BucketResult>> bucketResultsByBucketValue = bucketResultsByBucketField
                        .get(field).stream().collect(Collectors.groupingBy(BucketResult::getBucketValue));

                bucketResultsByBucketValue.keySet().stream().sorted().forEach(value -> {
                    List<BucketResult> buckets = bucketResultsByBucketValue.get(value);
                    int sumOfHits = buckets.stream().mapToInt(BucketResult::getHits).sum();
                    if (sumOfHits > 0) {
                        taxoBuilder.addBuckets(getBucketCoverageDtoBuilder(field, value, sumOfHits,
                                aggTermsTitlesMap.get(field).get(value)));
                    }
                });
            });
        }

        if (!taxoBuilder.getVocabulariesList().isEmpty()) {
            coverages.add(taxoBuilder.build());
        }
    }
}

From source file:com.github.sevntu.checkstyle.ordering.MethodOrder.java

public int getOverloadGroupsSplitCases() {
    return currentOrdering.stream().filter(method -> !method.isCtor())
            .collect(Collectors.groupingBy(Method::getName)).values().stream()
            .collect(Collectors.summingInt(this::getMethodGroupSplitCount));
}

From source file:alfio.controller.api.admin.AdminReservationApiController.java

private TicketReservationDescriptor toReservationDescriptor(String reservationId,
        Triple<TicketReservation, List<Ticket>, Event> triple) {
    List<SerializablePair<TicketCategory, List<Ticket>>> tickets = triple.getMiddle().stream()
            .collect(Collectors.groupingBy(Ticket::getCategoryId)).entrySet().stream()
            .map(entry -> SerializablePair.of(
                    eventManager.getTicketCategoryById(entry.getKey(), triple.getRight().getId()),
                    entry.getValue()))
            .collect(Collectors.toList());
    TicketReservation reservation = triple.getLeft();
    return new TicketReservationDescriptor(reservation, ticketReservationManager.orderSummaryForReservationId(
            reservationId, triple.getRight(), Locale.forLanguageTag(reservation.getUserLanguage())), tickets);
}

From source file:com.github.sevntu.checkstyle.ordering.MethodOrder.java

public int getDependenciesBetweenDistantMethodsCases(int screenLinesCount) {
    return invocations.stream().collect(Collectors.groupingBy(MethodInvocation::getCaller)).values().stream()
            .collect(Collectors
                    .summingInt(callerInvocations -> (int) callerInvocations.stream().filter(invocation -> {
                        final int invocationLineNo = translateInitialLineNo(invocation.getInitialLineNo());
                        final int calleeLineNo = translateInitialLineNo(
                                invocation.getCallee().getInitialLineNo());
                        return Math.abs(calleeLineNo - invocationLineNo) > screenLinesCount;
                    }).filter(new UniqueCallerCalleeMethodInvocationFilter()).count()));
}

From source file:eu.ggnet.dwoss.redtape.reporting.RedTapeCloserOperation.java

/**
 * Rolling out the units from the stocks. For this, a {@link StockTransaction} with {@link StockTransactionType#ROLL_OUT} is created,
 * all {@link StockUnit}<code>s</code> which are on a {@link LogicTransaction} with matching dossierId are added to this {@link StockTransaction} and
 * it gets {@link StockTransactionStatusType#COMPLETED}.
 * <p/>
 * @param dossierIds the dossierId as reference.
 * @param msg        a msg for the stocktransaction.
 * @param arranger   the arranger.
 * @param monitor    an optional monitor.
 * @return the amount of rolled out units.
 */
private int closeStock(Set<Long> dossierIds, String msg, String arranger, IMonitor monitor) {
    SubMonitor m = SubMonitor.convert(monitor, 100);
    final String h = "Stock:";
    m.message(h + "lade logische Transaktionen");
    // Rolling out
    List<LogicTransaction> lts = ltEao.findByDossierIds(dossierIds);
    m.worked(3, h + "sortiere Geräte nach Lager");
    stockEao.findAll();
    Map<Stock, List<StockUnit>> unitsByStock = lts.stream().flatMap((t) -> t.getUnits().stream())
            .collect(Collectors.groupingBy(StockUnit::getStock));
    validateStockUnits(unitsByStock);

    m.setWorkRemaining((int) unitsByStock.values().stream().count());
    List<StockTransaction> stockTransactions = new ArrayList<>();
    for (Entry<Stock, List<StockUnit>> entry : unitsByStock.entrySet()) {
        StockTransaction st = stEmo.requestRollOutPrepared(entry.getKey().getId(), arranger, msg);
        for (StockUnit stockUnit : entry.getValue()) {
            m.worked(1, h + "verbuche (refurbishId=" + stockUnit.getRefurbishId() + ",uniqueUnitId="
                    + stockUnit.getUniqueUnitId() + ")");
            st.addUnit(stockUnit);
            history.fire(new UnitHistory(stockUnit.getUniqueUnitId(), msg, arranger));
        }
        stockTransactions.add(st);

    }
    m.message(h + "auslagern");
    if (!stockTransactions.isEmpty())
        stEmo.completeRollOut(arranger, stockTransactions);
    m.finish();
    return (int) unitsByStock.values().stream().count();
}