Example usage for java.util.function Function identity

List of usage examples for java.util.function Function identity

Introduction

On this page you can find example usages of java.util.function.Function.identity().

Prototype

static <T> Function<T, T> identity() 

Document

Returns a function that always returns its input argument.
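
Most of the examples below follow the same idiom: collecting a stream into a Map with Collectors.toMap, keyed by some property of each element, with Function.identity() as the value mapper so that the element itself becomes the map value. Here is a minimal, self-contained sketch of that idiom (the class name and sample data are invented for illustration, not taken from the projects below):

import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;

public class IdentityToMapExample {
    public static void main(String[] args) {
        List<String> names = List.of("alpha", "beta", "gamma");

        // Function.identity() behaves like the lambda s -> s: each element
        // becomes the map value, keyed here by its upper-case form.
        Map<String, String> byUpperCase = names.stream()
                .collect(Collectors.toMap(String::toUpperCase, Function.identity()));

        System.out.println(byUpperCase.get("BETA")); // prints "beta"
    }
}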

Usage

From source file:alfio.manager.TicketReservationManager.java

private void acquireItems(TicketStatus ticketStatus, AdditionalServiceItemStatus asStatus,
        PaymentProxy paymentProxy, String reservationId, String email, CustomerName customerName,
        String userLanguage, String billingAddress, int eventId) {
    Map<Integer, Ticket> preUpdateTicket = ticketRepository.findTicketsInReservation(reservationId).stream()
            .collect(toMap(Ticket::getId, Function.identity()));
    int updatedTickets = ticketRepository.updateTicketsStatusWithReservationId(reservationId,
            ticketStatus.toString());
    Map<Integer, Ticket> postUpdateTicket = ticketRepository.findTicketsInReservation(reservationId).stream()
            .collect(toMap(Ticket::getId, Function.identity()));

    postUpdateTicket.forEach((id, ticket) -> {
        auditUpdateTicket(preUpdateTicket.get(id), Collections.emptyMap(), ticket, Collections.emptyMap(),
                eventId);
    });

    int updatedAS = additionalServiceItemRepository.updateItemsStatusWithReservationUUID(reservationId,
            asStatus);
    Validate.isTrue(updatedTickets + updatedAS > 0, "no items have been updated");
    specialPriceRepository.updateStatusForReservation(singletonList(reservationId), Status.TAKEN.toString());
    ZonedDateTime timestamp = ZonedDateTime.now(ZoneId.of("UTC"));
    int updatedReservation = ticketReservationRepository.updateTicketReservation(reservationId,
            TicketReservationStatus.COMPLETE.toString(), email, customerName.getFullName(),
            customerName.getFirstName(), customerName.getLastName(), userLanguage, billingAddress, timestamp,
            paymentProxy.toString());
    Validate.isTrue(updatedReservation == 1,
            "expected exactly one updated reservation, got " + updatedReservation);
    waitingQueueManager.fireReservationConfirmed(reservationId);
    if (paymentProxy == PaymentProxy.PAYPAL || paymentProxy == PaymentProxy.ADMIN) {
        //we must notify the plugins about ticket assignment and send them by email
        Event event = eventRepository.findByReservationId(reservationId);
        TicketReservation reservation = findById(reservationId).orElseThrow(IllegalStateException::new);
        findTicketsInReservation(reservationId).stream()
                .filter(ticket -> StringUtils.isNotBlank(ticket.getFullName())
                        || StringUtils.isNotBlank(ticket.getFirstName())
                        || StringUtils.isNotBlank(ticket.getEmail()))
                .forEach(ticket -> {
                    Locale locale = Locale.forLanguageTag(ticket.getUserLanguage());
                    if (paymentProxy == PaymentProxy.PAYPAL) {
                        sendTicketByEmail(ticket, locale, event,
                                getTicketEmailGenerator(event, reservation, locale));
                    }
                    pluginManager.handleTicketAssignment(ticket);
                    extensionManager.handleTicketAssignment(ticket);
                });

    }
}

From source file:com.oneops.transistor.service.ManifestRfcBulkProcessor.java

private void mergeMonitorRelations(Map<String, Edge> monitorEdges, MergeResult mrgMap, CmsRfcCI manifestPlat,
        DesignPullContext context, ManifestRfcContainer platformRfcs) {
    List<CmsRfcRelation> existingMonitorRelations = cmRfcMrgProcessor.getCIRelations(context.platNsPath,
            MANIFEST_WATCHEDBY, null, null, MANIFEST_MONITOR, null);
    Map<String, CmsRfcRelation> existingMonitorsMap = existingMonitorRelations.stream()
            .collect(Collectors.toMap(reln -> reln.getToRfcCi().getCiName(), Function.identity()));

    List<String> rfcNames = platformRfcs.getRfcList().stream().map(CmsRfcCI::getCiName)
            .collect(Collectors.toList());

    monitorEdges.values().stream().forEach(edge -> {
        CmsCIRelation tmplRelation = edge.templateRel;
        if (!edge.userRels.isEmpty()) {
            for (CmsCIRelation designRelation : edge.userRels) {
                CmsRfcCI monitorFromRfc = null;
                long designFromCiId = designRelation.getFromCiId();
                if (mrgMap.rfcDesignMap.containsKey(designFromCiId)) {
                    monitorFromRfc = mrgMap.rfcDesignMap.get(designFromCiId).get(0);
                } else {
                    long manifestFromCiId = mrgMap.designIdsMap.get(designFromCiId).get(0);
                    monitorFromRfc = cmRfcMrgProcessor.getCiById(manifestFromCiId, "df");
                }
                processMonitor(tmplRelation, designRelation, manifestPlat, context, platformRfcs,
                        monitorFromRfc, existingMonitorsMap, rfcNames);

            }
        } else if (tmplRelation != null) {
            long templateFromCiId = tmplRelation.getFromCiId();
            if (mrgMap.templateIdsMap.containsKey(templateFromCiId)) {
                mrgMap.templateIdsMap.get(templateFromCiId).forEach(manifestCiId -> {
                    CmsRfcCI monitorFromRfc = cmRfcMrgProcessor.getCiById(manifestCiId, "df");
                    processMonitor(tmplRelation, null, manifestPlat, context, platformRfcs, monitorFromRfc,
                            existingMonitorsMap, rfcNames);
                });
            }

            if (mrgMap.rfcMap.containsKey(templateFromCiId)) {
                mrgMap.rfcMap.get(templateFromCiId).forEach(manifestFromRfc -> {
                    processMonitor(tmplRelation, null, manifestPlat, context, platformRfcs, manifestFromRfc,
                            existingMonitorsMap, rfcNames);
                });
            }

        }
    });

    //remove obsolete monitors
    existingMonitorsMap.values().stream().filter(this::canMonitorBeDeleted)
            .forEach(obsoleteMonitor -> cmRfcMrgProcessor
                    .requestCiDelete(obsoleteMonitor.getToRfcCi().getCiId(), context.userId));
}

From source file:org.codice.ddf.catalog.ui.metacard.MetacardApplication.java

protected UpdateResponse patchMetacards(List<MetacardChanges> metacardChanges, String subjectIdentifer)
        throws SourceUnavailableException, IngestException {
    Set<String> changedIds = metacardChanges.stream().flatMap(mc -> mc.getIds().stream())
            .collect(Collectors.toSet());

    Map<String, Result> results = util.getMetacardsWithTagById(changedIds, "*");

    for (MetacardChanges changeset : metacardChanges) {
        for (AttributeChange attributeChange : changeset.getAttributes()) {
            for (String id : changeset.getIds()) {
                List<String> values = attributeChange.getValues();
                Result result = results.get(id);
                if (result == null) {
                    LOGGER.debug(
                            "Metacard {} either does not exist or user {} does not have permission to see it",
                            id, subjectIdentifer);
                    throw new NotFoundException("Result was not found");
                }
                Metacard resultMetacard = result.getMetacard();

                Function<Serializable, Serializable> mapFunc = Function.identity();
                if (isChangeTypeDate(attributeChange, resultMetacard)) {
                    mapFunc = mapFunc.andThen(serializable -> Date.from(util.parseDate(serializable)));
                }

                resultMetacard.setAttribute(new AttributeImpl(attributeChange.getAttribute(),
                        values.stream().filter(Objects::nonNull).map(mapFunc).collect(Collectors.toList())));
            }
        }
    }

    List<Metacard> changedMetacards = results.values().stream().map(Result::getMetacard)
            .collect(Collectors.toList());
    return catalogFramework.update(new UpdateRequestImpl(
            changedMetacards.stream().map(Metacard::getId).toArray(String[]::new), changedMetacards));
}
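
In the example above, Function.identity() serves as a neutral starting point for conditional composition: mapFunc stays a no-op unless the date conversion has to be chained on with andThen. A small sketch of the same idea (the trim and upper-case steps are invented for illustration):

import java.util.List;
import java.util.function.Function;
import java.util.stream.Collectors;

public class ConditionalCompositionExample {

    static List<String> normalize(List<String> values, boolean trim, boolean upperCase) {
        // Start from a no-op and only compose the steps that are enabled.
        Function<String, String> mapper = Function.identity();
        if (trim) {
            mapper = mapper.andThen(String::trim);
        }
        if (upperCase) {
            mapper = mapper.andThen(String::toUpperCase);
        }
        return values.stream().map(mapper).collect(Collectors.toList());
    }

    public static void main(String[] args) {
        System.out.println(normalize(List.of("  a ", " b"), true, true)); // prints [A, B]
    }
}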

From source file:com.samsung.sjs.constraintsolver.TypeConstraintFixedPointSolver.java

public TypeAssignment solve() throws CancelException {
    logger.info("Type Solve ...");
    super.solve(null);
    if (logger.isDebugEnabled()) {
        dumpBounds();
    }
    boolean typedAllVariables = findFinalSolutions();
    if (!typedAllVariables) {
        logger.debug("Cannot find types for all variables");
    }
    if (logger.isDebugEnabled()) {
        dumpFinalSolution();
    }
    return new TypeAssignment(terms.stream().filter(t -> !(t instanceof EnvironmentDeclarationTerm))
            .collect(Collectors.toMap(Function.identity(), ITypeTerm::getType, (ty1, ty2) -> {
                assert Types.isEqual(ty1, ty2);
                return ty1;
            }, LinkedHashMap::new)), this);
}
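
The example above uses the four-argument Collectors.toMap overload: Function.identity() as the key mapper (so each stream element itself becomes the key), a value mapper, a merge function for duplicate keys, and a map supplier that controls the map implementation. A minimal sketch of that overload (sample data invented for illustration):

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class ToMapWithMergeExample {
    public static void main(String[] args) {
        // Key each word by itself, map it to its length, keep the first value
        // on duplicate keys, and preserve encounter order with LinkedHashMap.
        Map<String, Integer> lengths = Stream.of("one", "two", "two", "three")
                .collect(Collectors.toMap(
                        Function.identity(),
                        String::length,
                        (first, second) -> first,
                        LinkedHashMap::new));

        System.out.println(lengths); // prints {one=3, two=3, three=5}
    }
}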

From source file:com.ggvaidya.scinames.dataset.BinomialChangesSceneController.java

private AdditionalData<String, Map.Entry<String, String>> createSummaryAdditionalData() {
    List<Map.Entry<String, String>> summary = new ArrayList<>();

    // Calculate some summary values.
    long numChanges = potentialChanges.size();
    summary.add(new AbstractMap.SimpleEntry<String, String>("Number of binomial changes",
            String.valueOf(potentialChanges.size())));

    // How many have a note?
    summary.add(new AbstractMap.SimpleEntry<String, String>("Number of changes with annotations",
            String.valueOf(potentialChanges.stream().filter(ch -> ch.getNote().isPresent()).count())));

    // Calculate overall addition and deletion.

    // Summarize by types of change.
    Map<ChangeType, List<Change>> potentialChangesByType = potentialChanges.stream()
            .collect(Collectors.groupingBy(ch -> ch.getType()));
    summary.addAll(potentialChangesByType.keySet().stream().sorted()
            .map(type -> new AbstractMap.SimpleEntry<String, String>(
                    "Number of binomial changes of type '" + type + "'",
                    String.valueOf(potentialChangesByType.get(type).size())))
            .collect(Collectors.toList()));

    // Summarize by reason.
    Map<String, Long> potentialChangesByReason = potentialChanges.stream()
            .map(pc -> pc.getType() + " because of " + calculateReason(pc))
            .collect(Collectors.groupingBy(Function.identity(), Collectors.counting()));
    summary.addAll(potentialChangesByReason.keySet().stream().sorted()
            .map(reason -> new AbstractMap.SimpleEntry<String, String>(
                    "Number of binomial changes for reason '" + reason + "'",
                    potentialChangesByReason.get(reason).toString()))
            .collect(Collectors.toList()));

    // Make an additional data about it.
    Map<String, List<Map.Entry<String, String>>> map = new HashMap<>();
    map.put("Summary", summary);

    List<TableColumn<Map.Entry<String, String>, String>> cols = new ArrayList<>();

    TableColumn<Map.Entry<String, String>, String> colKey = new TableColumn<>("Property");
    colKey.setCellValueFactory(cdf -> new ReadOnlyStringWrapper(cdf.getValue().getKey()));
    cols.add(colKey);

    TableColumn<Map.Entry<String, String>, String> colValue = new TableColumn<>("Value");
    colValue.setCellValueFactory(cdf -> new ReadOnlyStringWrapper(cdf.getValue().getValue()));
    cols.add(colValue);

    TableColumn<Map.Entry<String, String>, String> colPercent = new TableColumn<>("Percentage");
    colPercent.setCellValueFactory(cdf -> {
        String result = "NA";

        if (cdf.getValue() != null && cdf.getValue().getValue() != null
                && !cdf.getValue().getValue().equals("null")) {
            long longVal = Long.parseLong(cdf.getValue().getValue());

            result = (longVal == 0) ? "NA" : (((double) longVal / numChanges * 100) + "%");
        }

        return new ReadOnlyStringWrapper(result);
    });
    cols.add(colPercent);

    return new AdditionalData<String, Entry<String, String>>("Summary", Arrays.asList("Summary"), map, cols);
}
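
The reason summary above relies on the frequency-count idiom: Collectors.groupingBy with Function.identity() as the classifier and Collectors.counting() as the downstream collector produces a map from each distinct value to the number of times it occurs. A minimal sketch (sample data invented for illustration):

import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class FrequencyCountExample {
    public static void main(String[] args) {
        // Count how often each change reason occurs.
        Map<String, Long> counts = Stream.of("added", "removed", "added", "renamed")
                .collect(Collectors.groupingBy(Function.identity(), Collectors.counting()));

        System.out.println(counts.get("added")); // prints 2
    }
}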

From source file:com.diversityarrays.kdxplore.KDXploreFrame.java

private void doBackupDatabase() {

    switch (backupProviders.size()) {
    case 0:
        MsgBox.warn(this, Msg.MSG_NO_DB_BACKUP_APPS_AVAILABLE(), getTitle());
        return;

    case 1:
        backupProviders.get(0).doDatabaseBackup(this);
        break;

    default:
        Map<String, BackupProvider> bpByName = backupProviders.stream()
                .collect(Collectors.toMap(BackupProvider::getBackupProviderName, Function.identity()));

        String[] choices = backupProviders.stream().map(BackupProvider::getBackupProviderName)
                .collect(Collectors.toList()).toArray(new String[backupProviders.size()]);

        Object choice = JOptionPane.showInputDialog(this, Msg.MSG_SELECT_APP_FOR_BACKUP(),
                Msg.TITLE_BACKUP_DATABASE(), JOptionPane.QUESTION_MESSAGE, null, choices, choices[0]);

        if (choice != null) {
            BackupProvider bp = bpByName.get(choice);
            bp.doDatabaseBackup(this);
        }
        break;
    }
}

From source file:org.efaps.esjp.accounting.Period_Base.java

/**
 * Gets the target document info for a payment field value.
 *
 * @param _parameter Parameter as passed by the eFaps API
 * @return the target document info for the payment field value
 * @throws EFapsException on error
 */
@SuppressWarnings("unchecked")
public Return getTargetDocInfo4PaymentFieldValue(final Parameter _parameter) throws EFapsException {
    final Return ret = new Return();
    final String key = Period.class.getName() + ".RequestKey4TargetDocInfo4PaymentFieldValue";
    final Map<Instance, String> values;
    if (Context.getThreadContext().containsRequestAttribute(key)) {
        values = (Map<Instance, String>) Context.getThreadContext().getRequestAttribute(key);
    } else {
        values = new HashMap<>();
        Context.getThreadContext().setRequestAttribute(key, values);
        final List<Instance> instances = (List<Instance>) _parameter.get(ParameterValues.REQUEST_INSTANCES);

        final MultiPrintQuery print = new MultiPrintQuery(instances);
        final SelectBuilder selTargetInsts = SelectBuilder.get().linkfrom(CISales.Payment.TargetDocument)
                .linkto(CISales.Payment.CreateDocument).instance();
        print.addSelect(selTargetInsts);
        print.execute();
        while (print.next()) {
            final List<String> labels = new ArrayList<>();
            final Object obj = print.getSelect(selTargetInsts);
            if (obj != null) {
                final List<Instance> targetInsts;
                if (obj instanceof Instance) {
                    targetInsts = new ArrayList<>();
                    targetInsts.add((Instance) obj);
                } else {
                    targetInsts = (List<Instance>) obj;
                }
                for (final Instance targetInst : targetInsts) {
                    final SelectBuilder selActName;
                    if (InstanceUtils.isType(targetInst, CISales.PaymentOrder)) {
                        selActName = SelectBuilder.get()
                                .linkfrom(CISales.ActionDefinitionPaymentOrder2Document.ToLinkAbstract)
                                .linkto(CISales.ActionDefinitionPaymentOrder2Document.FromLinkAbstract)
                                .attribute(CISales.ActionDefinitionPaymentOrder.Name);

                    } else if (InstanceUtils.isType(targetInst, CISales.CollectionOrder)) {
                        selActName = SelectBuilder.get()
                                .linkfrom(CISales.ActionDefinitionCollectionOrder2Document.ToLinkAbstract)
                                .linkto(CISales.ActionDefinitionCollectionOrder2Document.FromLinkAbstract)
                                .attribute(CISales.ActionDefinitionCollectionOrder.Name);

                    } else if (InstanceUtils.isType(targetInst, CISales.IncomingExchange)) {
                        selActName = SelectBuilder.get()
                                .linkfrom(CISales.ActionDefinitionIncomingExchange2Document.ToLinkAbstract)
                                .linkto(CISales.ActionDefinitionIncomingExchange2Document.FromLinkAbstract)
                                .attribute(CISales.ActionDefinitionIncomingExchange.Name);

                    } else {
                        selActName = null;
                    }

                    if (selActName != null) {
                        final PrintQuery print2 = new PrintQuery(targetInst);
                        print2.addSelect(selActName);
                        print2.execute();

                        final String actname = print2.getSelect(selActName);
                        if (actname == null) {
                            labels.add(targetInst.getType().getLabel());
                        } else {
                            labels.add(targetInst.getType().getLabel() + " - " + actname);
                        }
                    } else {
                        labels.add(targetInst.getType().getLabel());
                    }
                }
                final Map<String, Long> map = labels.stream()
                        .collect(Collectors.groupingBy(Function.identity(), Collectors.counting()));
                final StringBuilder bldr = new StringBuilder();
                for (final Entry<String, Long> entry : map.entrySet()) {
                    if (bldr.length() > 0) {
                        bldr.append(", ");
                    }
                    bldr.append(entry.getValue()).append(" x ").append(entry.getKey());
                }
                values.put(print.getCurrentInstance(), bldr.toString());
            }
        }
    }
    ret.put(ReturnValues.VALUES, values.get(_parameter.getInstance()));
    return ret;
}

From source file:org.openecomp.sdc.be.model.operations.impl.CapabilityInstanceOperation.java

/**
 * update capability property values
 * 
 * @param resourceInstanceId
 * @param propertyValues
 * @param capabilityId
 * @return
 */
@Override
public Either<List<PropertyValueData>, TitanOperationStatus> updateCapabilityPropertyValues(
        String resourceInstanceId, String capabilityId, List<ComponentInstanceProperty> propertyValues) {
    log.debug("Before updating property values of capability {} of RI {}.", capabilityId, resourceInstanceId);
    TitanOperationStatus error = null;
    Map<String, Object> props = new HashMap<>();
    CapabilityInstData capabilityInstance = null;
    String capabilityInstanceId = null;
    Either<Boolean, TitanOperationStatus> deleteProperyValuesRes;

    CapabilityData overrideCapabilityData;
    CapabilityDefinition overrideCapabilityDefinition;
    Map<String, PropertyDefinition> defaultProperties = null;
    Either<ImmutablePair<CapabilityData, GraphEdge>, TitanOperationStatus> getCapabilityDataRes = null;
    Either<List<PropertyValueData>, TitanOperationStatus> addPropertyValuesRes = null;
    Either<CapabilityDefinition, TitanOperationStatus> getCapabilityDefinitionRes = null;

    log.debug("Before getting all capability instances of RI {}.", resourceInstanceId);
    props.put(GraphPropertiesDictionary.CAPABILITY_ID.getProperty(), capabilityId);
    Either<ImmutablePair<CapabilityInstData, GraphEdge>, TitanOperationStatus> getCapabilityInstancesRes = titanGenericDao
            .getChildByEdgeCriteria(UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.ResourceInstance),
                    resourceInstanceId, GraphEdgeLabels.CAPABILITY_INST, NodeTypeEnum.CapabilityInst,
                    CapabilityInstData.class, props);
    if (getCapabilityInstancesRes.isRight()) {
        error = getCapabilityInstancesRes.right().value();
        log.debug("Failed to retrieve capability instances of capability {} of RI {}. Status is {}.", capabilityId,
                resourceInstanceId, error);
    }
    log.debug("After getting all capability instances of RI {}. Status is {}.", resourceInstanceId, error);
    if (error == null) {
        log.debug("Before deleting all capability instances of RI {}.", resourceInstanceId);
        capabilityInstance = getCapabilityInstancesRes.left().value().getLeft();
        capabilityInstanceId = capabilityInstance.getUniqueId();
        deleteProperyValuesRes = deleteAllPropertyValuesOfCapabilityInstance(resourceInstanceId,
                capabilityInstanceId);
        if (deleteProperyValuesRes.isRight()) {
            error = deleteProperyValuesRes.right().value();
            log.debug("Failed to delete property values of capability instance {} for RI {}. Status is {}",
                    capabilityInstanceId, resourceInstanceId, error);
        }
        log.debug("After deleting all capability instances of RI {}. Status is {}.", resourceInstanceId, error);
    }
    if (error == null) {
        log.debug("Before getting capability {} of RI {}.", capabilityId, resourceInstanceId);
        getCapabilityDataRes = titanGenericDao.getChild(
                UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.ResourceInstance), resourceInstanceId,
                GraphEdgeLabels.CALCULATED_CAPABILITY, NodeTypeEnum.Capability, CapabilityData.class);
        if (getCapabilityDataRes.isRight()) {
            error = getCapabilityDataRes.right().value();
            log.debug("Failed to get capability {} of RI {}. Status is {}.", capabilityId, resourceInstanceId,
                    error);
        }
        log.debug("After getting capability {} of RI {}. Status is {}.", capabilityId, resourceInstanceId, error);
    }
    if (error == null) {
        log.debug("Before getting capability definition for capability {} of RI {}.", capabilityId,
                resourceInstanceId);
        overrideCapabilityData = getCapabilityDataRes.left().value().getLeft();
        getCapabilityDefinitionRes = capabilityOperation.getCapabilityByCapabilityData(overrideCapabilityData);
        if (getCapabilityDefinitionRes.isRight()) {
            error = getCapabilityDefinitionRes.right().value();
            log.debug("Failed to retrieve capability {} of RI {}. Status is {}", capabilityId, resourceInstanceId,
                    error);
        }
        log.debug("After getting capability definition for capability {} of RI {}. Status is {}.", capabilityId,
                resourceInstanceId, error);
    }
    if (error == null) {
        log.debug("Before validating capability properties of capability instance {} of RI {}.",
                capabilityInstanceId, resourceInstanceId);
        overrideCapabilityDefinition = getCapabilityDefinitionRes.left().value();
        if (overrideCapabilityDefinition.getProperties() != null) {
            defaultProperties = overrideCapabilityDefinition.getProperties().stream()
                    .collect(Collectors.toMap(PropertyDefinition::getName, Function.identity()));
        }
        Either<Boolean, TitanOperationStatus> result = validateCapabilityInstanceProperties(defaultProperties,
                propertyValues);
        if (result.isRight()) {
            error = result.right().value();
            log.debug("Failed to add properties {} of RI {}. Status is {}.", capabilityInstance.getUniqueId(),
                    resourceInstanceId, error);
        }
        log.debug("After validating capability properties of capability instance {} of RI {}. Status is {}.",
                capabilityInstanceId, resourceInstanceId, error);
    }
    if (error == null) {
        log.debug("Before adding property values to CI {} of RI {}.", capabilityInstanceId, resourceInstanceId);
        addPropertyValuesRes = addPropertyValuesToCapabilityInstance(capabilityInstance, propertyValues,
                defaultProperties);
        if (addPropertyValuesRes.isRight()) {
            error = addPropertyValuesRes.right().value();
            log.debug("Failed to add properties {} of RI {}. Status is {}.", capabilityInstance.getUniqueId(),
                    resourceInstanceId, error);
        }
        log.debug("After adding property values to CI {} of RI {}.", capabilityInstanceId, resourceInstanceId);
    }
    log.debug("After updating property values of capability {} of RI {}. Status is {}.", capabilityId,
            resourceInstanceId, error);
    if (error == null) {
        return addPropertyValuesRes;
    }
    return Either.right(error);
}

From source file:org.jamocha.dn.compiler.pathblocks.PathBlocks.java

protected static void horizontalRecursion(final Block block, final Stack<Set<FilterInstance>> exclusionStack,
        final PathBlockSet resultBlocks) {
    // needed: the filters that are contained in every rule of the block, where for every
    // filter it is the case that: every rule contains at least one instance not already
    // excluded by the exclusion stack
    // thus: get the non-excluded filter instances
    final Set<FilterInstance> neighbours = block.getBorderConflicts().keySet().stream()
            .filter(fi -> !exclusionStack.stream().anyMatch(as -> as.contains(fi))).collect(toSet());
    if (neighbours.isEmpty()) {
        resultBlocks.addDuringHorizontalRecursion(block);
        return;
    }
    // group them by their filter
    final Map<Filter, List<FilterInstance>> nFilterToInstances = neighbours.stream()
            .collect(groupingBy(FilterInstance::getFilter));
    // get all the rules in the block
    final Set<Either<Rule, ExistentialProxy>> bRules = block.getRulesOrProxies();
    // get a map from filter to all rules containing instances of that filter
    final Map<Filter, Set<Either<Rule, ExistentialProxy>>> nFilterToRulesContainingIt = nFilterToInstances
            .entrySet().stream().collect(toMap(Entry::getKey,
                    e -> e.getValue().stream().map(FilterInstance::getRuleOrProxy).collect(toSet())));
    // get the filters that are contained in every rule
    final Set<Filter> nRelevantFilters = nFilterToInstances.keySet().stream()
            .filter(f -> nFilterToRulesContainingIt.get(f).containsAll(bRules)).collect(toSet());
    // if no filters are left to add, the block is horizontally maximized, add it
    if (nRelevantFilters.isEmpty()) {
        resultBlocks.addDuringHorizontalRecursion(block);
        return;
    }
    // divide into filters without multiple instances and filters with multiple instances
    final List<Filter> nSingleCellFilters, nMultiCellFilters;
    {
        final Map<Boolean, List<Filter>> partition = nRelevantFilters.stream()
                .collect(partitioningBy(f -> nFilterToInstances.get(f).size() > bRules.size()));
        nSingleCellFilters = partition.get(Boolean.FALSE);
        nMultiCellFilters = partition.get(Boolean.TRUE);
    }
    // list of rule-filter-matchings that may be added
    final List<Map<Either<Rule, ExistentialProxy>, Set<FilterInstancesSideBySide>>> matchingFilters = new ArrayList<>();
    final List<Filter> incompatibleFilters = new ArrayList<>();

    // there is a 1 to 1 mapping from filter instances (side-by-side) to rules
    // for every filter instance, the conflicts have to be the same in all rules
    final Map<Either<Rule, ExistentialProxy>, Map<Filter, FilterInstancesSideBySide>> bRuleToFilterToBlockInstances = block
            .getFilterInstances().stream().collect(groupingBy(FilterInstancesSideBySide::getRuleOrProxy,
                    toMap(FilterInstancesSideBySide::getFilter, Function.identity())));
    // prefer singleCellFilters
    final List<Filter> bFilters = ImmutableList.copyOf(block.getFilters());
    findMatchingAndIncompatibleFilters(nFilterToInstances, bRules, nSingleCellFilters, bFilters,
            matchingFilters, incompatibleFilters, bRuleToFilterToBlockInstances);
    // if none matched, try multiCellFilters, otherwise defer them
    if (matchingFilters.isEmpty()) {
        findMatchingAndIncompatibleFilters(nFilterToInstances, bRules, nMultiCellFilters, bFilters,
                matchingFilters, incompatibleFilters, bRuleToFilterToBlockInstances);
        // if still none matched, the block is maximal, add it to the result blocks
        if (matchingFilters.isEmpty()) {
            resultBlocks.addDuringHorizontalRecursion(block);
            return;
        }
    }

    // create the next exclusion layer
    final Set<FilterInstance> furtherExcludes = new HashSet<>();
    // add it to the stack
    exclusionStack.push(furtherExcludes);
    // add all incompatible filter instances to the exclusion stack
    for (final Filter incompatibleFilter : incompatibleFilters) {
        for (final Either<Rule, ExistentialProxy> rule : bRules) {
            furtherExcludes.addAll(incompatibleFilter.getInstances(rule));
        }
    }
    // for every matching filter instance set, create a new block
    for (final Map<Either<Rule, ExistentialProxy>, Set<FilterInstancesSideBySide>> neighbourMap : matchingFilters) {
        final Block newBlock = new Block(block);
        newBlock.addFilterInstances(neighbourMap);
        // recurse for that block
        horizontalRecursion(newBlock, exclusionStack, resultBlocks);
        // after the recursion, exclude all filter instances just used
        for (final Set<FilterInstancesSideBySide> set : neighbourMap.values()) {
            for (final FilterInstancesSideBySide filterInstancesSideBySide : set) {
                for (final FilterInstance filterInstance : filterInstancesSideBySide.getInstances()) {
                    furtherExcludes.add(filterInstance);
                }
            }
        }
    }
    // eliminate top layer of the exclusion stack
    exclusionStack.pop();
}

From source file:org.jamocha.dn.compiler.pathblocks.PathBlocks.java

protected static void findMatchingAndIncompatibleFilters(
        final Map<Filter, List<FilterInstance>> nFilterToInstances,
        final Set<Either<Rule, ExistentialProxy>> bRules, final List<Filter> nFilters,
        final List<Filter> bFilters,
        final List<Map<Either<Rule, ExistentialProxy>, Set<FilterInstancesSideBySide>>> matchingFilters,
        final List<Filter> incompatibleFilters,
        final Map<Either<Rule, ExistentialProxy>, Map<Filter, FilterInstancesSideBySide>> bRuleToFilterToBlockInstances) {
    // iterate over every single-/multi-cell filter and check that its instances have the same
    // conflicts in every rule
    for (final Filter nFilter : nFilters) {
        boolean matchingConstellationFound = false;

        // iterate over the possible mappings: (filter,rule) -> filter instance
        final List<Set<FilterInstance>> nListOfRelevantFilterInstancesGroupedByRule = new ArrayList<>(
                nFilterToInstances.get(nFilter).stream()
                        .collect(groupingBy(FilterInstance::getRuleOrProxy, toSet())).values());
        // create the cartesian product
        final Set<List<FilterInstance>> nRelevantFilterInstanceCombinations = Sets
                .cartesianProduct(nListOfRelevantFilterInstancesGroupedByRule);
        // iterate over the possible filter instance combinations
        cartesianProductLoop: for (final List<FilterInstance> nCurrentOutsideFilterInstances : nRelevantFilterInstanceCombinations) {
            // list of conflicts for this filter instance combination
            final List<Conflict> conflicts = new ArrayList<>();
            // create a map for faster lookup: rule -> filter instance (outside)
            final Map<Either<Rule, ExistentialProxy>, FilterInstance> nRuleToCurrentOutsideFilterInstance = nCurrentOutsideFilterInstances
                    .stream().collect(toMap(FilterInstance::getRuleOrProxy, Function.identity()));
            if (bRules.size() > 1) {
                // iterate over the Rule-BlockFilterInstance-mappings
                for (final Entry<Either<Rule, ExistentialProxy>, Map<Filter, FilterInstancesSideBySide>> entry : bRuleToFilterToBlockInstances
                        .entrySet()) {
                    int i = 0;
                    final Either<Rule, ExistentialProxy> rule = entry.getKey();
                    final Map<Filter, FilterInstancesSideBySide> bFilterToBlockInstances = entry.getValue();
                    // for every filter of the block check that the conflicts are the same as
                    // those of the first rule
                    for (final Filter bFilter : bFilters) {
                        // get the mapping from rule to filter instance for the current filter
                        final FilterInstancesSideBySide bSideBySide = bFilterToBlockInstances.get(bFilter);
                        // get the corresponding filter instance(s) that may be added
                        final FilterInstance nSource = nRuleToCurrentOutsideFilterInstance.get(rule);
                        // iterate over the filter instances
                        for (final FilterInstance bTarget : bSideBySide.getInstances()) {
                            // determine conflict between inside instance and outside instance
                            final Conflict conflict = nSource.getOrDetermineConflicts(bTarget);
                            // if this is the first loop iteration, just add the conflict to be
                            // compared later on
                            if (i >= conflicts.size()) {
                                conflicts.add(conflict);
                            } else if (!hasEqualConflicts(conflicts.get(i), conflict)) {
                                // if the conflicts don't match, continue with next filter
                                continue cartesianProductLoop;
                            }
                            ++i;
                        }
                    }
                }
            }
            // conflict identical for all rules
            matchingFilters.add(bRules.stream().collect(toMap(Function.identity(), rule -> Collections
                    .singleton(new FilterInstancesSideBySide(nRuleToCurrentOutsideFilterInstance.get(rule))))));
            matchingConstellationFound = true;
        }
        if (!matchingConstellationFound) {
            incompatibleFilters.add(nFilter);
        }
    }
}