Example usage for java.util.stream Collectors toSet

Introduction

On this page you can find usage examples for java.util.stream Collectors.toSet().

Prototype

public static <T> Collector<T, ?, Set<T>> toSet() 

Document

Returns a Collector that accumulates the input elements into a new Set.
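
As a quick, self-contained sketch (the class and data below are illustrative, not taken from the examples that follow), Collectors.toSet() accumulates distinct stream elements into a Set whose concrete type, mutability, and iteration order are unspecified:

import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

public class ToSetExample {
    public static void main(String[] args) {
        List<String> names = List.of("alice", "bob", "alice", "carol");

        // Duplicates collapse because the elements are accumulated into a Set.
        Set<String> upperCased = names.stream()
                .map(String::toUpperCase)
                .collect(Collectors.toSet());

        System.out.println(upperCased); // e.g. [ALICE, CAROL, BOB]; order is not guaranteed
    }
}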

Usage

From source file:com.oneops.transistor.util.CloudUtil.java

private Set<String> getCloudServices(CmsRfcRelation rel) {
    return cmProcessor.getFromCIRelationsNaked(rel.getToCiId(), BASE_PROVIDES, null).stream()
            .filter(this::isService).map(cmsCIRelation -> cmsCIRelation.getAttribute("service").getDjValue())
            .collect(Collectors.toSet());
}

From source file:com.netflix.genie.web.data.services.jpa.JpaServiceUtils.java

/**
 * Convert the data in this job projection into a job DTO for external exposure.
 *
 * @param jobProjection the job projection to convert
 * @return The job DTO representation
 */
static Job toJobDto(final JobProjection jobProjection) {
    final Job.Builder builder = new Job.Builder(jobProjection.getName(), jobProjection.getUser(),
            jobProjection.getVersion()).withId(jobProjection.getUniqueId())
                    .withCreated(jobProjection.getCreated()).withUpdated(jobProjection.getUpdated())
                    .withTags(
                            jobProjection.getTags().stream().map(TagEntity::getTag).collect(Collectors.toSet()))
                    .withStatus(jobProjection.getStatus()).withCommandArgs(jobProjection.getCommandArgs());

    jobProjection.getDescription().ifPresent(builder::withDescription);
    jobProjection.getStatusMsg().ifPresent(builder::withStatusMsg);
    jobProjection.getStarted().ifPresent(builder::withStarted);
    jobProjection.getFinished().ifPresent(builder::withFinished);
    jobProjection.getArchiveLocation().ifPresent(builder::withArchiveLocation);
    jobProjection.getClusterName().ifPresent(builder::withClusterName);
    jobProjection.getCommandName().ifPresent(builder::withCommandName);
    jobProjection.getGrouping().ifPresent(builder::withGrouping);
    jobProjection.getGroupingInstance().ifPresent(builder::withGroupingInstance);
    setDtoMetadata(builder, jobProjection);

    return builder.build();
}

From source file:ijfx.ui.filter.DefaultNumberFilter.java

public void updateChart() {

    final double min; // minimum value
    final double max; // maximum value
    double range; // max - min
    final double binSize;
    int maximumBinNumber = 30;
    int finalBinNumber;

    int differentValuesCount = possibleValues.stream().filter(n -> Double.isFinite(n.doubleValue()))
            .collect(Collectors.toSet()).size();
    if (differentValuesCount < maximumBinNumber) {
        finalBinNumber = differentValuesCount;
    } else {
        finalBinNumber = maximumBinNumber;
    }

    EmpiricalDistribution distribution = new EmpiricalDistribution(finalBinNumber);

    double[] values = possibleValues.parallelStream().filter(n -> Double.isFinite(n.doubleValue()))
            .mapToDouble(v -> v.doubleValue()).sorted().toArray();
    distribution.load(values);

    min = values[0];
    max = values[values.length - 1];
    range = max - min;
    binSize = range / (finalBinNumber - 1);

    XYChart.Series<Double, Double> serie = new XYChart.Series<>();
    ArrayList<Data<Double, Double>> data = new ArrayList<>();
    double k = min;
    for (SummaryStatistics st : distribution.getBinStats()) {
        data.add(new Data<>(k, (double) st.getN()));
        k += binSize;
    }

    Platform.runLater(() -> {

        serie.getData().addAll(data);
        areaChart.getData().clear();

        areaChart.getData().add(serie);

        updateSlider(min, max, finalBinNumber);
    });
}

From source file:se.uu.it.cs.recsys.ruleminer.impl.FPGrowthImpl.java

/**
 *
 * @param singlePrefixPath ordered single-prefix path from an FP-tree
 * @return pairs of (frequent pattern, its support); returns an empty map if
 * the input is null or empty
 */
public static Map<Set<Integer>, Integer> getFrequentPatternFromSinglePrefixPath(List<Item> singlePrefixPath) {
    if (singlePrefixPath == null || singlePrefixPath.isEmpty()) {
        LOGGER.warn("Nonsence to give null or empty input. Do you agree?");
        return Collections.EMPTY_MAP;
    }

    Set<Item> itemSetFromPath = new HashSet<>(singlePrefixPath);

    Set<Set<Item>> powerSet = Sets.powerSet(itemSetFromPath);

    Map<Set<Integer>, Integer> r = new HashMap<>();

    Util.removeEmptySet(powerSet).forEach(itemSet -> {
        int localMinSupport = FPTreeUtil.getMinSupport(itemSet);

        r.put(itemSet.stream().map(item -> item.getId()).collect(Collectors.toSet()), localMinSupport);
    });

    return r;
}

From source file:eu.itesla_project.modules.rules.CheckSecurityTool.java

@Override
public void run(CommandLine line) throws Exception {
    OfflineConfig config = OfflineConfig.load();
    Path caseFile = Paths.get(line.getOptionValue("case-file"));
    Objects.requireNonNull(caseFile);
    String rulesDbName = line.hasOption("rules-db-name") ? line.getOptionValue("rules-db-name")
            : OfflineConfig.DEFAULT_RULES_DB_NAME;
    RulesDbClientFactory rulesDbClientFactory = config.getRulesDbClientFactoryClass().newInstance();
    String workflowId = line.getOptionValue("workflow");
    RuleAttributeSet attributeSet = RuleAttributeSet.valueOf(line.getOptionValue("attribute-set"));
    double purityThreshold = line.hasOption("purity-threshold")
            ? Double.parseDouble(line.getOptionValue("purity-threshold"))
            : CheckSecurityCommand.DEFAULT_PURITY_THRESHOLD;
    Path outputCsvFile = null;
    if (line.hasOption("output-csv-file")) {
        outputCsvFile = Paths.get(line.getOptionValue("output-csv-file"));
    }
    Set<SecurityIndexType> securityIndexTypes = line.hasOption("security-index-types")
            ? Arrays.stream(line.getOptionValue("security-index-types").split(","))
                    .map(SecurityIndexType::valueOf).collect(Collectors.toSet())
            : EnumSet.allOf(SecurityIndexType.class);
    final Set<String> contingencies = line.hasOption("contingencies")
            ? Arrays.stream(line.getOptionValue("contingencies").split(",")).collect(Collectors.toSet())
            : null;

    try (RulesDbClient rulesDb = rulesDbClientFactory.create(rulesDbName)) {

        if (Files.isRegularFile(caseFile)) {
            System.out.println("loading case " + caseFile + "...");
            // load the network
            Network network = Importers.loadNetwork(caseFile);
            if (network == null) {
                throw new RuntimeException("Case '" + caseFile + "' not found");
            }
            network.getStateManager().allowStateMultiThreadAccess(true);

            System.out.println("checking rules...");

            Map<String, Map<SecurityIndexType, SecurityRuleCheckStatus>> checkStatusPerContingency = SecurityRuleUtil
                    .checkRules(network, rulesDb, workflowId, attributeSet, securityIndexTypes, contingencies,
                            purityThreshold);

            if (outputCsvFile == null) {
                prettyPrint(checkStatusPerContingency, securityIndexTypes);
            } else {
                writeCsv(checkStatusPerContingency, securityIndexTypes, outputCsvFile);
            }
        } else if (Files.isDirectory(caseFile)) {
            if (outputCsvFile == null) {
                throw new RuntimeException(
                        "In case of multiple impact security checks, only output to csv file is supported");
            }
            Map<String, Map<SecurityIndexId, SecurityRuleCheckStatus>> checkStatusPerBaseCase = Collections
                    .synchronizedMap(new LinkedHashMap<>());
            Importers.loadNetworks(caseFile, true, network -> {
                try {
                    Map<String, Map<SecurityIndexType, SecurityRuleCheckStatus>> checkStatusPerContingency = SecurityRuleUtil
                            .checkRules(network, rulesDb, workflowId, attributeSet, securityIndexTypes,
                                    contingencies, purityThreshold);

                    Map<SecurityIndexId, SecurityRuleCheckStatus> checkStatusMap = new HashMap<>();
                    for (Map.Entry<String, Map<SecurityIndexType, SecurityRuleCheckStatus>> entry : checkStatusPerContingency
                            .entrySet()) {
                        String contingencyId = entry.getKey();
                        for (Map.Entry<SecurityIndexType, SecurityRuleCheckStatus> entry1 : entry.getValue()
                                .entrySet()) {
                            SecurityIndexType type = entry1.getKey();
                            SecurityRuleCheckStatus status = entry1.getValue();
                            checkStatusMap.put(new SecurityIndexId(contingencyId, type), status);
                        }
                    }

                    checkStatusPerBaseCase.put(network.getId(), checkStatusMap);
                } catch (Exception e) {
                    LOGGER.error(e.toString(), e);
                }
            }, dataSource -> System.out.println("loading case " + dataSource.getBaseName() + "..."));

            writeCsv2(checkStatusPerBaseCase, outputCsvFile);
        }
    }
}

From source file:com.netflix.spinnaker.clouddriver.ecs.provider.view.EcsServerClusterProvider.java

private Map<String, Set<EcsServerCluster>> findClustersForRegion(Map<String, Set<EcsServerCluster>> clusterMap,
        AmazonCredentials credentials, AmazonCredentials.AWSRegion awsRegion, String application) {

    Collection<Service> services = serviceCacheClient.getAll(credentials.getName(), awsRegion.getName());
    Collection<Task> allTasks = taskCacheClient.getAll(credentials.getName(), awsRegion.getName());

    for (Service service : services) {
        String applicationName = service.getApplicationName();
        String serviceName = service.getServiceName();

        if (application != null && !applicationName.equals(application)) {
            continue;
        }

        Set<LoadBalancer> loadBalancers = new HashSet<>(
                ecsLoadbalancerCacheClient.find(credentials.getName(), awsRegion.getName()));

        Set<Instance> instances = allTasks.stream()
                .filter(task -> task.getGroup().equals("service:" + serviceName))
                .map(task -> convertToEcsTask(credentials.getName(), awsRegion.getName(), serviceName, task))
                .collect(Collectors.toSet());

        String taskDefinitionKey = Keys.getTaskDefinitionKey(credentials.getName(), awsRegion.getName(),
                service.getTaskDefinition());
        com.amazonaws.services.ecs.model.TaskDefinition taskDefinition = taskDefinitionCacheClient
                .get(taskDefinitionKey);
        if (taskDefinition == null) {
            continue;
        }

        EcsServerGroup ecsServerGroup = buildEcsServerGroup(credentials.getName(), awsRegion.getName(),
                serviceName, service.getDesiredCount(), instances, service.getCreatedAt(),
                service.getClusterName(), taskDefinition);

        if (ecsServerGroup == null) {
            continue;
        }

        if (clusterMap.containsKey(applicationName)) {
            String escClusterName = StringUtils.substringBeforeLast(ecsServerGroup.getName(), "-");
            boolean found = false;

            for (EcsServerCluster cluster : clusterMap.get(applicationName)) {
                if (cluster.getName().equals(escClusterName)) {
                    cluster.getServerGroups().add(ecsServerGroup);
                    found = true;
                    break;
                }
            }

            if (!found) {
                EcsServerCluster spinnakerCluster = buildSpinnakerServerCluster(credentials, loadBalancers,
                        ecsServerGroup);
                clusterMap.get(applicationName).add(spinnakerCluster);
            }
        } else {
            EcsServerCluster spinnakerCluster = buildSpinnakerServerCluster(credentials, loadBalancers,
                    ecsServerGroup);
            clusterMap.put(applicationName, Sets.newHashSet(spinnakerCluster));
        }
    }

    return clusterMap;
}

From source file:org.openlmis.fulfillment.service.ExporterBuilder.java

/**
 * Fetch orderables for each line item of the given order.
 * @param order the related order
 * @return a list of orderable dtos
 */
public List<OrderableDto> getLineItemOrderables(Order order) {
    Set<UUID> ids = order.getOrderLineItems().stream().map(OrderLineItem::getOrderableId)
            .collect(Collectors.toSet());
    return products.findByIds(ids);
}

From source file:cc.kave.commons.pointsto.evaluation.ProjectTrainValidateEvaluation.java

private void evaluateType(ICoReTypeName type, List<ProjectUsageStore> usageStores) throws IOException {
    Set<ProjectIdentifier> projects = usageStores.stream().flatMap(store -> store.getProjects(type).stream())
            .collect(Collectors.toSet());

    if (projects.size() < numFolds) {
        ++skippedNumProjects;
        return;
    }

    log("%s:\n", CoReNames.vm2srcQualifiedType(type));

    List<List<ProjectIdentifier>> projectFolds = createProjectFolds(projects, type, usageStores);
    List<EvaluationResult> localResults = new ArrayList<>(usageStores.size() * usageStores.size());
    for (ProjectUsageStore trainingStore : usageStores) {
        Map<ProjectIdentifier, List<Usage>> trainingUsages = loadUsages(trainingStore, type);

        for (ProjectUsageStore validationStore : usageStores) {
            // avoid unnecessary loading of usages
            Map<ProjectIdentifier, List<Usage>> validationUsages = (trainingStore == validationStore)
                    ? trainingUsages
                    : loadUsages(validationStore, type);
            ProjectTrainValidateSetProvider setProvider = new ProjectTrainValidateSetProvider(projectFolds,
                    trainingUsages, validationUsages);
            double score;
            try {
                score = cvEvaluator.evaluate(setProvider);
                localResults.add(new EvaluationResult(trainingStore.getName(), validationStore.getName(), score,
                        getNumberOfUsages(trainingUsages), getNumberOfUsages(validationUsages)));
            } catch (RuntimeException e) {
                if (e.getCause() instanceof EmptySetException) {
                    ++skippedUsageFilter;
                    return;
                } else {
                    throw e;
                }
            }
            log("\t%s-%s: %s=%.3f, Fold size deviation=%.1f\n", trainingStore.getName(),
                    validationStore.getName(), cvEvaluator.getMeasure().getClass().getSimpleName(), score,
                    setProvider.getAbsoluteFoldSizeDeviation());
        }
    }
    results.put(type, localResults);
}

From source file:com.chadekin.jadys.syntax.from.impl.FromClauseBuilderImpl.java

private Set<String> findRelatedAlias(Collection<String> aliasToSearch) {
    return joinStatements.entrySet().stream().filter(entry -> aliasToSearch.contains(entry.getKey()))
            .map(entry -> extractAlias(entry.getValue())).flatMap(entry -> entry.stream())
            .filter(alias -> joinStatements.keySet().contains(alias) && !aliasToSearch.contains(alias))
            .collect(Collectors.toSet());
}