Example usage for java.util.stream Collectors toConcurrentMap

List of usage examples for java.util.stream Collectors toConcurrentMap

Introduction

On this page you can find example usages of java.util.stream.Collectors.toConcurrentMap.

Prototype

public static <T, K, U> Collector<T, ?, ConcurrentMap<K, U>> toConcurrentMap(
        Function<? super T, ? extends K> keyMapper, Function<? super T, ? extends U> valueMapper) 

Source Link

Document

Returns a concurrent Collector that accumulates elements into a ConcurrentMap whose keys and values are the result of applying the provided mapping functions to the input elements.

Usage

From source file:Main.java

public static void main(String[] args) {
    // Build a concurrent id -> name lookup table from the employee list.
    Map<Long, String> nameById = Employee.persons()
            .stream()
            .collect(Collectors.toConcurrentMap(Employee::getId, Employee::getName));
    System.out.println(nameById);
}

From source file:org.obiba.mica.micaConfig.service.helper.OpalServiceHelper.java

@Cacheable(value = "opal-taxonomies", key = "#opalJavaClient.newUri().build()") //opal root url as key
public Map<String, Taxonomy> getTaxonomies(OpalJavaClient opalJavaClient) {
    log.info("Fetching opal taxonomies");
    final URI taxonomiesUri = opalJavaClient.newUri().segment("system", "conf", "taxonomies").build();
    final List<Opal.TaxonomyDto> dtos = opalJavaClient.getResources(Opal.TaxonomyDto.class, taxonomiesUri,
            Opal.TaxonomyDto.newBuilder());

    // Index the taxonomies by name, converting each DTO into its domain object.
    final ConcurrentMap<String, Taxonomy> byName = dtos.stream()
            .collect(Collectors.toConcurrentMap(Opal.TaxonomyDto::getName, this::fromDto));
    eventBus.post(new OpalTaxonomiesUpdatedEvent(byName));
    return byName;
}

From source file:org.lightjason.examples.pokemon.simulation.agent.pokemon.CLevel.java

/**
 * Maps each key of a distribution map to one sampled value.
 *
 * <p>For every entry one sample is drawn from the entry's distribution and
 * then clamped to the [minimum, maximum] interval carried by the triple.</p>
 *
 * @param p_map map of key to (distribution, minimum, maximum)
 * @return map of key to clamped sample
 *
 * @tparam T key type
 */
private static <T> Map<T, Number> generate(
        final Map<T, ImmutableTriple<AbstractRealDistribution, Number, Number>> p_map) {
    return p_map.entrySet().parallelStream()
            .collect(Collectors.toConcurrentMap(Map.Entry::getKey, i -> {
                final double l_sample = i.getValue().getLeft().sample();
                final double l_lower = i.getValue().getMiddle().doubleValue();
                final double l_upper = i.getValue().getRight().doubleValue();
                return Math.min(Math.max(l_sample, l_lower), l_upper);
            }));
}

From source file:org.lightjason.examples.pokemon.simulation.agent.pokemon.CPokemon.java

/**
 * ctor
 *
 * @param p_environment environment
 * @param p_agentconfiguration agent configuration
 * @param p_position initialize position
 * @param p_pokemon pokemon name (must be non-empty)
 * @throws IllegalArgumentException if the pokemon name is empty
 */
@SuppressWarnings("unchecked")
public CPokemon(final IEnvironment p_environment, final IAgentConfiguration<IAgent> p_agentconfiguration,
        final DoubleMatrix1D p_position, final String p_pokemon) {
    super(p_environment, p_agentconfiguration, p_position);

    // fail fast on an unusable name — fix: use the standard exception type for
    // bad arguments and a grammatically correct message (was a raw
    // RuntimeException with "need not to be empty"); backward compatible since
    // IllegalArgumentException extends RuntimeException
    if (p_pokemon.isEmpty())
        throw new IllegalArgumentException("pokemon name must not be empty");

    m_pokemon = p_pokemon;
    final CLevel l_level = CDefinition.INSTANCE.tupel(m_pokemon, m_level.get());

    // per-pokemon experience data: total maximum and the share needed per level
    m_experiencemaximum = CDefinition.INSTANCE.experience(m_pokemon);
    m_levelexperience = m_experiencemaximum.divide(BigInteger.valueOf(CDefinition.INSTANCE.level(p_pokemon)));
    m_ethnic = l_level.ethnic();
    m_motivation = l_level.motivation();
    // index attacks by name; index attributes by name with their access level and value
    m_attack = l_level.attack().stream().collect(Collectors.toConcurrentMap(CAttack::name, i -> i));
    m_attribute = l_level.attribute().entrySet().stream().collect(Collectors.toConcurrentMap(
            i -> i.getKey().name(), i -> new MutablePair<>(i.getKey().access(), i.getValue())));

    // register the agent's beliefbases
    m_beliefbase.add(new CEthnicBeliefbase().create("ethnic", m_beliefbase))
            .add(new CAttributeBeliefbase().create("attribute", m_beliefbase))
            .add(new CMotivationBeliefbase().create("motivation", m_beliefbase))
            .add(new CAttackBeliefbase().create("attack", m_beliefbase))
            .add(new CEnvironmentBeliefbase().create("env", m_beliefbase));

    // social-force components
    m_socialforcepotential = new CPotential();
    m_socialforcepotentialrating = new CRating();
    m_socialforcemetric = (i) -> SOCIALFORCEMETRIC.apply(this.attribute(), i.attribute());
}

From source file:org.phoenicis.repository.types.MultipleRepository.java

@Override
public RepositoryDTO fetchInstallableApplications() {
    LOGGER.info(String.format("Fetching applications for: %s", this.toString()));

    /*
     * Build a mapping from each repository to the content it fetched first,
     * because the later reduction step must know which source a CategoryDTO
     * list came from in order to preserve ordering.
     */
    final Map<Repository, RepositoryDTO> fetched = this.repositories.stream().parallel()
            .collect(Collectors.toConcurrentMap(repository -> repository,
                    Repository::fetchInstallableApplications));

    return mergeRepositories(fetched, repositories);
}

From source file:se.idsecurity.ldifcompare.LdifCompare.java

/**
 * Compare LDIF entries using DN, if the DN is the same in both entries then the entries should be compared
 * @param source Entries from the "left" file
 * @param target Entries from the "right" file
 * @param diffFile Write results to this file
 * @param comment Comment to write to the file
 * @throws FileNotFoundException /*from  w w w. j a v a 2  s  . co m*/
 * @since 1.2
 */
private void getDiffUsingDN(Set<Entry> source, Set<Entry> target, File diffFile, String comment)
        throws FileNotFoundException {
    StopWatch sw = new StopWatch();
    sw.start();

    ConcurrentMap<String, Entry> collect = source.parallelStream()
            .collect(Collectors.toConcurrentMap(Entry::getDN, Function.identity()));
    sw.stop();
    logger.error("Set -> ConcurrentMap: " + sw.toString());
    sw.reset();

    try (PrintWriter writer = new PrintWriter(diffFile)) {

        writer.println(comment);

        sw.start();
        for (Entry targetEntry : target) {
            String dn = targetEntry.getDN();
            Entry sEntry = collect.get(dn);
            if (sEntry != null) {
                writer.println();
                writer.println(dn);
                List<Modification> diff = Entry.diff(sEntry, targetEntry, false);
                for (Modification mod : diff) {
                    writer.println(mod);
                }
            }

        }
    }
    sw.stop();
    logger.error("Time taken to loop inside getDiffUsingDN: " + sw.toString());

}

From source file:se.idsecurity.ldifcompare.LdifCompare.java

/**
 * Writes to {@code ldifWriterUnique} every source entry whose DN does not occur in the target set.
 * Runs asynchronously on {@code exec}; {@code fileWriteCdl} is counted down when writing finishes.
 *
 * @param source entries to scan for unique DNs
 * @param target entries whose DNs mark a source entry as non-unique
 * @param ldifWriterUnique writer receiving the unique entries
 */
private void getUniqueEntriesUsingDN(Set<Entry> source, Set<Entry> target, LDIFWriter ldifWriterUnique) {
    Runnable r = () -> {
        StopWatch sw = new StopWatch();
        sw.start();
        // Index the target entries by DN for constant-time membership checks.
        ConcurrentMap<String, Entry> targetMap = target.parallelStream()
                .collect(Collectors.toConcurrentMap(Entry::getDN, Function.identity()));

        for (Entry e : source) {
            //Get unique entries from the rightLdif file based on DN - no entry with the same DN exist in leftLdif
            String dn = e.getDN();
            if (!targetMap.containsKey(dn)) {
                try {
                    ldifWriterUnique.writeEntry(e);//Entry only exists in rightLdif
                } catch (IOException ex) {
                    logger.error("Error writing to LDIF file", ex);
                }
            }
        }
        fileWriteCdl.countDown();

        sw.stop();
        // Fix: timing diagnostics were logged at ERROR level via string concatenation;
        // use parameterized INFO-level logging instead. The IOException above stays at ERROR.
        logger.info("Time taken to process getUniqueEntriesUsingDN(): {}", sw);
        sw.reset();
    };

    exec.execute(r);

}