Example usage for org.apache.commons.collections4 MultiValuedMap put

List of usage examples for org.apache.commons.collections4 MultiValuedMap put

Introduction

In this page you can find the example usage for org.apache.commons.collections4 MultiValuedMap put.

Prototype

boolean put(K key, V value);

Source Link

Document

Adds a key-value mapping to this multi-valued map.

Usage

From source file:com.o2o.util.WebUtils.java

/**
 * ?? /*from www.j av  a2s.co  m*/
 * @param url
 * @return
 */
public static String generateShortUrl(String url) {
    MultiValuedMap<String, Object> params = new ArrayListValuedHashMap<String, Object>();
    params.put("url", url);
    String jsonStr = HttpUtil.doPost("http://dwz.cn/create.php", params);

    JSONObject object = JSON.parseObject(jsonStr);
    String shortUrl = object.getString("tinyurl");
    if (StringUtils.isEmpty(shortUrl))
        return url;
    return shortUrl;
}

From source file:fr.cph.chicago.util.Util.java

/**
 * Builds the request parameter map for fetching the user's favorite train stations.
 *
 * @param context the Android context used to resolve resource strings and preferences
 * @return a multi-valued map with one "map id" entry per favorite station id
 */
@NonNull
public static MultiValuedMap<String, String> getFavoritesTrainParams(@NonNull final Context context) {
    final MultiValuedMap<String, String> params = new ArrayListValuedHashMap<>();
    for (final Integer favorite : Preferences.getTrainFavorites(context, App.PREFERENCE_FAVORITES_TRAIN)) {
        params.put(context.getString(R.string.request_map_id), favorite.toString());
    }
    return params;
}

From source file:fr.cph.chicago.util.Util.java

/**
 * Builds the request parameter map for fetching the user's favorite bus stops.
 * Each stored favorite is decoded into a (route, stop id) pair and added as
 * two separate entries.
 *
 * @param context the Android context used to resolve resource strings and preferences
 * @return a multi-valued map with route and stop-id entries for every favorite
 */
@NonNull
public static MultiValuedMap<String, String> getFavoritesBusParams(@NonNull final Context context) {
    final MultiValuedMap<String, String> params = new ArrayListValuedHashMap<>();
    for (final String encodedFavorite : Preferences.getBusFavorites(context, App.PREFERENCE_FAVORITES_BUS)) {
        // decodeBusFavorite yields [route, stopId]
        final String[] fav = decodeBusFavorite(encodedFavorite);
        params.put(context.getString(R.string.request_rt), fav[0]);
        params.put(context.getString(R.string.request_stop_id), fav[1]);
    }
    return params;
}

From source file:de.tud.inf.db.sparqlytics.repository.AbstractRepository.java

/**
 * Builds a metric registry describing this repository: gauges for the number
 * of cubes, dimensions and measures, plus histograms over per-dimension level
 * counts and the cube/dimension/measure cross-relationships.
 *
 * @return a freshly populated {@link MetricRegistry} snapshot
 */
@Override
public MetricRegistry getStatistics() {
    MetricRegistry registry = new MetricRegistry();
    final Set<Cube> cubes = getCubes();
    registry.register("#cubes", new Gauge<Integer>() {
        @Override
        public Integer getValue() {
            return cubes.size();
        }
    });
    final Set<Dimension> allDimensions = getDimensions();
    registry.register("#dimensions", new Gauge<Integer>() {
        @Override
        public Integer getValue() {
            return allDimensions.size();
        }
    });
    final Set<Measure> allMeasures = getMeasures();
    registry.register("#measures", new Gauge<Integer>() {
        @Override
        public Integer getValue() {
            return allMeasures.size();
        }
    });
    // Size each sliding-window reservoir by the number of samples it will
    // actually receive, never less than 1: a zero-sized SlidingWindowReservoir
    // throws ArithmeticException (modulo by zero) on its first update. The
    // previous code sized every reservoir by the cube count, which silently
    // evicted samples whenever there were more dimensions/measures than cubes.
    Histogram levelsPerDimension = registry.register("levels/dimension",
            new Histogram(new SlidingWindowReservoir(Math.max(1, allDimensions.size()))));
    for (Dimension dimension : allDimensions) {
        // Levels include the implicit "all" level, hence the -1.
        levelsPerDimension.update(dimension.getLevels().size() - 1);
    }
    Histogram dimensionsPerCube = registry.register("dimensions/cube",
            new Histogram(new SlidingWindowReservoir(Math.max(1, cubes.size()))));
    Histogram measuresPerCube = registry.register("measures/cube",
            new Histogram(new SlidingWindowReservoir(Math.max(1, cubes.size()))));
    // Invert the cube->dimension and cube->measure relations so we can count
    // how many cubes reference each dimension/measure.
    MultiValuedMap<Dimension, Cube> dimensionMap = new ArrayListValuedHashMap<>();
    MultiValuedMap<Measure, Cube> measureMap = new ArrayListValuedHashMap<>();
    for (Cube cube : cubes) {
        Set<Dimension> dimensions = cube.getDimensions();
        dimensionsPerCube.update(dimensions.size());
        for (Dimension dimension : dimensions) {
            dimensionMap.put(dimension, cube);
        }
        Set<Measure> measures = cube.getMeasures();
        measuresPerCube.update(measures.size());
        for (Measure measure : measures) {
            measureMap.put(measure, cube);
        }
    }
    Histogram cubesPerDimension = registry.register("cubes/dimension",
            new Histogram(new SlidingWindowReservoir(Math.max(1, allDimensions.size()))));
    for (Collection<Cube> values : dimensionMap.asMap().values()) {
        cubesPerDimension.update(values.size());
    }
    Histogram cubesPerMeasure = registry.register("cubes/measure",
            new Histogram(new SlidingWindowReservoir(Math.max(1, allMeasures.size()))));
    for (Collection<Cube> values : measureMap.asMap().values()) {
        cubesPerMeasure.update(values.size());
    }
    return registry;
}

From source file:fr.cph.chicago.core.fragment.NearbyFragment.java

/**
 * Fetches upcoming train arrivals for every station in the given list by
 * querying the CTA "train arrivals" endpoint once per station and merging
 * the parsed results into a single sparse array keyed by arrival id.
 *
 * @param trainStations the nearby stations to load arrivals for
 * @return merged arrivals for all stations; empty when the fragment is detached
 * @throws RuntimeException any failure is rethrown via Exceptions.propagate
 */
private SparseArray<TrainArrival> loadAroundTrainArrivals(@NonNull final List<Station> trainStations) {
    try {
        final SparseArray<TrainArrival> trainArrivals = new SparseArray<>();
        // Skip all network work if the fragment is no longer attached to an activity.
        if (isAdded()) {
            final CtaConnect cta = CtaConnect.getInstance(getContext());
            for (final Station station : trainStations) {
                // One request per station, parameterized by its map id.
                final MultiValuedMap<String, String> reqParams = new ArrayListValuedHashMap<>(1, 1);
                reqParams.put(requestMapId, Integer.toString(station.getId()));
                final InputStream xmlRes = cta.connect(TRAIN_ARRIVALS, reqParams);
                final XmlParser xml = XmlParser.getInstance();
                final SparseArray<TrainArrival> temp = xml.parseArrivals(xmlRes,
                        DataHolder.getInstance().getTrainData());
                // Merge this station's arrivals into the combined result.
                for (int j = 0; j < temp.size(); j++) {
                    trainArrivals.put(temp.keyAt(j), temp.valueAt(j));
                }
                trackWithGoogleAnalytics(activity, R.string.analytics_category_req,
                        R.string.analytics_action_get_train, TRAINS_ARRIVALS_URL, 0);
            }
        }
        return trainArrivals;
    } catch (final Throwable throwable) {
        throw Exceptions.propagate(throwable);
    }
}

From source file:fr.cph.chicago.core.fragment.NearbyFragment.java

/**
 * Loads upcoming bus arrivals for a single stop and merges them, grouped by
 * route direction, into the shared per-stop arrivals map.
 *
 * @param busStop        the stop to query arrivals for
 * @param busArrivalsMap shared map of stop id -> (direction -> arrivals); updated in place
 * @throws RuntimeException any failure is rethrown via Exceptions.propagate
 */
private void loadAroundBusArrivals(@NonNull final BusStop busStop,
        @NonNull final SparseArray<Map<String, List<BusArrival>>> busArrivalsMap) {
    try {
        // Skip all network work if the fragment is no longer attached to an activity.
        if (isAdded()) {
            final CtaConnect cta = CtaConnect.getInstance(getContext());
            int busStopId = busStop.getId();
            // Reuse the per-stop direction map if one is already registered,
            // otherwise fall back to a fresh concurrent map.
            final Map<String, List<BusArrival>> tempMap = busArrivalsMap.get(busStopId,
                    new ConcurrentHashMap<>());
            // NOTE(review): this tests the numeric stop id against a map keyed by
            // route direction, so the condition looks effectively always true and
            // the put always runs — confirm the intended guard. Behavior kept as-is.
            if (!tempMap.containsKey(Integer.toString(busStopId))) {
                busArrivalsMap.put(busStopId, tempMap);
            }

            final MultiValuedMap<String, String> reqParams = new ArrayListValuedHashMap<>(1, 1);
            reqParams.put(requestStopId, Integer.toString(busStopId));
            final InputStream is = cta.connect(BUS_ARRIVALS, reqParams);
            final XmlParser xml = XmlParser.getInstance();
            final List<BusArrival> busArrivals = xml.parseBusArrivals(is);
            // Group arrivals by route direction. computeIfAbsent replaces the
            // manual containsKey/get/put sequence and is atomic on ConcurrentHashMap.
            for (final BusArrival busArrival : busArrivals) {
                tempMap.computeIfAbsent(busArrival.getRouteDirection(), direction -> new ArrayList<>())
                        .add(busArrival);
            }
            trackWithGoogleAnalytics(activity, R.string.analytics_category_req,
                    R.string.analytics_action_get_bus, BUSES_ARRIVAL_URL, 0);
        }
    } catch (final Throwable throwable) {
        throw Exceptions.propagate(throwable);
    }
}

From source file:com.evolveum.midpoint.task.quartzimpl.work.workers.WorkersManager.java

/**
 * Derives the set of worker task keys for the given coordinator task, grouped
 * by worker group name. Also records, for each generated key, the per-node
 * configuration it was derived from (via the {@code perNodeConfigurationMap}
 * out-parameter).
 *
 * @param task                    the coordinator task carrying the workers configuration
 * @param perNodeConfigurationMap out-parameter: filled with key -> per-node config entries
 * @param opResult                operation result used when resolving node identifiers
 * @return worker keys grouped by worker group name
 * @throws SchemaException       on schema problems while resolving node identifiers
 * @throws IllegalStateException when the task has no workers configuration
 */
private MultiValuedMap<String, WorkerKey> createWorkerKeys(Task task,
        Map<WorkerKey, WorkerTasksPerNodeConfigurationType> perNodeConfigurationMap, OperationResult opResult)
        throws SchemaException {
    TaskWorkManagementType workManagement = task.getWorkManagement();
    WorkersManagementType workers = workManagement.getWorkers();
    if (workers == null) {
        throw new IllegalStateException("Workers configuration is missing: " + task);
    }
    MultiValuedMap<String, WorkerKey> keys = new ArrayListValuedHashMap<>();
    for (WorkerTasksPerNodeConfigurationType nodeConfig : getWorkersPerNode(workers)) {
        for (String nodeIdentifier : getNodeIdentifiers(nodeConfig, opResult)) {
            // Each node spawns `count` workers (defaulting to one), indexed from 1.
            int workerCount = defaultIfNull(nodeConfig.getCount(), 1);
            for (int index = 1; index <= workerCount; index++) {
                WorkerKey key = createWorkerKey(nodeIdentifier, index, nodeConfig, workers, task);
                keys.put(key.group, key);
                perNodeConfigurationMap.put(key, nodeConfig);
            }
        }
    }
    return keys;
}

From source file:com.evolveum.midpoint.prism.schema.SchemaRegistryImpl.java

/**
 * Parses all prism schemas among the given descriptions. Schemas whose
 * namespace appears in exactly one XSD file are wrapped in a single synthetic
 * "importing" XSD and parsed together in one pass; namespaces fragmented
 * across multiple XSD files are parsed afterwards, one description at a time.
 *
 * @param schemaDescriptions          all known schema descriptions (non-prism ones are skipped)
 * @param allowDelayedItemDefinitions whether item definitions may be resolved lazily
 * @throws SchemaException on schema parsing problems
 */
private void parsePrismSchemas(List<SchemaDescription> schemaDescriptions, boolean allowDelayedItemDefinitions)
        throws SchemaException {
    List<SchemaDescription> prismSchemaDescriptions = schemaDescriptions.stream()
            .filter(sd -> sd.isPrismSchema()).collect(Collectors.toList());
    // Synthetic wrapper XSD whose only job is to <xsd:import> every single-file namespace.
    Element schemaElement = DOMUtil.createElement(DOMUtil.XSD_SCHEMA_ELEMENT);
    schemaElement.setAttribute("targetNamespace", "http://dummy/");
    schemaElement.setAttribute("elementFormDefault", "qualified");

    // These fragmented namespaces should not be included in wrapper XSD because they are defined in multiple XSD files.
    // We have to process them one by one.
    MultiValuedMap<String, SchemaDescription> schemasByNamespace = new ArrayListValuedHashMap<>();
    prismSchemaDescriptions.forEach(sd -> schemasByNamespace.put(sd.getNamespace(), sd));
    List<String> fragmentedNamespaces = schemasByNamespace.keySet().stream()
            .filter(ns -> schemasByNamespace.get(ns).size() > 1).collect(Collectors.toList());
    LOGGER.trace("Fragmented namespaces: {}", fragmentedNamespaces);

    List<SchemaDescription> wrappedDescriptions = new ArrayList<>();
    for (SchemaDescription description : prismSchemaDescriptions) {
        String namespace = description.getNamespace();
        if (!fragmentedNamespaces.contains(namespace)) {
            // Single-file namespace: import it into the wrapper and pre-create its schema object.
            Element importElement = DOMUtil.createSubElement(schemaElement, DOMUtil.XSD_IMPORT_ELEMENT);
            importElement.setAttribute(DOMUtil.XSD_ATTR_NAMESPACE.getLocalPart(), namespace);
            description.setSchema(new PrismSchemaImpl(prismContext));
            wrappedDescriptions.add(description);
        }
    }
    if (LOGGER.isTraceEnabled()) {
        String xml = DOMUtil.serializeDOMToString(schemaElement);
        LOGGER.trace("Wrapper XSD:\n{}", xml);
    }

    long started = System.currentTimeMillis();
    LOGGER.trace("Parsing {} schemas wrapped in single XSD", wrappedDescriptions.size());
    PrismSchemaImpl.parseSchemas(schemaElement, entityResolver, wrappedDescriptions,
            allowDelayedItemDefinitions, getPrismContext());
    LOGGER.trace("Parsed {} schemas in {} ms", wrappedDescriptions.size(),
            System.currentTimeMillis() - started);

    for (SchemaDescription description : wrappedDescriptions) {
        detectExtensionSchema(description.getSchema());
    }

    // Fragmented namespaces cannot share the wrapper: parse each file separately.
    for (String namespace : fragmentedNamespaces) {
        Collection<SchemaDescription> fragments = schemasByNamespace.get(namespace);
        LOGGER.trace("Parsing {} schemas for fragmented namespace {}", fragments.size(), namespace);
        for (SchemaDescription schemaDescription : fragments) {
            parsePrismSchema(schemaDescription, allowDelayedItemDefinitions);
        }
    }
}

From source file:org.finra.herd.dao.impl.HerdDaoImpl.java

/**
 * {@inheritDoc}
 *
 * <p>Runs a paginated criteria query over the storage file table, joined to
 * storage units, and collects every file path under its storage unit id.</p>
 */
@Override
public MultiValuedMap<Integer, String> getStorageFilePathsByStorageUnits(
        List<StorageUnitEntity> storageUnitEntities) {
    // Create a map that can hold a collection of values against each key.
    MultiValuedMap<Integer, String> result = new ArrayListValuedHashMap<>();

    // Retrieve the pagination size for the storage file paths query configured in the system.
    // NOTE(review): assumed non-null here — a missing configuration value would NPE
    // on unboxing below; confirm configurationHelper guarantees a default.
    Integer paginationSize = configurationHelper
            .getProperty(ConfigurationValue.STORAGE_FILE_PATHS_QUERY_PAGINATION_SIZE, Integer.class);

    // Create the criteria builder and the criteria.
    CriteriaBuilder builder = entityManager.getCriteriaBuilder();
    CriteriaQuery<Tuple> criteria = builder.createTupleQuery();

    // The criteria root is the storage file.
    Root<StorageFileEntity> storageFileEntity = criteria.from(StorageFileEntity.class);

    // Join to the other tables we can filter on.
    Join<StorageFileEntity, StorageUnitEntity> storageUnitEntity = storageFileEntity
            .join(StorageFileEntity_.storageUnit);

    // Get the columns.
    Path<Integer> storageUnitIdColumn = storageUnitEntity.get(StorageUnitEntity_.id);
    Path<String> storageFilePathColumn = storageFileEntity.get(StorageFileEntity_.path);

    // Create the standard restrictions (i.e. the standard where clauses).
    Predicate queryRestriction = getPredicateForInClause(builder, storageUnitEntity, storageUnitEntities);

    // Add the select clause.
    criteria.multiselect(storageUnitIdColumn, storageFilePathColumn);

    // Add the where clause.
    criteria.where(queryRestriction);

    // Execute the query using pagination and populate the result map.
    int startPosition = 0;
    while (true) {
        // Run the query to get a list of tuples back.
        List<Tuple> tuples = entityManager.createQuery(criteria).setFirstResult(startPosition)
                .setMaxResults(paginationSize).getResultList();

        // Populate the result map from the returned tuples (i.e. 1 tuple for each row).
        for (Tuple tuple : tuples) {
            // Extract the tuple values.
            Integer storageUnitId = tuple.get(storageUnitIdColumn);
            String storageFilePath = tuple.get(storageFilePathColumn);

            // Update the result map.
            result.put(storageUnitId, storageFilePath);
        }

        // Break out of the while loop if we got less results than the pagination size.
        if (tuples.size() < paginationSize) {
            break;
        }

        // Increment the start position.
        startPosition += paginationSize;
    }

    return result;
}

From source file:org.finra.herd.dao.impl.StorageFileDaoImpl.java

/**
 * Retrieves all storage file paths for the given storage unit ids, collected
 * into a multi-valued map keyed by storage unit id. The underlying criteria
 * query is executed in pages to bound memory use.
 *
 * @param storageUnitIds the storage unit ids to look up file paths for
 * @return storage unit id -> file paths (possibly several per id)
 */
@Override
public MultiValuedMap<Integer, String> getStorageFilePathsByStorageUnitIds(List<Integer> storageUnitIds) {
    // Multi-valued result: each storage unit id maps to all of its file paths.
    MultiValuedMap<Integer, String> result = new ArrayListValuedHashMap<>();

    // System-configured page size for the storage file paths query.
    Integer paginationSize = configurationHelper
            .getProperty(ConfigurationValue.STORAGE_FILE_PATHS_QUERY_PAGINATION_SIZE, Integer.class);

    // Build a tuple query rooted at the storage file table.
    CriteriaBuilder builder = entityManager.getCriteriaBuilder();
    CriteriaQuery<Tuple> criteria = builder.createTupleQuery();
    Root<StorageFileEntity> storageFileEntity = criteria.from(StorageFileEntity.class);

    // Columns to select: the owning storage unit id and the file path.
    Path<Integer> storageUnitIdColumn = storageFileEntity.get(StorageFileEntity_.storageUnitId);
    Path<String> storageFilePathColumn = storageFileEntity.get(StorageFileEntity_.path);

    // Select both columns, restricted to the requested storage unit ids.
    criteria.multiselect(storageUnitIdColumn, storageFilePathColumn);
    criteria.where(getPredicateForInClause(builder, storageUnitIdColumn, storageUnitIds));

    // Page through the result set; a short page signals the end.
    int offset = 0;
    List<Tuple> page;
    do {
        page = entityManager.createQuery(criteria).setFirstResult(offset)
                .setMaxResults(paginationSize).getResultList();
        for (Tuple row : page) {
            result.put(row.get(storageUnitIdColumn), row.get(storageFilePathColumn));
        }
        offset += paginationSize;
    } while (page.size() >= paginationSize);

    return result;
}