Example usage for com.google.common.collect SetMultimap asMap

Introduction

This page collects usage examples for com.google.common.collect.SetMultimap.asMap().

Prototype

@Override
Map<K, Collection<V>> asMap();

Document

Note: The returned map's values are guaranteed to be of type Set.
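
For a concrete look at that guarantee, here is a minimal, self-contained sketch (the class name AsMapExample and the sample data are illustrative only, not taken from the projects quoted below):

import com.google.common.collect.HashMultimap;
import com.google.common.collect.SetMultimap;

import java.util.Collection;
import java.util.Map;
import java.util.Set;

public class AsMapExample {
    public static void main(String[] args) {
        SetMultimap<String, Integer> multimap = HashMultimap.create();
        multimap.put("even", 2);
        multimap.put("even", 4);
        multimap.put("even", 4); // a SetMultimap collapses duplicate key-value pairs
        multimap.put("odd", 3);

        // asMap() returns a live view with one entry per distinct key.
        Map<String, Collection<Integer>> view = multimap.asMap();

        // The static value type is Collection<Integer>, but for a SetMultimap
        // every value collection is in fact a Set, so this cast is safe.
        Set<Integer> evens = (Set<Integer>) view.get("even");
        System.out.println(evens); // [2, 4]

        // The view supports removal, and removals write through to the multimap;
        // emptying a value collection removes its key entirely.
        view.get("odd").remove(3);
        System.out.println(multimap.containsKey("odd")); // false
    }
}

Most of the examples below rely on one of these two properties: the safe cast to Set, or the live-view write-through.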

Usage

From source file:org.eclipse.rdf4j.sail.lucene.AbstractSearchIndex.java

/**
 * Add a complete Lucene Document based on these statements. Do not search for an existing document with
 * the same subject id (assume the existing document was deleted).
 * @param statements
 *        the statements that make up the resource
 * @throws IOException
 */
@Override
public final synchronized void addDocuments(Resource subject, List<Statement> statements) throws IOException {

    String resourceId = SearchFields.getResourceID(subject);

    SetMultimap<String, Statement> stmtsByContextId = HashMultimap.create();

    String contextId;
    for (Statement statement : statements) {
        contextId = SearchFields.getContextID(statement.getContext());

        stmtsByContextId.put(contextId, statement);
    }

    BulkUpdater batch = newBulkUpdate();
    for (Entry<String, Collection<Statement>> entry : stmtsByContextId.asMap().entrySet()) {
        // create a new document
        String id = SearchFields.formIdString(resourceId, entry.getKey());
        SearchDocument document = newDocument(id, resourceId, entry.getKey());

        for (Statement stmt : entry.getValue()) {
            // determine stuff to store
            addProperty(stmt, document);
        }
        // add it to the index
        batch.add(document);
    }
    batch.end();
}
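
Note: here asMap() is used for grouped iteration only: each entry pairs a context id with the set of statements stored under it, so the method can emit one SearchDocument per context.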

From source file:org.geogig.web.functional.WebAPICucumberHooks.java

/**
 * Checks that the repository named {@code repositoryName}, at its commit {@code headRef}, has
 * the expected features as given by the {@code expectedFeatures} {@link DataTable}.
 * <p>
 * The {@code DataTable} header cells represent feature tree paths, and the cells beneath each
 * feature tree path hold the feature ids expected for that layer.
 * <p>
 * A {@code question mark} indicates a wild card feature whose feature id may not be known.
 * <p>
 * Example:
 * 
 * <pre>
 * <code>
 *     |  Points   |  Lines   |  Polygons   | 
 *     |  Points.1 |  Lines.1 |  Polygons.1 | 
 *     |  Points.2 |  Lines.2 |  Polygons.2 | 
 *     |  ?        |          |             |
 *</code>
 * </pre>
 * 
 * @param repositoryName
 * @param headRef
 * @param txId
 * @param expectedFeatures
 * @throws Throwable
 */
@Then("^the ([^\"]*) repository's \"([^\"]*)\" in the (@[^\"]*) transaction should have the following features:$")
public void verifyRepositoryContentsTx(String repositoryName, String headRef, String txId,
        DataTable expectedFeatures) throws Throwable {

    SetMultimap<String, String> expected = HashMultimap.create();
    {
        List<Map<String, String>> asMaps = expectedFeatures.asMaps(String.class, String.class);
        for (Map<String, String> featureMap : asMaps) {
            for (Entry<String, String> entry : featureMap.entrySet()) {
                if (entry.getValue().length() > 0) {
                    expected.put(entry.getKey(), context.replaceVariables(entry.getValue()));
                }
            }
        }
    }

    SetMultimap<String, String> actual = context.listRepo(repositoryName, headRef, txId);

    Map<String, Collection<String>> actualMap = actual.asMap();
    Map<String, Collection<String>> expectedMap = expected.asMap();

    for (String featureType : actualMap.keySet()) {
        assertTrue(expectedMap.containsKey(featureType));
        Collection<String> actualFeatureCollection = actualMap.get(featureType);
        Collection<String> expectedFeatureCollection = expectedMap.get(featureType);
        for (String actualFeature : actualFeatureCollection) {
            if (expectedFeatureCollection.contains(actualFeature)) {
                expectedFeatureCollection.remove(actualFeature);
            } else if (expectedFeatureCollection.contains("?")) {
                expectedFeatureCollection.remove("?");
            } else {
                fail();
            }
        }
        assertEquals(0, expectedFeatureCollection.size());
        expectedMap.remove(featureType);
    }
    assertEquals(0, expectedMap.size());

}
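
Note: expectedFeatureCollection is obtained from expected.asMap(), which is a live view, so the remove() calls above mutate the underlying expected multimap; that write-through is what makes the final size assertions meaningful.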

From source file:com.eucalyptus.auth.policy.PolicyParser.java

/**
 * The algorithm for decomposing the actions and resources of a statement into authorizations:
 * 1. Group actions into different vendors.
 * 2. Group resources into different resource types.
 * 3. Permute all combinations of action groups and resource groups, matching them by the same
 *    vendors.
 */
private List<AuthorizationEntity> decomposeStatement(final String effect, final String actionElement,
        final List<String> actions, final String resourceElement, final List<String> resources) {
    // Group actions by vendor
    final SetMultimap<String, String> actionMap = HashMultimap.create();
    for (String action : actions) {
        action = normalize(action);
        final String vendor = checkAction(action);
        actionMap.put(vendor, action);
    }
    // Group resources by type, key is a pair of (optional) account + resource type
    final SetMultimap<Pair<Optional<String>, String>, String> resourceMap = HashMultimap.create();
    for (final String resource : resources) {
        final Ern ern = Ern.parse(resource);
        resourceMap.put(Pair.lopair(Strings.emptyToNull(ern.getNamespace()), ern.getResourceType()),
                ern.getResourceName());
    }
    final boolean notAction = PolicySpec.NOTACTION.equals(actionElement);
    final boolean notResource = PolicySpec.NOTRESOURCE.equals(resourceElement);
    // Permute action and resource groups and construct authorizations.
    final List<AuthorizationEntity> results = Lists.newArrayList();
    for (final Map.Entry<String, Collection<String>> actionSetEntry : actionMap.asMap().entrySet()) {
        final String vendor = actionSetEntry.getKey();
        final Set<String> actionSet = (Set<String>) actionSetEntry.getValue();
        boolean added = false;
        for (final Map.Entry<Pair<Optional<String>, String>, Collection<String>> resourceSetEntry : resourceMap
                .asMap().entrySet()) {
            final Optional<String> accountIdOrName = resourceSetEntry.getKey().getLeft();
            final String type = resourceSetEntry.getKey().getRight();
            final Set<String> resourceSet = (Set<String>) resourceSetEntry.getValue();
            if (PolicySpec.ALL_ACTION.equals(vendor) || PolicySpec.ALL_RESOURCE.equals(type)
                    || PolicySpec.isPermittedResourceVendor(vendor, PolicySpec.vendor(type))) {
                results.add(new AuthorizationEntity(EffectType.valueOf(effect), accountIdOrName.orNull(), type,
                        actionSet, notAction, resourceSet, notResource));
                added = true;
            }
        }
        if (!added) {
            results.add(new AuthorizationEntity(EffectType.valueOf(effect), actionSet, notAction));
        }
    }
    return results;
}
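
Note: the unchecked casts of actionSetEntry.getValue() and resourceSetEntry.getValue() to Set rely on the guarantee documented above: the values of a SetMultimap's asMap() view are always Sets.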

From source file:org.sosy_lab.cpachecker.cpa.predicate.persistence.PredicateMapWriter.java

public void writePredicateMap(
        SetMultimap<Pair<CFANode, Integer>, AbstractionPredicate> locationInstancePredicates,
        SetMultimap<CFANode, AbstractionPredicate> localPredicates,
        SetMultimap<String, AbstractionPredicate> functionPredicates,
        Set<AbstractionPredicate> globalPredicates, Collection<AbstractionPredicate> allPredicates,
        Appendable sb) throws IOException {

    // In this set, we collect the definitions and declarations necessary
    // for the predicates (e.g., for variables)
    // The order of the definitions is important!
    Set<String> definitions = Sets.newLinkedHashSet();

    // in this map, we collect the string representation of each predicate
    // (potentially making use of the above definitions)
    Map<AbstractionPredicate, String> predToString = Maps.newHashMap();

    // fill the above set and map
    for (AbstractionPredicate pred : allPredicates) {
        String predString;

        if (format == PredicateDumpFormat.SMTLIB2) {
            Pair<String, List<String>> p = splitFormula(fmgr, pred.getSymbolicAtom());
            predString = p.getFirst();
            definitions.addAll(p.getSecond());
        } else {
            predString = pred.getSymbolicAtom().toString();
        }

        predToString.put(pred, predString);
    }

    LINE_JOINER.appendTo(sb, definitions);
    sb.append("\n\n");

    writeSetOfPredicates(sb, "*", globalPredicates, predToString);

    for (Entry<String, Collection<AbstractionPredicate>> e : functionPredicates.asMap().entrySet()) {
        writeSetOfPredicates(sb, e.getKey(), e.getValue(), predToString);
    }

    for (Entry<CFANode, Collection<AbstractionPredicate>> e : localPredicates.asMap().entrySet()) {
        String key = e.getKey().getFunctionName() + " " + e.getKey().toString();
        writeSetOfPredicates(sb, key, e.getValue(), predToString);
    }

    for (Entry<Pair<CFANode, Integer>, Collection<AbstractionPredicate>> e : locationInstancePredicates.asMap()
            .entrySet()) {
        CFANode loc = e.getKey().getFirst();
        String key = loc.getFunctionName() + " " + loc.toString() + "@" + e.getKey().getSecond();
        writeSetOfPredicates(sb, key, e.getValue(), predToString);
    }
}
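
Note: asMap() serves read-only grouping here; each entry of functionPredicates.asMap() (and likewise localPredicates and locationInstancePredicates) supplies one key together with all of its predicates, so each group becomes a single writeSetOfPredicates call.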

From source file:edu.buaa.satla.analysis.core.predicate.PredicateCPAStatistics.java

private void exportPredmapToFile(Path targetFile, MutablePredicateSets predicates) {
    Preconditions.checkNotNull(targetFile);
    Preconditions.checkNotNull(predicates);

    Set<AbstractionPredicate> allPredicates = Sets.newHashSet(predicates.global);
    allPredicates.addAll(predicates.function.values());
    allPredicates.addAll(predicates.location.values());
    allPredicates.addAll(predicates.locationInstance.values());

    try (Writer w = Files.openOutputFile(targetFile)) {
        precisionWriter.writePredicateMap(predicates.locationInstance, predicates.location, predicates.function,
                predicates.global, allPredicates, w);

        /**
         * XXX
         *
         */
        try {
            SetMultimap<CFANode, AbstractionPredicate> localPredicates = predicates.location;
            FormulaManagerView fmgrv = cpa.getFormulaManager();
            for (AbstractionPredicate pred : localPredicates.values()) {
                Pair<String, List<String>> p = splitFormula(fmgrv, pred.getSymbolicAtom());
                String predString = p.getFirst();

            }

            PathFormulaManager pfmgr = cpa.getPathFormulaManager();

            PredicateAbstractionManager pmgr = cpa.getPredicateManager();
            RegionCreator rmgr = pmgr.getRegionCreator();
            final NestedTimer abstractionEnumTime = new NestedTimer(); // outer: solver time, inner: bdd time
            final Timer abstractionSolveTime = new Timer(); // only the time for solving, not for model enumeration

            for (Entry<CFANode, Collection<AbstractionPredicate>> e : localPredicates.asMap().entrySet()) {
                CFANode n = e.getKey();
                Collection<AbstractionPredicate> nodePredicates = e.getValue();

                ProverEnvironment thmProver = pmgr.getProverEnvironment();
                BooleanFormulaManagerView bfmgr = fmgrv.getBooleanFormulaManager();
                BooleanFormula predDef = bfmgr.makeBoolean(true);
                List<BooleanFormula> predVars = new ArrayList<>(nodePredicates.size());
                //          List<BooleanFormula> booleanFormulas = new ArrayList<>();
                for (AbstractionPredicate pred : nodePredicates) {
                    // get propositional variable and definition of predicate
                    BooleanFormula var = pred.getSymbolicVariable();
                    BooleanFormula def = pred.getSymbolicAtom();
                    //            booleanFormulas.add(pred.getSymbolicAtom());

                    assert !bfmgr.isFalse(def);
                    //            def = fmgr.instantiate(def, ssa);
                    // build the formula (var <-> def) and add it to the list of definitions
                    BooleanFormula equiv = bfmgr.equivalence(var, def);
                    predDef = bfmgr.and(predDef, equiv);
                    predVars.add(var);
                }

                for (CFAEdge edge : CFAUtils.allLeavingEdges(n)) {
                    List<CFAEdge> edges = new ArrayList<>();
                    edges.add(edge);
                    PathFormula pf = pfmgr.makeFormulaForPath(edges);
                    predDef = bfmgr.and(predDef, pf.getFormula());
                    //            booleanFormulas.add(pf.getFormula());
                    String pfStr = pf.toString();

                    thmProver.push(predDef);
                    AllSatResult satResult = thmProver.allSat(predVars, rmgr, abstractionSolveTime,
                            abstractionEnumTime);
                    int cnt = satResult.getCount();
                }
            }
        } catch (Exception e) {
            // any exception from the experimental block above is ignored
        }
        //      try {
        //        PathFormulaManager pfmgr = cpa.getPathFormulaManager();
        //        CFA cfa = GlobalInfo.getInstance().getCFAInfo().get().getCFA();
        //
        //
        //        for (FunctionEntryNode fnode : cfa.getAllFunctionHeads()) {
        //          CFAEdge edge = fnode.getLeavingEdge(0);
        //          if (edge instanceof MultiEdge) {
        //            List<CFAEdge> edges = new ArrayList<>();
        //            edges.add(edge);
        //            PathFormula pf = pfmgr.makeFormulaForPath(edges);
        //            System.out.println(pf);
        //          }
        //        }
        //      } catch (Exception e) {
        //        // TODO: handle exception
        //      }

    } catch (IOException e) {
        cpa.getLogger().logUserException(Level.WARNING, e, "Could not write predicate map to file");
    }
}

From source file:ome.services.graphs.GraphTraversal.java

/**
 * Convert the indicated objects to {@link CI}s with their actual class identified.
 * @param session a Hibernate session
 * @param objects the objects to query
 * @return {@link CI}s corresponding to the objects
 * @throws GraphException if any of the specified objects could not be queried
 */
private Collection<CI> objectsToCIs(Session session, SetMultimap<String, Long> objects) throws GraphException {
    final List<CI> returnValue = new ArrayList<CI>(objects.size());
    for (final Entry<String, Collection<Long>> oneQueryClass : objects.asMap().entrySet()) {
        final String className = oneQueryClass.getKey();
        final Collection<Long> ids = oneQueryClass.getValue();
        final Collection<CI> retrieved = findObjectDetails(className, ids).values();
        if (ids.size() != retrieved.size()) {
            throw new GraphException("cannot read all the specified objects of class " + className);
        }
        returnValue.addAll(retrieved);
    }
    return returnValue;
}
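
Note: grouping the ids via objects.asMap() lets the method issue one findObjectDetails lookup per class instead of one per object.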

From source file:ome.services.graphs.GraphTraversal.java

/**
 * Prepare to process the targeted model objects.
 * @return the actual processor for the targeted model objects, to be used by the caller
 * @throws GraphException if the user does not have permission to process the targets or
 * if a cycle is detected in the model object graph
 */
public PlanExecutor processTargets() throws GraphException {
    if (!progress.contains(Milestone.PLANNED)) {
        throw new IllegalStateException("operation not yet planned");
    }
    final List<Entry<Map<String, Collection<Long>>, Map<String, Collection<Long>>>> toJoinAndDelete = new ArrayList<Entry<Map<String, Collection<Long>>, Map<String, Collection<Long>>>>();
    /* process the targets forward across links */
    while (!planning.blockedBy.isEmpty()) {
        /* determine which objects can be processed in this step */
        final Collection<CI> nowUnblocked = new HashSet<CI>();
        final Iterator<Entry<CI, Set<CI>>> blocks = planning.blockedBy.entrySet().iterator();
        while (blocks.hasNext()) {
            final Entry<CI, Set<CI>> block = blocks.next();
            final CI object = block.getKey();
            if (block.getValue().isEmpty()) {
                blocks.remove();
                nowUnblocked.add(object);
            }
        }
        if (nowUnblocked.isEmpty()) {
            throw new GraphException(
                    "cycle detected among " + Joiner.on(", ").join(planning.blockedBy.keySet()));
        }
        for (final Set<CI> blockers : planning.blockedBy.values()) {
            blockers.removeAll(nowUnblocked);
        }
        final SetMultimap<String, Long> toJoin = HashMultimap.create();
        final SetMultimap<String, Long> toDelete = HashMultimap.create();
        for (final CI object : nowUnblocked) {
            if (planning.included.contains(object)) {
                toJoin.put(object.className, object.id);
            } else {
                toDelete.put(object.className, object.id);
            }
        }
        /* note this group's includes and deletes */
        final Map<String, Collection<Long>> eachToJoin = toJoin.asMap();
        for (final Entry<String, Collection<Long>> oneClassToJoin : eachToJoin.entrySet()) {
            final String className = oneClassToJoin.getKey();
            final Collection<Long> allIds = oneClassToJoin.getValue();
            assertMayBeProcessed(className, allIds);
        }
        final Map<String, Collection<Long>> eachToDelete = toDelete.asMap();
        for (final Entry<String, Collection<Long>> oneClassToDelete : eachToDelete.entrySet()) {
            final String className = oneClassToDelete.getKey();
            final Collection<Long> allIds = oneClassToDelete.getValue();
            assertMayBeDeleted(className, allIds);
        }
        toJoinAndDelete.add(Maps.immutableEntry(eachToJoin, eachToDelete));
    }
    return new PlanExecutor() {
        @Override
        public void execute() throws GraphException {
            if (!progress.contains(Milestone.UNLINKED)) {
                throw new IllegalStateException("model objects not yet unlinked");
            }
            if (progress.contains(Milestone.PROCESSED)) {
                throw new IllegalStateException("model objects already processed");
            }
            /* actually do the noted processing */
            for (final Entry<Map<String, Collection<Long>>, Map<String, Collection<Long>>> next : toJoinAndDelete) {
                final Map<String, Collection<Long>> toJoin = next.getKey();
                final Map<String, Collection<Long>> toDelete = next.getValue();
                /* perform this group's deletes */
                if (!toDelete.isEmpty()) {
                    for (final Entry<String, Collection<Long>> oneClassToDelete : toDelete.entrySet()) {
                        final String className = oneClassToDelete.getKey();
                        final Collection<Long> allIds = oneClassToDelete.getValue();
                        final Collection<Collection<Long>> idGroups;
                        if (OriginalFile.class.getName().equals(className)) {
                            idGroups = ModelObjectSequencer.sortOriginalFileIds(session, allIds);
                        } else {
                            idGroups = Collections.singleton(allIds);
                        }
                        for (final Collection<Long> idGroup : idGroups) {
                            for (final List<Long> ids : Iterables.partition(idGroup, BATCH_SIZE)) {
                                processor.deleteInstances(className, ids);
                            }
                        }
                    }
                }
                /* perform this group's includes */
                if (!toJoin.isEmpty()) {
                    for (final Entry<String, Collection<Long>> oneClassToJoin : toJoin.entrySet()) {
                        final String className = oneClassToJoin.getKey();
                        final Collection<Long> allIds = oneClassToJoin.getValue();
                        for (final List<Long> ids : Iterables.partition(allIds, BATCH_SIZE)) {
                            processor.processInstances(className, ids);
                        }
                    }
                }
            }
            progress.add(Milestone.PROCESSED);
        }
    };
}

From source file:omero.cmd.graphs.DiskUsageI.java

/**
 * Calculate the disk usage of the model objects specified in the request.
 * @return the total usage, in bytes
 */
private DiskUsageResponse getDiskUsage() {
    final IQuery queryService = helper.getServiceFactory().getQueryService();

    final int batchSize = 256;

    final SetMultimap<String, Long> objectsToProcess = HashMultimap.create();
    final SetMultimap<String, Long> objectsProcessed = HashMultimap.create();
    final Usage usage = new Usage();

    /* original file ID to types that refer to them */
    final SetMultimap<Long, String> typesWithFiles = HashMultimap.create();
    /* original file ID to file ownership and size */
    final Map<Long, OwnershipAndSize> fileSizes = new HashMap<Long, OwnershipAndSize>();

    /* note the objects to process */

    for (final String className : classes) {
        final String hql = "SELECT " + getIdPropertyFor(className) + " FROM " + className;
        for (final Object[] resultRow : queryService.projection(hql, null)) {
            if (resultRow != null) {
                final Long objectId = (Long) resultRow[0];
                objectsToProcess.put(className, objectId);
            }
        }
    }

    for (final Map.Entry<String, List<Long>> objectList : objects.entrySet()) {
        objectsToProcess.putAll(objectList.getKey(), objectList.getValue());

        if (LOGGER.isDebugEnabled()) {
            final List<Long> ids = Lists.newArrayList(objectsToProcess.get(objectList.getKey()));
            Collections.sort(ids);
            LOGGER.debug("size calculator to process " + objectList.getKey() + " " + Joiner.on(", ").join(ids));
        }
    }

    /* check that the objects' class names are valid */

    for (final String className : objectsToProcess.keySet()) {
        getIdPropertyFor(className);
    }

    /* iteratively process objects, descending the model graph */

    while (!objectsToProcess.isEmpty()) {
        /* obtain canonical class name and ID list */
        final Map.Entry<String, Collection<Long>> nextClass = objectsToProcess.asMap().entrySet().iterator()
                .next();
        String className = nextClass.getKey();
        final int lastDot = className.lastIndexOf('.');
        if (lastDot >= 0) {
            className = className.substring(lastDot + 1);
        } else if (className.charAt(0) == '/') {
            className = className.substring(1);
        }
        /* get IDs still to process, and split off a batch of them for this query */
        final Collection<Long> ids = nextClass.getValue();
        ids.removeAll(objectsProcessed.get(className));
        if (ids.isEmpty()) {
            continue;
        }
        final List<Long> idsToQuery = Lists.newArrayList(Iterables.limit(ids, batchSize));
        ids.removeAll(idsToQuery);
        objectsProcessed.putAll(className, idsToQuery);
        final Parameters parameters = new Parameters().addIds(idsToQuery);

        if ("Pixels".equals(className)) {
            /* Pixels may have /OMERO/Pixels/<id> files */
            final String hql = "SELECT id, details.owner.id, details.group.id FROM Pixels WHERE id IN (:ids)";
            for (final Object[] resultRow : queryService.projection(hql, parameters)) {
                if (resultRow != null) {
                    final Long pixelsId = (Long) resultRow[0];
                    final Long ownerId = (Long) resultRow[1];
                    final Long groupId = (Long) resultRow[2];
                    final String pixelsPath = pixelsService.getPixelsPath(pixelsId);
                    usage.bumpTotals().add(ownerId, groupId, className, getFileSize(pixelsPath));
                    usage.bumpTotals().add(ownerId, groupId, className,
                            getFileSize(pixelsPath + PixelsService.PYRAMID_SUFFIX));
                    usage.bumpTotals().add(ownerId, groupId, className, getFileSize(
                            pixelsPath + PixelsService.PYRAMID_SUFFIX + BfPyramidPixelBuffer.PYR_LOCK_EXT));
                }
            }
        } else if ("Thumbnail".equals(className)) {
            /* Thumbnails may have /OMERO/Thumbnails/<id> files */
            final String hql = "SELECT id, details.owner.id, details.group.id FROM Thumbnail WHERE id IN (:ids)";
            for (final Object[] resultRow : queryService.projection(hql, parameters)) {
                if (resultRow != null) {
                    final Long thumbnailId = (Long) resultRow[0];
                    final Long ownerId = (Long) resultRow[1];
                    final Long groupId = (Long) resultRow[2];
                    final String thumbnailPath = thumbnailService.getThumbnailPath(thumbnailId);
                    usage.bumpTotals().add(ownerId, groupId, className, getFileSize(thumbnailPath));
                }
            }
        } else if ("OriginalFile".equals(className)) {
            /* OriginalFiles have their size noted */
            final String hql = "SELECT id, details.owner.id, details.group.id, size FROM OriginalFile WHERE id IN (:ids)";
            for (final Object[] resultRow : queryService.projection(hql, parameters)) {
                if (resultRow != null && resultRow[3] instanceof Long) {
                    final Long fileId = (Long) resultRow[0];
                    final Long ownerId = (Long) resultRow[1];
                    final Long groupId = (Long) resultRow[2];
                    final Long fileSize = (Long) resultRow[3];
                    fileSizes.put(fileId, new OwnershipAndSize(ownerId, groupId, fileSize));
                }
            }
        } else if ("Experimenter".equals(className)) {
            /* for an experimenter, use the list of owned objects */
            for (final String resultClassName : OWNED_OBJECTS) {
                final String hql = "SELECT " + getIdPropertyFor(resultClassName) + " FROM " + resultClassName
                        + " WHERE details.owner.id IN (:ids)";
                for (final Object[] resultRow : queryService.projection(hql, parameters)) {
                    objectsToProcess.put(resultClassName, (Long) resultRow[0]);
                }
            }
        } else if ("ExperimenterGroup".equals(className)) {
            /* for an experimenter group, use the list of owned objects */
            for (final String resultClassName : OWNED_OBJECTS) {
                final String hql = "SELECT " + getIdPropertyFor(resultClassName) + " FROM " + resultClassName
                        + " WHERE details.group.id IN (:ids)";
                for (final Object[] resultRow : queryService.projection(hql, parameters)) {
                    objectsToProcess.put(resultClassName, (Long) resultRow[0]);
                }
            }
        }

        /* follow the next step from here on the model object graph */
        for (final Map.Entry<String, String> query : TRAVERSAL_QUERIES.get(className)) {
            final String resultClassName = query.getKey();
            final String hql = query.getValue();
            for (final Object[] resultRow : queryService.projection(hql, parameters)) {
                if (resultRow != null && resultRow[0] instanceof Long) {
                    final Long resultId = (Long) resultRow[0];
                    objectsToProcess.put(resultClassName, resultId);
                    if ("OriginalFile".equals(resultClassName)) {
                        typesWithFiles.put(resultId, className);
                    }
                }
            }
        }
        if (ANNOTATABLE_OBJECTS.contains(className)) {
            /* also watch for annotations on the current objects */
            final String hql = "SELECT child.id FROM " + className + "AnnotationLink WHERE parent.id IN (:ids)";
            for (final Object[] resultRow : queryService.projection(hql, parameters)) {
                objectsToProcess.put("Annotation", (Long) resultRow[0]);
            }
        }

        if (LOGGER.isDebugEnabled()) {
            Collections.sort(idsToQuery);
            LOGGER.debug("usage is " + usage + " after processing " + className + " "
                    + Joiner.on(", ").join(idsToQuery));
        }
    }

    /* collate file counts and sizes by referer type */
    for (final Map.Entry<Long, OwnershipAndSize> fileIdSize : fileSizes.entrySet()) {
        final Long fileId = fileIdSize.getKey();
        final OwnershipAndSize fileSize = fileIdSize.getValue();
        Set<String> types = typesWithFiles.get(fileId);
        if (types.isEmpty()) {
            types = ImmutableSet.of("OriginalFile");
        }
        usage.bumpTotals();
        for (final String type : types) {
            usage.add(fileSize.owner, fileSize.group, type, fileSize.size);
        }
    }

    if (LOGGER.isDebugEnabled()) {
        LOGGER.debug("usage is " + usage + " after including " + OriginalFile.class.getSimpleName() + " sizes");
    }

    return usage.getDiskUsageResponse();
}
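
Note: this worklist loop depends on asMap() being a live view: ids is the view's value collection, so ids.removeAll(idsToQuery) shrinks objectsToProcess itself, and the while (!objectsToProcess.isEmpty()) condition eventually becomes false.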

From source file:gobblin.data.management.copy.CopySource.java

/**
 * Does the following:
 * <ul>
 * <li>Instantiate a {@link DatasetsFinder}.
 * <li>Find all {@link Dataset} using {@link DatasetsFinder}.
 * <li>For each {@link CopyableDataset} get all {@link CopyEntity}s.
 * <li>Create a {@link WorkUnit} per {@link CopyEntity}.
 * </ul>
 *
 * <p>
 * In this implementation, one workunit is created for every {@link CopyEntity} found. But the extractor/converters
 * and writers are built to support multiple {@link CopyEntity}s per workunit
 * </p>
 *
 * @param state see {@link gobblin.configuration.SourceState}
 * @return Work units for copying files.
 */
@Override
public List<WorkUnit> getWorkunits(final SourceState state) {

    this.metricContext = Instrumented.getMetricContext(state, CopySource.class);

    try {

        DeprecationUtils.renameDeprecatedKeys(state,
                CopyConfiguration.MAX_COPY_PREFIX + "." + CopyResourcePool.ENTITIES_KEY,
                Lists.newArrayList(MAX_FILES_COPIED_KEY));

        final FileSystem sourceFs = getSourceFileSystem(state);
        final FileSystem targetFs = getTargetFileSystem(state);

        log.info("Identified source file system at {} and target file system at {}.", sourceFs.getUri(),
                targetFs.getUri());

        long maxSizePerBin = state.getPropAsLong(MAX_SIZE_MULTI_WORKUNITS, 0);
        long maxWorkUnitsPerMultiWorkUnit = state.getPropAsLong(MAX_WORK_UNITS_PER_BIN, 50);
        final long minWorkUnitWeight = Math.max(1, maxSizePerBin / maxWorkUnitsPerMultiWorkUnit);
        final Optional<CopyableFileWatermarkGenerator> watermarkGenerator = CopyableFileWatermarkHelper
                .getCopyableFileWatermarkGenerator(state);
        int maxThreads = state.getPropAsInt(MAX_CONCURRENT_LISTING_SERVICES,
                DEFAULT_MAX_CONCURRENT_LISTING_SERVICES);

        final CopyConfiguration copyConfiguration = CopyConfiguration.builder(targetFs, state.getProperties())
                .build();

        DatasetsFinder<CopyableDatasetBase> datasetFinder = DatasetUtils.instantiateDatasetFinder(
                state.getProperties(), sourceFs, DEFAULT_DATASET_PROFILE_CLASS_KEY,
                new EventSubmitter.Builder(this.metricContext, CopyConfiguration.COPY_PREFIX).build(), state);

        IterableDatasetFinder<CopyableDatasetBase> iterableDatasetFinder = datasetFinder instanceof IterableDatasetFinder
                ? (IterableDatasetFinder<CopyableDatasetBase>) datasetFinder
                : new IterableDatasetFinderImpl<>(datasetFinder);

        Iterator<CopyableDatasetRequestor> requestorIteratorWithNulls = Iterators.transform(
                iterableDatasetFinder.getDatasetsIterator(),
                new CopyableDatasetRequestor.Factory(targetFs, copyConfiguration, log));
        Iterator<CopyableDatasetRequestor> requestorIterator = Iterators.filter(requestorIteratorWithNulls,
                Predicates.<CopyableDatasetRequestor>notNull());

        final SetMultimap<FileSet<CopyEntity>, WorkUnit> workUnitsMap = Multimaps
                .<FileSet<CopyEntity>, WorkUnit>synchronizedSetMultimap(
                        HashMultimap.<FileSet<CopyEntity>, WorkUnit>create());

        RequestAllocator<FileSet<CopyEntity>> allocator = createRequestAllocator(copyConfiguration, maxThreads);
        Iterator<FileSet<CopyEntity>> prioritizedFileSets = allocator.allocateRequests(requestorIterator,
                copyConfiguration.getMaxToCopy());

        Iterator<Callable<Void>> callableIterator = Iterators.transform(prioritizedFileSets,
                new Function<FileSet<CopyEntity>, Callable<Void>>() {
                    @Nullable
                    @Override
                    public Callable<Void> apply(FileSet<CopyEntity> input) {
                        return new FileSetWorkUnitGenerator((CopyableDatasetBase) input.getDataset(), input,
                                state, workUnitsMap, watermarkGenerator, minWorkUnitWeight);
                    }
                });

        try {
            List<Future<Void>> futures = new IteratorExecutor<>(callableIterator, maxThreads, ExecutorsUtils
                    .newDaemonThreadFactory(Optional.of(log), Optional.of("Copy-file-listing-pool-%d")))
                            .execute();

            for (Future<Void> future : futures) {
                try {
                    future.get();
                } catch (ExecutionException exc) {
                    log.error("Failed to get work units for dataset.", exc.getCause());
                }
            }
        } catch (InterruptedException ie) {
            log.error("Retrieval of work units was interrupted. Aborting.");
            return Lists.newArrayList();
        }

        log.info(String.format("Created %s workunits ", workUnitsMap.size()));

        copyConfiguration.getCopyContext().logCacheStatistics();

        if (state.contains(SIMULATE) && state.getPropAsBoolean(SIMULATE)) {
            log.info("Simulate mode enabled. Will not execute the copy.");
            for (Map.Entry<FileSet<CopyEntity>, Collection<WorkUnit>> entry : workUnitsMap.asMap().entrySet()) {
                log.info(String.format("Actions for dataset %s file set %s.",
                        entry.getKey().getDataset().datasetURN(), entry.getKey().getName()));
                for (WorkUnit workUnit : entry.getValue()) {
                    CopyEntity copyEntity = deserializeCopyEntity(workUnit);
                    log.info(copyEntity.explain());
                }
            }
            return Lists.newArrayList();
        }

        List<? extends WorkUnit> workUnits = new WorstFitDecreasingBinPacking(maxSizePerBin)
                .pack(Lists.newArrayList(workUnitsMap.values()), this.weighter);
        log.info(String.format(
                "Bin packed work units. Initial work units: %d, packed work units: %d, max weight per bin: %d, "
                        + "max work units per bin: %d.",
                workUnitsMap.size(), workUnits.size(), maxSizePerBin, maxWorkUnitsPerMultiWorkUnit));
        return ImmutableList.copyOf(workUnits);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
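
Note: workUnitsMap is a synchronized multimap populated concurrently by worker threads; iterating its asMap() view in simulate mode is safe only because every listing future has completed by then, since Guava's synchronized wrappers do not guard iteration over their views.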

From source file:ome.services.graphs.GraphTraversal.java

/**
 * Load object instances and their links into the various cache fields of {@link Planning}.
 * @param session a Hibernate session
 * @param toCache the objects to cache
 * @throws GraphException if the objects could not be converted to unloaded instances
 */
private void cache(Session session, Collection<CI> toCache) throws GraphException {
    /* note which links to query, organized for batch querying */
    final SetMultimap<CP, Long> forwardLinksWanted = HashMultimap.create();
    final SetMultimap<CP, Long> backwardLinksWanted = HashMultimap.create();
    for (final CI inclusionCandidate : toCache) {
        for (final String inclusionCandidateSuperclassName : model
                .getSuperclassesOfReflexive(inclusionCandidate.className)) {
            for (final Entry<String, String> forwardLink : model
                    .getLinkedTo(inclusionCandidateSuperclassName)) {
                final CP linkProperty = new CP(inclusionCandidateSuperclassName, forwardLink.getValue());
                forwardLinksWanted.put(linkProperty, inclusionCandidate.id);
            }
            for (final Entry<String, String> backwardLink : model
                    .getLinkedBy(inclusionCandidateSuperclassName)) {
                final CP linkProperty = new CP(backwardLink.getKey(), backwardLink.getValue());
                backwardLinksWanted.put(linkProperty, inclusionCandidate.id);
            }
        }
    }
    /* query and cache forward links */
    for (final Entry<CP, Collection<Long>> forwardLink : forwardLinksWanted.asMap().entrySet()) {
        final CP linkProperty = forwardLink.getKey();
        final String query = "SELECT linker.id, linked.id FROM " + linkProperty.className + " AS linker "
                + "JOIN linker." + linkProperty.propertyName + " AS linked WHERE linker.id IN (:ids)";
        for (final Entry<CI, CI> linkerLinked : getLinksToCache(linkProperty, query, forwardLink.getValue())) {
            planning.forwardLinksCached.put(linkProperty.toCPI(linkerLinked.getKey().id),
                    linkerLinked.getValue());
        }
    }
    /* query and cache backward links */
    for (final Entry<CP, Collection<Long>> backwardLink : backwardLinksWanted.asMap().entrySet()) {
        final CP linkProperty = backwardLink.getKey();
        final String query = "SELECT linker.id, linked.id FROM " + linkProperty.className + " AS linker "
                + "JOIN linker." + linkProperty.propertyName + " AS linked WHERE linked.id IN (:ids)";
        for (final Entry<CI, CI> linkerLinked : getLinksToCache(linkProperty, query, backwardLink.getValue())) {
            planning.backwardLinksCached.put(linkProperty.toCPI(linkerLinked.getValue().id),
                    linkerLinked.getKey());
        }
    }
    /* note cached objects for further processing */
    planning.cached.addAll(toCache);
    planning.toProcess.addAll(toCache);
}