Example usage for org.apache.commons.collections MultiMap get

List of usage examples for org.apache.commons.collections MultiMap get

Introduction

On this page you can find example usages for org.apache.commons.collections MultiMap get.

Prototype

Object get(Object key);

Source Link

Document

Gets the collection of values associated with the specified key.

Usage

From source file:edu.uci.ics.jung.graph.impl.BipartiteGraph.java

/**
 * Creates a one-part graph from a bipartite graph by folding
 * Vertices from one class into a second class. This function
 * creates a new UndirectedGraph (with vertex set V') in which: <br>
 * <ul>
 * <li> each vertex in V' has an equivalent V in bpg.getAllVertices( class ) </li>
 * <li> an edge E' joins V'1 and V'2 iff there is a path of length 2 from
 * V1 to V2 (by way of some third vertex in the other class, VXs) </li>
 * <li> each edge E' is annotated with the set of vertices VXs </li>
 * </ul>
 * 
 * In social network analysis and related fields, this operation transforms
 * an actor-by-event chart into an actor-by-actor chart.
 * 
 * @param bpg      The bipartite graph to be folded
 * @param vertexSet      Chooses the set of vertices to be brought into
 *                the new Graph.
 * @return   an UndirectedSparseGraph.
 */
public static Graph fold(BipartiteGraph bpg, Choice vertexSet) {
    Graph newGraph = new UndirectedSparseGraph();
    // Copy every vertex of the chosen class into the (initially edgeless) result.
    Set vertices = bpg.getAllVertices(vertexSet);
    for (Iterator iter = vertices.iterator(); iter.hasNext();) {
        BipartiteVertex v = (BipartiteVertex) iter.next();
        v.copy(newGraph);
    }

    // Vertices whose outgoing folded edges have already been created; adding v
    // BEFORE processing it also prevents self-loops via a shared hyper-edge.
    Set coveredNodes = new HashSet();

    for (Iterator iter = vertices.iterator(); iter.hasNext();) {
        BipartiteVertex v = (BipartiteVertex) iter.next();
        coveredNodes.add(v);

        // the set of all Bs that touch this A
        Set hyperEdges = v.getNeighbors();

        // this will ultimately contain a mapping from
        // the next adjacent "A" to the list of "B"s that support that
        // connection (that is, all Bs that run between this A and its neighbor)
        MultiMap mm = new MultiHashMap();
        for (Iterator iterator = hyperEdges.iterator(); iterator.hasNext();) {
            Vertex hyperEdge = (Vertex) iterator.next();
            addAll(mm, hyperEdge.getNeighbors(), hyperEdge);
        }
        for (Iterator iterator = mm.keySet().iterator(); iterator.hasNext();) {
            Vertex aVertex = (Vertex) iterator.next();

            // Skip As already folded earlier — each undirected edge is created once.
            if (coveredNodes.contains(aVertex))
                continue;

            // NOTE(review): mm.get(aVertex) is the live collection of supporting
            // Bs; it is attached to the new edge as SHARED user data.
            Edge newEdge = GraphUtils.addEdge(newGraph, (Vertex) v.getEqualVertex(newGraph),
                    (Vertex) aVertex.getEqualVertex(newGraph));
            newEdge.addUserDatum(BIPARTITE_USER_TAG, mm.get(aVertex), UserData.SHARED);
        }
    }
    return newGraph;
}

From source file:edu.uci.ics.jung.algorithms.blockmodel.GraphCollapser.java

/**
 * Re-keys part of a multi-map: every value currently associated with
 * {@code dest} is re-added under {@code superV}, after which the
 * {@code dest} mapping is removed entirely.
 *
 * @param m      the multi-map to rewrite
 * @param dest   the vertex whose values are being transferred
 * @param superV the collapsed vertex that takes over those values
 */
protected void replaceWith(MultiMap m, Vertex dest, CollapsedVertex superV) {
    Collection values = (Collection) m.get(dest);
    Iterator valueIter = values.iterator();
    while (valueIter.hasNext()) {
        m.put(superV, valueIter.next());
    }
    m.remove(dest);
}

From source file:edu.harvard.med.screensaver.service.cherrypicks.CherryPickRequestPlateMapFilesBuilder.java

/**
 * Writes a README entry into the zip stream, describing the plate mappings
 * for the given cherry pick request: per-plate status lines, a warning for
 * plates needing source-plate reloads, and a warning for plates drawn from
 * multiple source plate types.
 *
 * NOTE(review): the writer wraps the caller's ZipOutputStream, so it is
 * flushed but deliberately NOT closed here — closing it would close the
 * whole zip stream.
 *
 * @param cherryPickRequest the request whose assay plates are described
 * @param zipOut the zip stream receiving the README entry
 * @throws IOException on zip-entry I/O failure
 */
@SuppressWarnings("unchecked")
private void buildReadme(CherryPickRequest cherryPickRequest, ZipOutputStream zipOut) throws IOException {

    ZipEntry zipEntry = new ZipEntry(README_FILE_NAME);
    zipOut.putNextEntry(zipEntry);
    PrintWriter writer = new CustomNewlinePrintWriter(zipOut, NEWLINE);

    writer.println("This zip file contains plate mappings for Cherry Pick Request "
            + cherryPickRequest.getCherryPickRequestNumber());
    writer.println();

    // Section 1: one line per active assay plate — name, status, and (when
    // available) the date and person of the screening or liquid transfer.
    {
        StringBuilder buf = new StringBuilder();
        for (CherryPickAssayPlate assayPlate : cherryPickRequest.getActiveCherryPickAssayPlates()) {
            buf.append(assayPlate.getName()).append("\t").append(assayPlate.getStatusLabel());
            if (assayPlate.isPlatedAndScreened()) {
                buf.append("\t(").append(assayPlate.getCherryPickScreenings().last().getDateOfActivity())
                        .append(" by ").append(assayPlate.getCherryPickScreenings().last().getPerformedBy()
                                .getFullNameFirstLast())
                        .append(')');
            } else if (assayPlate.isPlated()) {
                buf.append("\t(").append(assayPlate.getCherryPickLiquidTransfer().getDateOfActivity())
                        .append(" by ").append(assayPlate.getCherryPickLiquidTransfer().getPerformedBy()
                                .getFullNameFirstLast())
                        .append(')');
            }
            buf.append(NEWLINE);
        }
        // Only emit the section header when there is at least one plate line.
        if (buf.length() > 0) {
            writer.println("Cherry pick plates:");
            writer.print(buf.toString());
            writer.println();
        }
    }

    // Section 2: warn about assay plates that draw repeatedly on a source plate.
    Map<CherryPickAssayPlate, Integer> platesRequiringReload = cherryPickRequestPlateMapper
            .getAssayPlatesRequiringSourcePlateReload(cherryPickRequest);
    if (platesRequiringReload.size() > 0) {
        writer.println("WARNING: Some cherry pick plates will be created from the same source plate!");
        writer.println(
                "You will need to reload one or more source plates for each of the following cherry pick plates:");
        for (CherryPickAssayPlate assayPlate : platesRequiringReload.keySet()) {
            writer.println("\tCherry pick plate '" + assayPlate.getName() + "' requires reload of source plate "
                    + platesRequiringReload.get(assayPlate));
        }
        writer.println();
    }

    // Section 3: warn about assay plates fed by more than one source plate type.
    // NOTE(review): the multimap appears to be keyed by plate NAME here —
    // confirm against getSourcePlateTypesForEachAssayPlate.
    {
        StringBuilder buf = new StringBuilder();
        MultiMap sourcePlateTypesForEachAssayPlate = getSourcePlateTypesForEachAssayPlate(cherryPickRequest);
        for (CherryPickAssayPlate assayPlate : cherryPickRequest.getActiveCherryPickAssayPlates()) {
            Set<PlateType> sourcePlateTypes = (Set<PlateType>) sourcePlateTypesForEachAssayPlate
                    .get(assayPlate.getName());
            if (sourcePlateTypes != null && sourcePlateTypes.size() > 1) {
                buf.append(assayPlate.getName()).append(NEWLINE);
            }
        }
        if (buf.length() > 0) {
            writer.println(
                    "WARNING: Some cherry pick plates will be created from multiple source plates of non-uniform plate types!");
            writer.println("The following cherry pick plates are specified across multiple files:");
            writer.print(buf.toString());
            writer.println();
        }
    }

    writer.flush();
}

From source file:gov.nih.nci.caadapter.ui.mapping.sdtm.SDTMMapFileTransformer.java

public void BeginTransformation() throws Exception {
    /**/*from   w  ww  .  j a  va 2s. c  o m*/
     * 1. For each key in the _csvDataFromFile, check if the key exists in _mappedData <br>
     * 1a. If exists, get the pos and the colmnname <br>
     * 2. create SDTM record instance <br>
     * 2a. setRecord <br>
     * 3. print rec <br>
     */
    SDTMRecord _sdtm = new SDTMRecord();
    MultiMap mhm = new SDTM_CSVReader().readCSVFile(_csvFileName);
    // Iterate over the keys in the map
    Iterator it = mhm.keySet().iterator();
    while (it.hasNext()) {
        // SDTMRecord _sdtm = new SDTMRecord(_csvDataFromFile);
        // Get key
        Object key = it.next();
        if (_mappedData.containsKey(key)) {
            Collection coll = (Collection) mhm.get(key);
            for (Iterator it1 = coll.iterator(); it1.hasNext();) {
                Object mappedKey = it1.next();
                StringBuffer _value = (StringBuffer) _mappedData.get(key);
                // System.out.println("==============================================");
                // System.out.println("Mappedvalues " + _value);
                // System.out.println("CSVValues are " + mappedKey);
                StringTokenizer _level0 = new StringTokenizer(_value.toString(), ",");
                while (_level0.hasMoreTokens()) {
                    StringTokenizer str = new StringTokenizer(_level0.nextToken(), "?");
                    int pos = new Integer(str.nextToken().substring(0, 2).trim()).intValue();
                    String dataKey = str.nextToken();
                    EmptyStringTokenizer emp = new EmptyStringTokenizer(mappedKey.toString(), ",");
                    createRecord1(_sdtm, emp.getTokenAt(pos - 1).toString(), dataKey.replace('.', '_'));
                }
            }
        }
    }
    _sdtm.print(defineXMLList.toString(), _saveSDTMPath);
}

From source file:edu.wustl.geneconnect.postwork.MetadataManager.java

/**
 * Looks up the stored Path whose complete-path string equals the given
 * delimiter-separated node-id path.
 *
 * @param path node ids separated by PATH_NODES_DELIMITER; the first and last
 *             ids identify the source and destination nodes
 * @return the matching Path, or null when the source node has no recorded
 *         paths or no stored path matches
 */
public Path getPath(String path) {
    String[] pathNodes = path.split(PATH_NODES_DELIMITER);
    Long sourceNodeId = Long.valueOf(pathNodes[0]);
    Long destinationNodeId = Long.valueOf(pathNodes[pathNodes.length - 1]);
    // masterPathList is indexed by source node id; each entry is a MultiMap
    // from destination node id to the candidate paths between the two nodes.
    MultiMap pathMap = (MultiMap) masterPathList.get(sourceNodeId.intValue());
    if (pathMap == null) {
        // No paths recorded for this source node — previously this NPE'd.
        return null;
    }
    Collection possiblePathsFromSrcToDest = (Collection) pathMap.get(destinationNodeId);

    if (possiblePathsFromSrcToDest != null) {
        for (Iterator iter = possiblePathsFromSrcToDest.iterator(); iter.hasNext();) {
            Path currentPath = (Path) iter.next();
            if (path.equals(currentPath.getCompletePath())) {
                return currentPath;
            }
        }
    }
    return null;
}

From source file:com.pactera.edg.am.metamanager.extractor.adapter.mapping.impl.RecordExtractMappingServiceImpl.java

/**
 * Extracts metadata dependencies from the record configuration.
 * NOTE(review): the original javadoc was mojibake; the description below is
 * reconstructed from the code — confirm against the project documentation.
 *
 * Resolves every relationship in {@code cfg} (running its SQL where
 * configured), then for each (relationship, fromId -> toIds) entry in
 * {@code depReferences} looks up the from/to metadata (following the
 * configured inheritance chain) and emits one MMDDependency per
 * (fromMetadata, toMetadata) pair.
 *
 * @param cfg the record configuration holding relationships and inherits
 * @return the list of resolved dependencies (possibly empty, never null)
 * @throws SQLException if a relationship's dependency query fails
 */
protected List<MMDDependency> extractDependency(TRecordConfigFull cfg) throws SQLException {
    // Run the SQL-backed relationships first so depReferences is populated.
    List<TRecordRelationship> depList = cfg.findAllDependency();
    for (int i = 0; i < depList.size(); i++) {
        if (depList.get(i).useSql()) {
            this.queryDependencyRelation(depList.get(i));
        }
    }

    // Cross-join the resolved from/to metadata for every reference pair.
    List<MMDDependency> result = new ArrayList<MMDDependency>();
    for (Iterator<TRecordRelationship> depIt = this.depReferences.keySet().iterator(); depIt.hasNext();) {
        TRecordRelationship depc = depIt.next();
        MultiMap map = this.depReferences.get(depc);
        for (Iterator<?> fromIt = map.keySet().iterator(); fromIt.hasNext();) {
            String fromId = (String) fromIt.next();
            // MultiMap.get returns the collection of target ids for this source id.
            Collection<?> toIds = (Collection<?>) map.get(fromId);
            if (toIds == null || toIds.isEmpty()) {
                continue;
            }
            MdKey fromKey = MetadataMap.getMetadataKey(depc.getFromClassifier(), fromId);
            List<MMMetadata> fromMetadatas = this.mdReferences.getByInherit(fromKey, cfg.getInherits());
            if (fromMetadatas == null || fromMetadatas.isEmpty()) {
                continue;
            }
            for (Iterator<?> toIt = toIds.iterator(); toIt.hasNext();) {
                String toId = (String) toIt.next();
                MdKey toKey = MetadataMap.getMetadataKey(depc.getToClassifier(), toId);
                List<MMMetadata> toMetadatas = this.mdReferences.getByInherit(toKey, cfg.getInherits());
                if (toMetadatas == null || toMetadatas.isEmpty()) {
                    continue;
                }

                // One dependency per (from, to) metadata combination.
                for (Iterator<MMMetadata> it1 = fromMetadatas.iterator(); it1.hasNext();) {
                    MMMetadata fromMetadata = it1.next();
                    for (Iterator<MMMetadata> it2 = toMetadatas.iterator(); it2.hasNext();) {
                        MMMetadata toMetadata = it2.next();
                        MMDDependency dependency = new MMDDependency();
                        dependency.setOwnerMetadata(fromMetadata);
                        dependency.setValueMetadata(toMetadata);
                        dependency.setOwnerRole(depc.getFromRole());
                        dependency.setValueRole(depc.getToRole());
                        result.add(dependency);
                    }
                }
            }
        }
    }

    return result;
}

From source file:edu.uci.ics.jung.algorithms.blockmodel.GraphCollapser.java

/**
 * Collapses a series of vertices in one EquivalenceSet into one
 * CollapsedVertex.
 *
 * @param g       A graph to collapse vertices from
 * @param rootSet A set of vertices to collapse into one CollapsedVertex
 * @return A graph with rootSet.size()-1 fewer vertices.
 */
public Graph getCollapsedGraph(Graph g, Set rootSet) {

    // first, we copy the original graph
    Graph copy = (Graph) g.copy();

    // and remove our set to merge
    for (Iterator iter = rootSet.iterator(); iter.hasNext();) {
        Vertex v = (Vertex) iter.next();
        copy.removeVertex((Vertex) v.getEqualVertex(copy));
    }

    // and create one new vertex standing in for the whole root set
    CollapsedVertex superVertex = createCollapsedVertex(copy, rootSet);
    annotateVertex(superVertex, rootSet);

    MultiMap vertices_to_edges = findEdgesAndVerticesConnectedToRootSet(superVertex.getRootSet());

    for (Iterator iter = vertices_to_edges.keySet().iterator(); iter.hasNext();) {
        Vertex opposite = (Vertex) iter.next();
        // Fetch the incident edges BEFORE translating the vertex into the
        // copy: the multimap is keyed by the original graph's vertices.
        // (This matches createEdgesCorrespondingToMap, which looks up first;
        // looking up after the translation risked a null collection.)
        Set relevantEdges = new HashSet((Collection) vertices_to_edges.get(opposite));
        opposite = (Vertex) opposite.getEqualVertex(copy);

        if (shouldAddEdge(opposite, superVertex.getRootSet(), relevantEdges)) {

            if (PredicateUtils.enforcesEdgeConstraint(g, Graph.DIRECTED_EDGE)) {
                createDirectedEdges(copy, superVertex, opposite, relevantEdges);
            } else if (PredicateUtils.enforcesEdgeConstraint(g, Graph.UNDIRECTED_EDGE)) {
                createUndirectedEdge(copy, superVertex, opposite, relevantEdges);
            } else
                throw new IllegalArgumentException(
                        "Mixed (directed/undirected" + " graphs not currently supported");
        }
    }

    return copy;
}

From source file:edu.harvard.med.screensaver.service.cherrypicks.CherryPickRequestPlateMapFilesBuilder.java

/**
 * Builds the plate-mapping zip archive in memory and returns it as a stream.
 * One zip entry is written per generated file name, each containing a header
 * row followed by one CSV row per cherry pick.
 *
 * NOTE(review): out wraps zipOut, so out.flush() after each entry pushes the
 * entry's rows into the zip; out.close() at the end also closes zipOut,
 * finishing the archive before the byte array is read.
 *
 * @param cherryPickRequest the request whose plate mappings are exported
 * @param forPlates the subset of assay plates to include
 * @return an in-memory stream over the completed zip archive
 * @throws IOException on zip or write failure
 */
@SuppressWarnings("unchecked")
private InputStream doBuildZip(CherryPickRequest cherryPickRequest, Set<CherryPickAssayPlate> forPlates)
        throws IOException {
    ByteArrayOutputStream zipOutRaw = new ByteArrayOutputStream();
    ZipOutputStream zipOut = new ZipOutputStream(zipOutRaw);
    // Maps file name -> sorted set of cherry picks destined for that file.
    MultiMap/*<String,SortedSet<CherryPick>>*/ files2CherryPicks = buildCherryPickFiles(cherryPickRequest,
            forPlates);
    buildReadme(cherryPickRequest, zipOut);
    buildDistinctPlateCopyFile(cherryPickRequest, forPlates, zipOut);
    PrintWriter out = new CSVPrintWriter(new OutputStreamWriter(zipOut), NEWLINE);
    for (Iterator iter = files2CherryPicks.keySet().iterator(); iter.hasNext();) {
        String fileName = (String) iter.next();
        ZipEntry zipEntry = new ZipEntry(fileName);
        zipOut.putNextEntry(zipEntry);
        writeHeadersRow(out);
        for (LabCherryPick cherryPick : (SortedSet<LabCherryPick>) files2CherryPicks.get(fileName)) {
            writeCherryPickRow(out, cherryPick);
        }
        out.flush();
    }
    out.close();
    return new ByteArrayInputStream(zipOutRaw.toByteArray());
}

From source file:edu.uci.ics.jung.algorithms.blockmodel.GraphCollapser.java

/**
 * Internal helper: for each vertex in the key set of
 * {@code vertices_to_edges} that is not already covered, recreates inside
 * {@code copy} the edges connecting it to the collapsed vertex {@code cv}.
 */
protected void createEdgesCorrespondingToMap(Graph copy, CollapsedVertex cv, MultiMap vertices_to_edges,
        Set coveredCV) {
    Iterator keyIter = vertices_to_edges.keySet().iterator();
    while (keyIter.hasNext()) {

        Vertex target = (Vertex) keyIter.next();

        // Vertices already processed as collapsed vertices were handled
        // earlier; their edges must not be created twice.
        if (coveredCV.contains(target)) {
            continue;
        }

        // Snapshot the incident edges, then translate the vertex into the
        // copy (a no-op for CVs, meaningful for ordinary vertices).
        Set incidentEdges = new HashSet((Collection) vertices_to_edges.get(target));
        target = (Vertex) target.getEqualVertex(copy);

        if (!shouldAddEdge(target, cv.getRootSet(), incidentEdges)) {
            continue;
        }

        if (PredicateUtils.enforcesEdgeConstraint(copy, Graph.DIRECTED_EDGE)) {
            createDirectedEdges(copy, cv, target, incidentEdges);
        } else if (PredicateUtils.enforcesEdgeConstraint(copy, Graph.UNDIRECTED_EDGE)) {
            createUndirectedEdge(copy, cv, target, incidentEdges);
        } else {
            throw new IllegalArgumentException("Mixed (directed/undirected) graphs not currently supported");
        }
    }

}

From source file:edu.wustl.geneconnect.metadata.MetadataCalculator.java

/**
 * Checks whether the path described by {@code newNodeList} is already
 * recorded for the given (source, destination) node pair.
 *
 * @param srcNodeID   source node id
 * @param destNodeID  destination node id
 * @param newNodeList candidate path as an ordered list of node ids
 * @return true if an identical node list is already stored
 */
private boolean isPathPresent(int srcNodeID, int destNodeID, List newNodeList) {
    MultiMap pathMap = getPathsForSrc(srcNodeID);
    Collection storedPaths = (Collection) pathMap.get(Integer.valueOf(destNodeID));
    if (storedPaths == null) {
        return false;
    }
    for (Iterator iter = storedPaths.iterator(); iter.hasNext();) {
        List storedPath = (List) iter.next();
        // List.equals performs the same size-then-element-by-element
        // comparison the original hand-rolled loop implemented (and the
        // original's unused test1/test2 locals are gone).
        if (storedPath.equals(newNodeList)) {
            return true;
        }
    }
    return false;
}