Example usage for org.apache.commons.collections MultiMap put

Introduction

On this page you can find example usages of org.apache.commons.collections MultiMap put.

Prototype

Object put(Object key, Object value);

Document

Adds the value to the collection associated with the specified key.
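
A minimal, self-contained sketch of this behavior (class names from Commons Collections 3.x; MultiHashMap was later deprecated in favor of MultiValueMap, which several of the examples below use): putting two values under the same key accumulates both, and get returns the whole collection.

import java.util.Collection;

import org.apache.commons.collections.MultiHashMap;
import org.apache.commons.collections.MultiMap;

public class MultiMapPutDemo {
    public static void main(String[] args) {
        MultiMap mmap = new MultiHashMap();
        mmap.put("fruit", "apple");
        mmap.put("fruit", "banana"); // same key: the value is added, not replaced
        mmap.put("vegetable", "carrot");

        // get(key) returns the collection of all values stored under the key
        Collection fruits = (Collection) mmap.get("fruit");
        System.out.println(fruits); // [apple, banana]
    }
}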

Usage

From source file:ar.com.fdvs.dj.core.layout.AbstractLayoutManager.java

protected void layoutCharts() {
    // Group charts by column group
    MultiMap mmap = new MultiHashMap();
    for (Iterator iter = getReport().getCharts().iterator(); iter.hasNext();) {
        DJChart djChart = (DJChart) iter.next();
        mmap.put(djChart.getColumnsGroup(), djChart);
    }

    for (Iterator iterator = mmap.keySet().iterator(); iterator.hasNext();) {
        Object key = iterator.next();
        Collection charts = (Collection) mmap.get(key);
        ArrayList l = new ArrayList(charts);
        // Iterate the charts in reverse so they are laid out in insertion order
        for (int i = l.size(); i > 0; i--) {
            DJChart djChart = (DJChart) l.get(i - 1);
            JRDesignChart chart = createChart(djChart);

            // Each chart has its own band, so it is added to the band at Y=0
            JRDesignBand band = createGroupForChartAndGetBand(djChart);
            band.addElement(chart);
        }
    }

    // Group charts by column group
    mmap = new MultiHashMap();
    for (Iterator iter = getReport().getNewCharts().iterator(); iter.hasNext();) {
        ar.com.fdvs.dj.domain.chart.DJChart djChart = (ar.com.fdvs.dj.domain.chart.DJChart) iter.next();
        mmap.put(djChart.getDataset().getColumnsGroup(), djChart);
    }

    for (Iterator iterator = mmap.keySet().iterator(); iterator.hasNext();) {
        Object key = iterator.next();
        Collection charts = (Collection) mmap.get(key);
        ArrayList l = new ArrayList(charts);
        // Iterate the charts in reverse so they are laid out in insertion order
        for (int i = l.size(); i > 0; i--) {
            ar.com.fdvs.dj.domain.chart.DJChart djChart = (ar.com.fdvs.dj.domain.chart.DJChart) l.get(i - 1);
            String name = "chart_" + (i - 1);
            JRDesignChart chart = createChart(djChart, name);

            if (djChart.getLink() != null)
                HyperLinkUtil.applyHyperLinkToElement((DynamicJasperDesign) getDesign(), djChart.getLink(),
                        chart, name + "_hyperlink");

            // Each chart has its own band, so it is added to the band at Y=0
            JRDesignBand band = createGroupForChartAndGetBand(djChart);
            band.addElement(chart);
        }
    }
}

From source file:edu.wustl.geneconnect.postwork.MetadataManager.java

private void cachePaths() {
    for (int i = 0; i < dataSources.size(); ++i) {
        MultiMap pathMap = new MultiValueMap();
        masterPathList.add(pathMap);
    }

    Path currentPath = null;
    String sqlQuery = "select PATH_ID, SOURCE_DATASOURCE_ID, TARGET_DATASOURCE_ID, PATH FROM PATH";
    // Execute the query to fetch all rows from the PATH table
    ResultSet resultSet = dbManager.executeSQLQuery(sqlQuery);
    try {
        while (resultSet.next()) {
            // Prepare the Path object for this row
            currentPath = new Path(new Long(resultSet.getString(1)), new Long(resultSet.getString(2)),
                    new Long(resultSet.getString(3)), resultSet.getString(4));

            // Put it into the map, which will be referred to later
            paths.put(new Long(resultSet.getString(1)), currentPath);

            MultiMap pathMap = (MultiMap) masterPathList.get(currentPath.getSourceDataSourceId().intValue());
            pathMap.put(currentPath.getTargetDataSourceId(), currentPath);
        }
    } catch (SQLException e) {
        Logger.log("SQLException occurred while fetching the metadata from the database", Logger.FATAL);
        SummaryExceptionHandler.handleException(e);
    }
}

From source file:fr.in2p3.cc.storage.treqs.control.dispatcher.Dispatcher.java

/**
 * Inner loop of new requests.
 * 
 * @param newRequests
 *            Map of new requests.
 * @param listNewRequests
 *            List of new requests.
 * @throws TReqSException
 *             If there is a problem retrieving the objects.
 */
private void getNewRequestsInner(final MultiMap newRequests,
        final List<PersistenceHelperFileRequest> listNewRequests) throws TReqSException {
    LOGGER.trace("> getNewRequestsInner");

    assert newRequests != null;
    assert listNewRequests != null;

    final Iterator<PersistenceHelperFileRequest> iterator = listNewRequests.iterator();
    while (iterator.hasNext()) {
        final PersistenceHelperFileRequest dbFileRequest = iterator.next();
        LOGGER.debug("New request [{}] for file '{}' from user: '{}'", new Object[] { dbFileRequest.getId(),
                dbFileRequest.getFileName(), dbFileRequest.getOwnerName() });
        final User owner = UsersController.getInstance().add(dbFileRequest.getOwnerName());
        final FileRequest newFileReq = new FileRequest(dbFileRequest.getId(), dbFileRequest.getFileName(),
                owner, dbFileRequest.getNumberTries());

        newRequests.put(dbFileRequest.getFileName(), newFileReq);
    }

    LOGGER.trace("< getNewRequestsInner");
}

From source file:edu.harvard.med.screensaver.service.cherrypicks.CherryPickRequestPlateMapFilesBuilder.java

@SuppressWarnings("unchecked")
/**
 * Normally, we create 1 file per assay plate. However, in the case where an
 * assay plate is comprised of wells from library copy plates that have
 * different plate types, we need to generate a separate file for each source
 * plate type (i.e., the assay plate will be defined over multiple files).
 * @return a MultiMap that partitions the cherry picks by file,
 * ordering both the file names and cherry picks for each file.
 */
private MultiMap/*<String,SortedSet<CherryPick>>*/ buildCherryPickFiles(CherryPickRequest cherryPickRequest,
        Set<CherryPickAssayPlate> forPlates) {
    MultiMap assayPlate2SourcePlateTypes = getSourcePlateTypesForEachAssayPlate(cherryPickRequest);

    MultiMap result = MultiValueMap.decorate(new TreeMap<String, SortedSet<LabCherryPick>>(), new Factory() {
        public Object create() {
            return new TreeSet<LabCherryPick>(PlateMappingCherryPickComparator.getInstance());
        }
    });

    // HACK: transform set of CPAP into a set of IDs, for purpose of checking
    // set membership; we can't rely upon CPAP.equals(), since we're comparing
    // non-managed entities with managed entities, and therefore we do not have
    // the guarantee of instance equality for entities with the same ID
    Set<Serializable> forPlateIds = new HashSet<Serializable>(forPlates.size());
    for (CherryPickAssayPlate cpap : forPlates) {
        if (cpap.getEntityId() == null) {
            throw new IllegalArgumentException(
                    "all members of 'forPlates' must already be persisted and have a database identifier");
        }
        forPlateIds.add(cpap.getEntityId());
    }

    for (LabCherryPick cherryPick : cherryPickRequest.getLabCherryPicks()) {
        if (cherryPick.isAllocated()) {
            CherryPickAssayPlate assayPlate = cherryPick.getAssayPlate();
            if (forPlates == null || (assayPlate != null && forPlateIds.contains(assayPlate.getEntityId()))) {
                Set<PlateType> sourcePlateTypes = (Set<PlateType>) assayPlate2SourcePlateTypes
                        .get(assayPlate.getName());
                String fileName = makeFilename(cherryPick, sourcePlateTypes.size());
                result.put(fileName, cherryPick);
            }
        }
    }
    return result;
}
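
The decorate call above is worth a note: MultiValueMap.decorate(Map, Factory) lets the caller pick both the key ordering (the decorated TreeMap) and the collection type created per key (the Factory). A smaller sketch of the same idiom, assuming Commons Collections 3.x semantics (the DecorateDemo class name is illustrative):

import java.util.TreeMap;
import java.util.TreeSet;

import org.apache.commons.collections.Factory;
import org.apache.commons.collections.MultiMap;
import org.apache.commons.collections.map.MultiValueMap;

public class DecorateDemo {
    public static void main(String[] args) {
        MultiMap sorted = MultiValueMap.decorate(new TreeMap(), new Factory() {
            public Object create() {
                return new TreeSet(); // values per key: sorted and de-duplicated
            }
        });
        sorted.put("b", "beta");
        sorted.put("a", "alpha");
        sorted.put("a", "alpha"); // duplicate, dropped by the TreeSet
        System.out.println(sorted); // keys iterate in TreeMap (sorted) order
    }
}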

From source file:com.pactera.edg.am.metamanager.extractor.adapter.mapping.impl.RecordExtractMappingServiceImpl.java

/**
 * Queries the dependency relations defined by the given record relationship.
 * @param depc the record relationship to query
 * @return a MultiMap of dependency relations; for the key format see {@link #getMetadataKey(String, String)}
 * @throws SQLException if the query fails
 */
private MultiMap queryDependencyRelation(TRecordRelationship depc) throws SQLException {
    if (log.isInfoEnabled()) {
        StringBuffer sb = new StringBuffer();
        sb.append("?FROM=").append(depc.getFromClassifier());
        sb.append(" TO=").append(depc.getToClassifier());
        sb.append(" FromRole=").append(depc.getFromRole()).append(" ToRole=").append(depc.getToRole());
        sb.append(" FromColumns=").append(depc.getFromColumns()).append(" ToColumns=")
                .append(depc.getToColumns());
        sb.append("  SQL=").append(depc.getRelSqlScript());
        log.info(sb.toString());
    }

    MultiMap map = this.depReferences.get(depc);
    if (map == null) {
        map = new MultiValueMap();
        this.depReferences.put(depc, map);
    }

    Page page = new Page(0, SIZE);
    ResultSetGetter rs = null;
    while (true) {
        rs = connectionVisitor.query(depc.getRelSqlScript(), page.startIndex, page.pageSize);
        if (log.isInfoEnabled()) {
            log.info("" + (page.startIndex / page.pageSize + 1) + ""
                    + (rs.getRowCount()) + "?");
        }
        for (int i = 0, count = rs.getRowCount(); i < count; i++) {
            Map<String, Object> row = rs.getRow(i);
            String fromId = extractValue(row, depc.getFromColumns());
            String toId = extractValue(row, depc.getToColumns());
            MdKey fromKey = MetadataMap.getMetadataKey(depc.getFromClassifier(), fromId);
            MdKey toKey = MetadataMap.getMetadataKey(depc.getToClassifier(), toId);
            if (!this.mdReferences.containsKeyInherit(fromKey, cfg.getInherits())) {
                continue; // skip: the From-side metadata key is unknown
            }
            if (!this.mdReferences.containsKeyInherit(toKey, cfg.getInherits())) {
                continue; // skip: the To-side metadata key is unknown
            }
            map.put(fromId, toId);
        }

        if (rs.getRowCount() < SIZE) { // last page reached
            break;
        }
        page.next();
    }
    return map;
}

From source file:edu.wustl.geneconnect.metadata.MetadataCalculator.java

/**
 * Adds a path to the multi map.
 * The key is the source node ID.
 * @param fromNodeID
 * @param toNodeID
 * @param usingNodeID
 */
private void updatePath(int fromNodeID, int toNodeID, int usingNodeID) {
    if (fromNodeID == toNodeID) {
        return;
    }

    // Get all paths from 'fromNodeID'
    MultiMap pathMap = getPathsForSrc(fromNodeID);

    if (pathMap.containsKey(new Integer(usingNodeID))) {
        // Get all possible paths from 'fromNodeID' to 'usingNodeID'  
        Collection coll = (Collection) pathMap.get(new Integer(usingNodeID));

        if (null != coll) {
            for (Iterator iter = coll.iterator(); iter.hasNext();) {
                List l1 = (List) iter.next();
                List newNodeList = new ArrayList();

                // Check for cycles, i.e. the path must NOT contain 'toNodeID'
                if (!l1.contains(new Integer(toNodeID))) {
                    // Append each path to the 'toNodeID'

                    for (Iterator iter1 = l1.iterator(); iter1.hasNext();) {
                        newNodeList.add(iter1.next());
                    }

                    // now add 'toNodeID' as the last element
                    newNodeList.add(new Integer(toNodeID));

                    // Add this entry to the map after checking that it is not already present
                    if (!isPathPresent(fromNodeID, toNodeID, newNodeList)) {
                        pathMap.put(new Integer(toNodeID), newNodeList);
                    }
                }
            }
        }
    } else {
        // This should ideally never happen
    }
}
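
The containsKey guard above matters because get on a Commons Collections MultiMap returns null, not an empty collection, for an absent key. A minimal sketch of that behavior, assuming 3.x semantics (the AbsentKeyDemo class name is illustrative):

import java.util.Collection;

import org.apache.commons.collections.MultiMap;
import org.apache.commons.collections.map.MultiValueMap;

public class AbsentKeyDemo {
    public static void main(String[] args) {
        MultiMap pathMap = new MultiValueMap();
        pathMap.put(new Integer(1), "path-a");

        // get returns the stored collection for a known key ...
        Collection hit = (Collection) pathMap.get(new Integer(1));
        // ... but null, not an empty collection, for an unknown key
        Collection miss = (Collection) pathMap.get(new Integer(2));
        System.out.println(hit);  // [path-a]
        System.out.println(miss); // null
    }
}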

From source file:com.krawler.workflow.bizservice.WorkflowServiceImpl.java

public String importWorkflow(String processid) throws ServiceException {
    String result = "{\"success\":false}";
    try {

        DocumentBuilderFactory docBuilderFactory = DocumentBuilderFactory.newInstance();
        DocumentBuilder docBuilder = docBuilderFactory.newDocumentBuilder();

        String path = ConfigReader.getinstance().get("workflowpath") + processid;

        File fdir = new File(path);
        File file = new File(fdir + System.getProperty("file.separator") + "bpmn.xml");
        Document doc = docBuilder.parse(file);
        int s;
        int a;
        String name = "";
        ObjectInfo obj = new ObjectInfo();

        HashMap<String, ObjectInfo> activityHm = new HashMap<String, ObjectInfo>();
        NodeList nodeList = doc.getElementsByTagName("Activity");

        for (s = 0; s < nodeList.getLength(); s++) {
            name = "";
            Node node = nodeList.item(s);

            if (node.getNodeType() == Node.ELEMENT_NODE) {
                obj = new ObjectInfo();
                obj.type = getNodeType(node);
                getNodeInfo(node, obj);
                if (obj.type.equals("activity")) {
                    Node graphicsInfoNode = getActivityNode(node, 1);
                    getGraphicsNodeInfo(graphicsInfoNode, obj);
                }
                activityHm.put(obj.objId, obj);
            }

        }

        NodeList transitionList = doc.getElementsByTagName("Transitions");
        String fromId = "";
        String toId = "";
        ObjectInfo fromObj;
        ObjectInfo toObj;
        ObjectInfo tempObj;
        JSONObject jobj;
        JSONObject jtemp = new com.krawler.utils.json.base.JSONObject();
        HashMap<String, String> fromConditionHm = new HashMap<String, String>();
        MultiMap toConditionHm = new MultiHashMap();
        for (int i = 0; i < transitionList.getLength(); i++) {
            Node node = transitionList.item(i);
            NodeList childrenList = node.getChildNodes();
            for (int cnt = 0; cnt < childrenList.getLength(); cnt++) {
                node = childrenList.item(cnt);
                if (node.getNodeType() == Node.ELEMENT_NODE) {
                    NamedNodeMap attr = node.getAttributes();
                    for (int b = 0; b < attr.getLength(); b++) {
                        Node attribute = attr.item(b);
                        name = attribute.getNodeName();
                        if (name.compareToIgnoreCase("From") == 0) {
                            fromId = attribute.getNodeValue();
                        } else if (name.compareToIgnoreCase("To") == 0) {
                            toId = attribute.getNodeValue();
                        }
                    }
                    fromObj = activityHm.get(fromId);
                    toObj = activityHm.get(toId);
                    if (fromObj.type.equals("start")) {
                        tempObj = new ObjectInfo();
                        tempObj = activityHm.get(toId);
                        tempObj.hasStart = "true";
                        activityHm.put(toId, tempObj);
                        continue;
                    }
                    if (toObj.type.equals("end")) {
                        tempObj = new ObjectInfo();
                        tempObj = activityHm.get(fromId);
                        tempObj.hasEnd = "true";
                        activityHm.put(fromId, tempObj);
                        continue;
                    }
                    if (fromObj.type.equals("activity") && toObj.type.equals("activity")) {
                        jobj = new com.krawler.utils.json.base.JSONObject();
                        jobj.put("fromId", "flowPanel" + fromId);
                        jobj.put("toId", "flowPanel" + toId);
                        jtemp.append("Lines", jobj);
                        tempObj = new ObjectInfo();
                        tempObj = activityHm.get(fromId);
                        tempObj.derivationRule = "sequence";
                        activityHm.put(fromId, tempObj);
                        continue;
                    }
                    if (fromObj.type.equals("activity") && toObj.type.equals("condition")) {
                        fromConditionHm.put(toId, fromId);
                        tempObj = new ObjectInfo();
                        tempObj = activityHm.get(fromId);
                        tempObj.derivationRule = "evaluation";
                        activityHm.put(fromId, tempObj);
                        continue;
                    }
                    if (fromObj.type.equals("condition") && toObj.type.equals("activity")) {
                        toConditionHm.put(fromId, toId);
                        continue;
                    }
                }
            }
        }

        Set keys = activityHm.keySet();
        Iterator ite = keys.iterator();
        while (ite.hasNext()) {
            String key = (String) ite.next();
            obj = new ObjectInfo();
            obj = activityHm.get(key);
            if (obj.type.equals("activity")) {
                jobj = new com.krawler.utils.json.base.JSONObject();
                jobj.put("Id", "flowPanel" + obj.objId);
                jobj.put("name", obj.name);
                jobj.put("xpos", obj.xpos);
                jobj.put("ypos", obj.ypos);
                jobj.put("height", obj.height);
                jobj.put("width", obj.width);
                jobj.put("parent", obj.parentId);
                jobj.put("refId", obj.refId);
                jobj.put("hasStart", obj.hasStart);
                jobj.put("hasEnd", obj.hasEnd);
                jobj.put("startRefId", obj.startRefId);
                jobj.put("endRefId", obj.endRefId);
                jobj.put("derivationRule", obj.derivationRule);
                jobj.put("domEl", obj.domEl);
                jtemp.append("data", jobj);
            }
        }

        keys = fromConditionHm.keySet();
        ite = keys.iterator();
        Iterator ite1 = null;
        String key = "";
        while (ite.hasNext()) {
            key = (String) ite.next();
            fromId = fromConditionHm.get(key);
            List toList = (List) toConditionHm.get(key);
            ite1 = toList.iterator();
            while (ite1.hasNext()) {
                toId = (String) ite1.next();
                jobj = new com.krawler.utils.json.base.JSONObject();
                jobj.put("fromId", "flowPanel" + fromId);
                jobj.put("toId", "flowPanel" + toId);
                jtemp.append("Lines", jobj);
            }
        }
        return jtemp.toString();
    } catch (ParserConfigurationException ex) {
        logger.warn(ex.getMessage(), ex);
        result = "{\"success\":false}";
        throw ServiceException.FAILURE("workflow.reloadWorkflow", ex);
    } catch (SAXException ex) {
        logger.warn(ex.getMessage(), ex);
        result = "{\"success\":false}";
        throw ServiceException.FAILURE("workflow.reloadWorkflow", ex);
    } catch (IOException ex) {
        logger.warn(ex.getMessage(), ex);
        result = "{\"success\":false}";
        throw ServiceException.FAILURE("workflow.reloadWorkflow", ex);
    } catch (JSONException ex) {
        logger.warn(ex.getMessage(), ex);
        result = "{\"success\":false}";
        throw ServiceException.FAILURE("workflow.reloadWorkflow", ex);
    }
}
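
One detail in the lookup above: the result of toConditionHm.get(key) is cast straight to List. That works because MultiHashMap happens to back each key with an ArrayList, but the MultiMap interface only promises a Collection. A sketch of the implementation-agnostic pattern (the TransitionLookupDemo class and its keys are illustrative):

import java.util.Collection;
import java.util.Iterator;

import org.apache.commons.collections.MultiHashMap;
import org.apache.commons.collections.MultiMap;

public class TransitionLookupDemo {
    public static void main(String[] args) {
        MultiMap toConditionHm = new MultiHashMap();
        toConditionHm.put("cond1", "act1");
        toConditionHm.put("cond1", "act2");

        // Depend only on the Collection contract of MultiMap.get,
        // not on MultiHashMap's internal ArrayList
        Collection toIds = (Collection) toConditionHm.get("cond1");
        if (toIds != null) {
            for (Iterator it = toIds.iterator(); it.hasNext();) {
                String toId = (String) it.next();
                System.out.println(toId); // act1, act2
            }
        }
    }
}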

From source file:org.apache.flume.channel.file.ReplayHandler.java

void replayLog(List<File> logs) throws IOException {
    int total = 0;
    int count = 0;
    MultiMap transactionMap = new MultiValueMap();
    LOG.info("Starting replay of " + logs);
    for (File log : logs) {
        LOG.info("Replaying " + log);
        LogFile.SequentialReader reader = null;
        try {
            reader = new LogFile.SequentialReader(log);
            reader.skipToLastCheckpointPosition(queue.getTimestamp());
            Pair<Integer, TransactionEventRecord> entry;
            FlumeEventPointer ptr;
            // for puts the fileId is the fileID of the file they exist in
            // for takes the fileId and offset are pointers to a put
            int fileId = reader.getLogFileID();
            int readCount = 0;
            int putCount = 0;
            int takeCount = 0;
            int rollbackCount = 0;
            int commitCount = 0;
            int skipCount = 0;
            while ((entry = reader.next()) != null) {
                int offset = entry.getLeft();
                TransactionEventRecord record = entry.getRight();
                short type = record.getRecordType();
                long trans = record.getTransactionID();
                readCount++;
                if (record.getTimestamp() > lastCheckpoint) {
                    if (type == TransactionEventRecord.Type.PUT.get()) {
                        putCount++;
                        ptr = new FlumeEventPointer(fileId, offset);
                        transactionMap.put(trans, ptr);
                    } else if (type == TransactionEventRecord.Type.TAKE.get()) {
                        takeCount++;
                        Take take = (Take) record;
                        ptr = new FlumeEventPointer(take.getFileID(), take.getOffset());
                        transactionMap.put(trans, ptr);
                    } else if (type == TransactionEventRecord.Type.ROLLBACK.get()) {
                        rollbackCount++;
                        transactionMap.remove(trans);
                    } else if (type == TransactionEventRecord.Type.COMMIT.get()) {
                        commitCount++;
                        @SuppressWarnings("unchecked")
                        Collection<FlumeEventPointer> pointers = (Collection<FlumeEventPointer>) transactionMap
                                .remove(trans);
                        if (pointers != null && pointers.size() > 0) {
                            processCommit(((Commit) record).getType(), pointers);
                            count += pointers.size();
                        }
                    } else {
                        Preconditions.checkArgument(false, "Unknown record type: " + Integer.toHexString(type));
                    }

                } else {
                    skipCount++;
                }
            }
            LOG.info("Replayed " + count + " from " + log);
            if (LOG.isDebugEnabled()) {
                LOG.debug("read: " + readCount + ", put: " + putCount + ", take: " + takeCount + ", rollback: "
                        + rollbackCount + ", commit: " + commitCount + ", skip: " + skipCount);
            }
        } catch (EOFException e) {
            LOG.warn("Hit EOF on " + log);
        } finally {
            total += count;
            count = 0;
            if (reader != null) {
                reader.close();
            }
        }
    }
    int pendingTakesSize = pendingTakes.size();
    if (pendingTakesSize > 0) {
        String msg = "Pending takes " + pendingTakesSize + " exist after the end of replay";
        if (LOG.isDebugEnabled()) {
            for (Long pointer : pendingTakes) {
                LOG.debug("Pending take " + FlumeEventPointer.fromLong(pointer));
            }
        } else {
            LOG.error(msg + ". Duplicate messages will exist in destination.");
        }
    }
    LOG.info("Replayed " + total);
}
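
ReplayHandler also leans on another MultiMap behavior: remove(key) takes out and returns every value accumulated under that key, which is how a COMMIT record collects all the puts and takes of a transaction in one call. A minimal sketch, assuming Commons Collections 3.x semantics (the CommitDrainDemo class name is illustrative):

import java.util.Collection;

import org.apache.commons.collections.MultiMap;
import org.apache.commons.collections.map.MultiValueMap;

public class CommitDrainDemo {
    public static void main(String[] args) {
        MultiMap transactionMap = new MultiValueMap();
        Long trans = new Long(42L);
        transactionMap.put(trans, "pointer-1");
        transactionMap.put(trans, "pointer-2");

        // remove(key) drains and returns the whole collection for the key
        Collection pointers = (Collection) transactionMap.remove(trans);
        System.out.println(pointers);                  // [pointer-1, pointer-2]
        System.out.println(transactionMap.get(trans)); // null, nothing left
    }
}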

From source file:org.lockss.extractor.TestArticleMetadata.java

public void testCookList() throws MetadataException {
    am.putRaw("r1", "V1");
    am.putRaw("r2", "V2");
    am.putRaw("r3", "V3");
    am.putRaw("r3", "V4");
    MultiMap map = new MultiValueMap();
    map.put("r2", FIELD_ARTICLE_TITLE);
    map.put("r1", FIELD_VOLUME);
    map.put("r1", FIELD_ISSUE);
    map.put("r3", FIELD_AUTHOR);
    map.put("r3", FIELD_KEYWORDS);
    am.cook(map);
    assertEquals("V1", am.get(FIELD_VOLUME));
    assertEquals("V1", am.get(FIELD_ISSUE));
    assertEquals("V2", am.get(FIELD_ARTICLE_TITLE));
    assertEquals("V3", am.get(FIELD_AUTHOR));
    assertEquals(ListUtil.list("V3", "V4"), am.getList(FIELD_AUTHOR));
    assertEquals(ListUtil.list("V3", "V4"), am.getList(FIELD_KEYWORDS));
    assertEquals(3, am.rawSize());
    assertEquals(5, am.size());
}

From source file:org.lockss.extractor.TestArticleMetadata.java

public void testCookListError() throws MetadataException {
    am.putRaw("r1", "V1");
    am.putRaw("r2", "V2");
    am.putRaw("r3", "bill");
    am.putRaw("r3", "frank");
    am.putRaw("r3", "william");
    MultiMap map = new MultiValueMap();
    map.put("r2", FIELD_ARTICLE_TITLE);
    map.put("r1", FIELD_VOLUME);
    map.put("r1", FIELD_ISSUE);
    map.put("r3", FIELD_AUTHOR);
    map.put("r3", FIELD_MULTI);
    List<MetadataException> errs = am.cook(multiMap(map));

    assertEquals(2, errs.size());
    MetadataException ex1 = errs.get(0);
    MetadataException ex2 = errs.get(1);
    assertClass(MetadataException.ValidationException.class, ex1);
    assertEquals(FIELD_MULTI, ex1.getField());
    assertEquals("bill", ex1.getRawValue());
    assertClass(MetadataException.ValidationException.class, ex2);
    assertEquals(FIELD_MULTI, ex2.getField());
    assertEquals("william", ex2.getRawValue());

    assertEquals("V1", am.get(FIELD_VOLUME));
    assertEquals("V1", am.get(FIELD_ISSUE));
    assertEquals("V2", am.get(FIELD_ARTICLE_TITLE));
    assertEquals("bill", am.get(FIELD_AUTHOR));
    assertEquals(ListUtil.list("bill", "frank", "william"), am.getList(FIELD_AUTHOR));
    assertEquals(ListUtil.list("frank"), am.getList(FIELD_MULTI));
    assertEquals(3, am.rawSize());
    assertEquals(5, am.size());
}