List of usage examples for org.apache.commons.collections MultiMap entrySet
Set<Map.Entry<K, V>> entrySet();
From source file:net.jadler.stubbing.server.jetty.StubHandler.java
/**
 * Copies every header from the stub definition into the servlet response.
 * The MultiMap associates each header name with a collection of values, so a
 * single entry may result in several addHeader calls.
 */
private void setResponseHeaders(final MultiMap headers, final HttpServletResponse response) {
    for (final Object o : headers.entrySet()) {
        @SuppressWarnings("unchecked")
        final Entry<String, Collection<String>> header = (Entry<String, Collection<String>>) o;
        for (final String value : header.getValue()) {
            response.addHeader(header.getKey(), value);
        }
    }
}
From source file:net.jadler.stubbing.Request.java
private Map<String, List<String>> readParameters() throws IOException { final MultiMap params = readParametersFromQueryString(); //TODO: shitty attempt to check whether the body contains html form data. Please refactor. if (!StringUtils.isBlank(this.getContentType()) && this.getContentType().contains("application/x-www-form-urlencoded")) { if ("POST".equalsIgnoreCase(this.getMethod()) || "PUT".equalsIgnoreCase(this.getMethod())) { params.putAll(this.readParametersFromBody()); }/*from w w w .j ava 2s. co m*/ } final Map<String, List<String>> res = new HashMap<String, List<String>>(); for (final Object o : params.entrySet()) { final Map.Entry<String, List<String>> e = (Map.Entry) o; res.put(e.getKey(), unmodifiableList(e.getValue())); } return unmodifiableMap(res); }
From source file:edu.wustl.geneconnect.metadata.MetadataCalculator.java
private List getPathsWithSpecifiedLength(int srcID, int pathLength) { List pathList = new ArrayList(); // get all paths for given source node MultiMap pathMap = getPathsForSrc(srcID); Iterator mapIter = pathMap.entrySet().iterator(); for (Iterator iter = pathMap.entrySet().iterator(); iter.hasNext();) { Map.Entry entry = (Map.Entry) iter.next(); List nodeList = (List) entry.getValue(); int size = nodeList.size(); for (int i = 0; i < size; ++i) { List IDList = (List) nodeList.get(i); if (IDList.size() == pathLength) { // create a new list that includes the source ID List newList = new ArrayList(); newList.add(new Integer(srcID)); for (int k = 0; k < IDList.size(); ++k) { newList.add(IDList.get(k)); }/* w ww .j ava 2 s.c o m*/ // add this to the list of all paths pathList.add(newList); } } } return pathList; }
From source file:edu.wustl.geneconnect.metadata.MetadataCalculator.java
private void getSubPaths() { int pathCount = 1; for (int srcID = 0; srcID < m_numSources; ++srcID) { // Get all paths for 'srcID' MultiMap nodePaths = (MultiMap) m_masterPathList.get(srcID); Iterator mapIter = nodePaths.entrySet().iterator(); // This 'for' loop denotes all paths from a given srcID to all possible destinations for (Iterator iter = nodePaths.entrySet().iterator(); iter.hasNext();) { Map.Entry entry = (Map.Entry) iter.next(); int destID = ((Integer) (entry.getKey())).intValue(); List nodeList = (List) entry.getValue(); int size = nodeList.size(); // below 'for' loop denotes all paths to a given destination with 'srcID' as starting node for (int i = 0; i < size; ++i) { // 'nodeIDList' contains a single path (of node IDs) excluding srcID List nodeIDList = (List) nodeList.get(i); List pathList = new ArrayList(); pathList.add(new Integer(srcID)); for (int k = 0; k < nodeIDList.size(); k++) { pathList.add(nodeIDList.get(k)); }/*from www. ja v a 2 s . c o m*/ calculateSubPaths(pathCount, pathList); ++pathCount; } } } }
From source file:edu.wustl.geneconnect.metadata.MetadataCalculator.java
private void deleteRedundantPaths(List pathList) { // for each path in in-memory structure, compare list with above list // if exact match between the 2 or in-memory path contained in 'pathList' above, delete the in-memory path from in-memory data structure for (int srcID = 0; srcID < m_numSources; ++srcID) { // get all paths for given source node MultiMap pathMap = getPathsForSrc(srcID); Iterator mapIter = pathMap.entrySet().iterator(); for (Iterator iter = pathMap.entrySet().iterator(); iter.hasNext();) { Map.Entry entry = (Map.Entry) iter.next(); List nodeList = (List) entry.getValue(); int size = nodeList.size(); for (int i = 0; i < nodeList.size(); ++i) { List IDlist = (List) nodeList.get(i); // form list to be compared against List nodesInPath = new ArrayList(); nodesInPath.add(new Integer(srcID)); for (int k = 0; k < IDlist.size(); ++k) { nodesInPath.add(IDlist.get(k)); }// ww w.ja v a2s . co m // proceed with comparison only if length(pathList) > length(nodesInPath), since 'pathList' should contain 'nodesInPath' if (pathList.size() >= nodesInPath.size()) { int numMatches = 0; // check if 'nodesInPath' is contained in 'pathList' // search for first occurrence of first element of 'nodesInPath' in 'pathList' Integer first = (Integer) nodesInPath.get(0); int indx = pathList.indexOf(first); if (indx != -1) { int length = nodesInPath.size(); // compare 'length' elements from 'indx' position in 'pathList' to 'nodesInPath' for (int j = indx, k = 0; ((j < pathList.size()) && (k < length)); ++j, ++k) { if (pathList.get(j).equals(nodesInPath.get(k))) { ++numMatches; } else // if not equal, can break out of loop immediately { break; } } } // delete if following is true // a) 'nodesInPath' is entirely contained in 'pathList' // b) 'nodesInPath' is not the same as 'pathList' (so that longest non-redundant paths are not compared with itself and deleted) if ((numMatches == nodesInPath.size()) && (nodesInPath.size() != pathList.size())) { // delete from original data 
structure nodeList.remove(i); } } } } } return; }
From source file:edu.wustl.geneconnect.metadata.MetadataCalculator.java
private void calculateSubPaths(int pathIndex, List pathList) { int subPathIndex = 1; for (int srcID = 0; srcID < m_numSources; ++srcID) { // get all paths for given source node MultiMap pathMap = getPathsForSrc(srcID); Iterator mapIter = pathMap.entrySet().iterator(); for (Iterator iter = pathMap.entrySet().iterator(); iter.hasNext();) { Map.Entry entry = (Map.Entry) iter.next(); List nodeList = (List) entry.getValue(); int size = nodeList.size(); for (int i = 0; i < nodeList.size(); ++i) { List IDlist = (List) nodeList.get(i); // form list to be compared against List nodesInPath = new ArrayList(); nodesInPath.add(new Integer(srcID)); for (int k = 0; k < IDlist.size(); ++k) { nodesInPath.add(IDlist.get(k)); }/*from w w w . j a v a2s .c om*/ // proceed with comparison only if length(pathList) > length(nodesInPath), since 'pathList' should contain 'nodesInPath' if (pathList.size() >= nodesInPath.size()) { int numMatches = 0; // check if 'nodesInPath' is contained in 'pathList' // search for first occurrence of first element of 'nodesInPath' in 'pathList' Integer first = (Integer) nodesInPath.get(0); int indx = pathList.indexOf(first); if (indx != -1) { int length = nodesInPath.size(); // compare 'length' elements from 'indx' position in 'pathList' to 'nodesInPath' for (int j = indx, k = 0; ((j < pathList.size()) && (k < length)); ++j, ++k) { if (pathList.get(j).equals(nodesInPath.get(k))) { ++numMatches; } else // if not equal, can break out of loop immediately { break; } } } // delete if following is true // a) 'nodesInPath' is entirely contained in 'pathList' // b) 'nodesInPath' is not the same as 'pathList' (so that longest non-redundant paths are not compared with itself and deleted) if ((numMatches == nodesInPath.size()) && (nodesInPath.size() != pathList.size())) { List subPathList = (List) m_pathToSubPathMap.get(new Integer(pathIndex)); if (subPathList == null) { subPathList = new ArrayList(); subPathList.add(new Integer(subPathIndex)); 
m_pathToSubPathMap.put(new Integer(pathIndex), subPathList); } else { subPathList.add(new Integer(subPathIndex)); } } } ++subPathIndex; } } } return; }
From source file:edu.wustl.geneconnect.metadata.MetadataCalculator.java
/** * Create following output files which will use sqlloader to populate database tables * O/p file for all paths -- populates PATH table *//*ww w . j ava 2 s . co m*/ private void writePaths(String fileName, int fileFormat) { // All paths // This portion fills the following tables: // a) ALL_PATH int loopCount = 1; try { FileWriter opFile3 = new FileWriter(new File(fileName)); if (fileFormat == DB_FORMAT) { opFile3.write("LOAD DATA INFILE * APPEND INTO TABLE PATH FIELDS TERMINATED BY '" + FIELD_DELIMITER + "' " + "(PATH_ID, SOURCE_DATASOURCE_ID, PATH, TARGET_DATASOURCE_ID)\n" + "BEGINDATA\n"); } for (int srcID = 0; srcID < m_numSources; ++srcID) { // Get all paths for 'srcID' MultiMap nodePaths = (MultiMap) m_masterPathList.get(srcID); Iterator mapIter = nodePaths.entrySet().iterator(); // This 'for' loop denotes all paths from a given srcID to all possible destinations for (Iterator iter = nodePaths.entrySet().iterator(); iter.hasNext();) { Map.Entry entry = (Map.Entry) iter.next(); int destID = ((Integer) (entry.getKey())).intValue(); List nodeList = (List) entry.getValue(); // below 'for' loop denotes all paths to a given destination with 'srcID' as starting node for (int i = 0; i < nodeList.size(); ++i) { StringBuffer temp = new StringBuffer(); if (fileFormat == DB_FORMAT) { // Write Record count in the o/p temp.append((new Integer(loopCount)).toString() + FIELD_DELIMITER); //Write source datasource node ID temp.append(new Integer(srcID).toString() + FIELD_DELIMITER); } else { //Write source datasource node ID temp.append(new Integer(srcID).toString() + PATH_NODES_DELIMITER); } // 'nodeIDList' contains a single path (of node IDs) excluding srcID // ######################################################################## List nodeIDList = (List) nodeList.get(i); for (int h = 0; h < nodeIDList.size() - 1; ++h) { //Write intermediate datasource node ID temp.append(nodeIDList.get(h) + PATH_NODES_DELIMITER); } if (fileFormat == DB_FORMAT) { if (nodeIDList.size() > 
1) { temp.deleteCharAt(temp.length() - 1); temp.append(FIELD_DELIMITER); } else { temp.append(FIELD_DELIMITER); } } // Write Destination datasource node ID temp.append(destID); temp.append("\n"); opFile3.write(temp.toString()); opFile3.flush(); ++loopCount; // ######################################################################## } // end of path } } } catch (IOException ioex) { System.out.println("Failed to create output file in current directory" + ioex.getMessage()); } }
From source file:com.zimbra.cs.store.file.BlobDeduper.java
/**
 * Replaces duplicate blob files with hard links to a single source file.
 * Chooses as the link source either an already-processed blob's file or, failing
 * that, the inode that already has the most paths linked to it; then links every
 * remaining blob path to it via a temp-link-then-rename dance so a readable file
 * always exists at each path.
 *
 * NOTE(review): presumably all blobs passed in share identical content — the
 * caller is expected to guarantee this; confirm against the call site.
 *
 * @param blobs references to blob files believed to be duplicates of one another
 * @return pair of (number of hard links created, total bytes saved)
 * @throws ServiceException declared, but not visibly thrown in this body;
 *         presumably markBlobAsProcessed can throw it — TODO confirm
 */
private Pair<Integer, Long> deDupe(List<BlobReference> blobs) throws ServiceException {
    int linksCreated = 0;
    long sizeSaved = 0;
    long srcInodeNum = 0;
    String srcPath = null;
    // check if there is any processed blob
    for (BlobReference blob : blobs) {
        if (blob.isProcessed()) {
            String path = FileBlobStore.getBlobPath(blob.getMailboxId(), blob.getItemId(), blob.getRevision(),
                    blob.getVolumeId());
            try {
                IO.FileInfo fileInfo = IO.fileInfo(path);
                if (fileInfo != null) {
                    // an already-processed blob wins immediately as the source
                    srcInodeNum = fileInfo.getInodeNum();
                    srcPath = path;
                    break;
                }
            } catch (IOException e) {
                // ignore
            }
        }
    }
    if (srcInodeNum == 0) {
        // check the path with maximum links
        // organize the paths based on inode
        MultiMap inodeMap = new MultiValueMap();
        for (BlobReference blob : blobs) {
            String path = FileBlobStore.getBlobPath(blob.getMailboxId(), blob.getItemId(), blob.getRevision(),
                    blob.getVolumeId());
            try {
                IO.FileInfo fileInfo = IO.fileInfo(path);
                if (fileInfo != null) {
                    inodeMap.put(fileInfo.getInodeNum(), path);
                    blob.setFileInfo(fileInfo);
                }
            } catch (IOException e) {
                // ignore
            }
        }
        // find inode which has maximum paths
        int maxPaths = 0;
        @SuppressWarnings("unchecked")
        Iterator<Map.Entry<Long, Collection<String>>> iter = inodeMap.entrySet().iterator();
        while (iter.hasNext()) {
            Map.Entry<Long, Collection<String>> entry = iter.next();
            if (entry.getValue().size() > maxPaths) {
                maxPaths = entry.getValue().size();
                srcInodeNum = entry.getKey();
                srcPath = entry.getValue().iterator().next();
            }
        }
    }
    if (srcInodeNum == 0) {
        // no usable file found at all; nothing to dedupe
        return new Pair<Integer, Long>(0, Long.valueOf(0));
    }
    // First create a hard link for the source path, so that the file
    // doesn't get deleted in the middle.
    String holdPath = srcPath + "_HOLD";
    File holdFile = new File(holdPath);
    try {
        IO.link(srcPath, holdPath);
        // Now link the other paths to source path
        for (BlobReference blob : blobs) {
            if (blob.isProcessed()) {
                continue;
            }
            String path = FileBlobStore.getBlobPath(blob.getMailboxId(), blob.getItemId(), blob.getRevision(),
                    blob.getVolumeId());
            try {
                if (blob.getFileInfo() == null) {
                    blob.setFileInfo(IO.fileInfo(path));
                }
            } catch (IOException e) {
                // ignore
            }
            if (blob.getFileInfo() == null) {
                // file could not be stat'ed; skip this blob
                continue;
            }
            if (srcInodeNum == blob.getFileInfo().getInodeNum()) {
                // already linked to the source inode; just mark it done
                markBlobAsProcessed(blob);
                continue;
            }
            // create the links for paths in two steps.
            // first create a temp link and then rename it to actual path
            // this guarantees that the file is always available.
            String tempPath = path + "_TEMP";
            File tempFile = new File(tempPath);
            try {
                IO.link(holdPath, tempPath);
                File destFile = new File(path);
                tempFile.renameTo(destFile);
                markBlobAsProcessed(blob);
                linksCreated++;
                sizeSaved += blob.getFileInfo().getSize();
            } catch (IOException e) {
                ZimbraLog.misc.warn("Ignoring the error while deduping " + path, e);
            } finally {
                // on success the temp link was renamed away; only a leftover
                // from a failure is deleted here
                if (tempFile.exists()) {
                    tempFile.delete();
                }
            }
        }
    } catch (IOException e) {
        ZimbraLog.misc.warn("Ignoring the error while creating a link for " + srcPath, e);
    } finally {
        // delete the hold file
        if (holdFile.exists()) {
            holdFile.delete();
        }
    }
    return new Pair<Integer, Long>(linksCreated, sizeSaved);
}
From source file:edu.wustl.geneconnect.metadata.MetadataCalculator.java
/** * Create following output files which will use sqlloader to populate database tables *//*from w ww.j ava 2s. com*/ private void writeONTs(String ontFileName, String ontFileNameForDB, String pathOntMappingFileName) { // All paths // This portion fills the following tables: // a) ONT (Linked-list like table) // b) PATH TO ONT mapping int loopCount = 1; try { FileWriter opOntFile = new FileWriter(new File(ontFileName)); FileWriter opOntFileForDB = new FileWriter(new File(ontFileNameForDB)); FileWriter opPathOntMappingForDB = new FileWriter(new File(pathOntMappingFileName)); opOntFileForDB.write("LOAD DATA INFILE * APPEND INTO TABLE ONT FIELDS TERMINATED BY '" + FIELD_DELIMITER + "' " + "(PATH_ID, SOURCE_DS_ID, LINKTYPE_ID, NEXT_PATH_ID, PREV_PATH_ID)\n" + "BEGINDATA\n"); opPathOntMappingForDB.write("LOAD DATA INFILE * APPEND INTO TABLE PATH_ONT FIELDS TERMINATED BY '" + FIELD_DELIMITER + "' " + "(PATH_ID, ONT_ID)\n" + "BEGINDATA\n"); for (int srcID = 0; srcID < m_numSources; ++srcID) { // Get all paths for 'srcID' MultiMap nodePaths = (MultiMap) m_masterPathList.get(srcID); Iterator mapIter = nodePaths.entrySet().iterator(); // This 'for' loop denotes all paths from a given srcID to all possible destinations for (Iterator iter = nodePaths.entrySet().iterator(); iter.hasNext();) { Map.Entry entry = (Map.Entry) iter.next(); int destID = ((Integer) (entry.getKey())).intValue(); List allPathsfromCurrentSourceToDest = (List) entry.getValue(); int size = allPathsfromCurrentSourceToDest.size(); // below 'for' loop denotes all paths to a given destination with 'srcID' as starting node for (int i = 0; i < size; ++i) { // 'nodeIDList' contains a single path (of node IDs) excluding srcID // ######################################################################## List currentPath = (List) allPathsfromCurrentSourceToDest.get(i); // List of lists to form ONTs // each list will contain the same src nodes but with different link types List currentCompletePath = new ArrayList(); 
List linkCounts = new ArrayList(); /* ############# Form complete path including source ############# */ currentCompletePath.add(new Integer(srcID)); int totalIntermediateNodes = currentPath.size(); for (int h = 0; h < totalIntermediateNodes - 1; ++h) { Integer intermediateID = (Integer) currentPath.get(h); currentCompletePath.add(intermediateID); } // Add Final destination source as well into the list currentCompletePath.add((Integer) currentPath.get(totalIntermediateNodes - 1)); /* ############# Form complete path including source ############# */ int currentPathLength = currentCompletePath.size(); // Get link types for every pair of <source & destination> for (int h = 0; h < currentPathLength - 1; ++h) { // get the link types between src and dest List srcDestLinks = getLinkTypes(((Integer) currentCompletePath.get(h)).intValue(), ((Integer) currentCompletePath.get(h + 1)).intValue()); linkCounts.add(new Integer(srcDestLinks.size())); } // Get the total number of paths such that different link types between same // pair of adjacent nodes appear in different paths) // Calculate total number of paths int totalPaths = 1; for (int m = 0; m < linkCounts.size(); m++) { totalPaths = totalPaths * ((Integer) linkCounts.get(m)).intValue(); } // Intialize link type array to hold all comabination of possible link types for the current path int[][] linkTypesForAllCombinationsOfCurrentPath = new int[totalPaths][]; for (int t = 0; t < totalPaths; t++) { linkTypesForAllCombinationsOfCurrentPath[t] = new int[linkCounts.size()]; } int groupCount = totalPaths; int mainGroupCnt = 1; int cnt = 0; // Loop though all the sources in the path for (int n = 0; n < currentPathLength - 1; n++) { cnt = 0; //number of times each link needs to be repeated with main group groupCount = groupCount / ((Integer) linkCounts.get(n)).intValue(); // Fetch data-structure which holds linktype indexes // between source and destination List srcDestLinks = getLinkTypes(((Integer) 
currentCompletePath.get(n)).intValue(), ((Integer) currentCompletePath.get(n + 1)).intValue()); // The main group for which links type combination has to be repeated for (int mainG = 0; mainG < mainGroupCnt; ++mainG) { for (int j = 0; j < ((Integer) linkCounts.get(n)).intValue(); ++j) { // Repeat Number of groups for every sub-group for (int k = 0; k < groupCount; ++k) { linkTypesForAllCombinationsOfCurrentPath[cnt][n] = ((Integer) (srcDestLinks .get(j))).intValue(); ++cnt; } } } mainGroupCnt = mainGroupCnt * (((Integer) linkCounts.get(n)).intValue()); } StringBuffer sbOntFile = new StringBuffer(); StringBuffer sbOntDb = new StringBuffer(); StringBuffer sbPathOntMapping = new StringBuffer(); writeOntForGivenPath(loopCount, currentCompletePath, linkTypesForAllCombinationsOfCurrentPath, sbOntFile, sbOntDb, sbPathOntMapping); opOntFile.write(sbOntFile.toString()); opOntFile.flush(); opOntFileForDB.write(sbOntDb.toString()); opOntFileForDB.flush(); opPathOntMappingForDB.write(sbPathOntMapping.toString()); opPathOntMappingForDB.flush(); loopCount++; // ######################################################################## } // end of path } } } catch (IOException ioex) { System.out.println("Failed to create output file in current directory" + ioex.getMessage()); } }
From source file:com.manydesigns.portofino.pageactions.AbstractPageAction.java
/**
 * Lazily builds the map of layout container name to embedded page actions for
 * this page's children, sorting the actions inside each container. The result
 * is cached in embeddedPageActions and reused on subsequent calls.
 *
 * @return MultiMap of container name to EmbeddedPageAction instances
 */
public MultiMap initEmbeddedPageActions() {
    if (embeddedPageActions != null) {
        return embeddedPageActions;
    }
    MultiMap actionsByContainer = new MultiHashMap();
    Layout layout = pageInstance.getLayout();
    for (ChildPage child : layout.getChildPages()) {
        String container = child.getContainer();
        if (container == null) {
            // child is not embedded in any layout container
            continue;
        }
        String childPath = context.getActionPath() + "/" + child.getName();
        File childDir = new File(pageInstance.getChildrenDirectory(), child.getName());
        try {
            Page childPageDescriptor = DispatcherLogic.getPage(childDir);
            EmbeddedPageAction action = new EmbeddedPageAction(child.getName(), child.getActualOrder(),
                    childPath, childPageDescriptor);
            actionsByContainer.put(container, action);
        } catch (PageNotActiveException e) {
            logger.warn("Embedded page action is not active, skipping! " + childDir, e);
        }
    }
    // order the actions within each container
    for (Object entryObj : actionsByContainer.entrySet()) {
        Map.Entry entry = (Map.Entry) entryObj;
        Collections.sort((List) entry.getValue());
    }
    embeddedPageActions = actionsByContainer;
    return embeddedPageActions;
}