List of usage examples for the org.apache.commons.collections.keyvalue.MultiKey constructor
public MultiKey(Object key1, Object key2, Object key3)
From source file:com.level3.tca.schema.migration.ResourceMgr.java
/**
 * Builds a three-part resource key from a circuit id and a "::"-delimited
 * concatenated field. Parts 1 and 2 of the split (CLLI and virtual circuit)
 * may be absent, in which case they contribute {@code null} to the key.
 *
 * @param circuit the circuit identifier (first key component)
 * @param concatField a "::"-separated string; index 1 is the CLLI, index 2
 *        the virtual circuit, when present
 * @return a {@code MultiKey(circuit, vcircuit, clli)}
 */
public MultiKey makeResourceKey(String circuit, String concatField) {
    final String[] parts = concatField.split("::");
    final String clli = (parts.length > 1) ? parts[1] : null;
    final String vcircuit = (parts.length > 2) ? parts[2] : null;
    return new MultiKey(circuit, vcircuit, clli);
}
From source file:com.nextep.designer.sqlgen.mysql.impl.MySqlCapturer.java
/** * Builds the map of unique keys for the specified table. Note that the * fetched unique keys will be added to the table * // ww w. j a v a 2 s .c o m * @param md * {@link DatabaseMetaData} of the underlying database connection * @param monitor * a {@link IProgressMonitor} to report progress to * @param table * the {@link IBasicTable} to fetch unique keys for * @param columnsMap * the map of {@link IBasicColumn} hashed by their unique name * @return a map of {@link IKeyConstraint} hashed by their unique name * @throws SQLException */ private Map<String, IKeyConstraint> buildUniqueKeyMap(DatabaseMetaData md, IProgressMonitor monitor, IBasicTable table, Map<String, IBasicColumn> columnsMap) throws SQLException { final Map<String, IKeyConstraint> keysMap = new HashMap<String, IKeyConstraint>(); final String tabName = table.getName(); ResultSet rset = null; try { // Creating primary keys for this table rset = md.getPrimaryKeys(null, null, tabName); IKeyConstraint uk = null; List<MultiKey> pkCols = new ArrayList<MultiKey>(); // Because JDBC may not give us a sorted list, we first fill // a list with all pk columns, we sort it by KEY_SEQ, and we // fill our neXtep PK. 
while (rset.next()) { monitor.worked(1); final String pkName = rset.getString("PK_NAME"); //$NON-NLS-1$ final String colName = rset.getString("COLUMN_NAME"); //$NON-NLS-1$ final int colIndex = rset.getInt("KEY_SEQ") - 1; //$NON-NLS-1$ pkCols.add(new MultiKey(pkName, colIndex, colName)); } Collections.sort(pkCols, new Comparator<MultiKey>() { @Override public int compare(MultiKey o1, MultiKey o2) { if ((Integer) o1.getKeys()[1] > (Integer) o2.getKeys()[1]) { return 1; } return -1; } }); for (MultiKey pkCol : pkCols) { final String pkName = (String) pkCol.getKey(0); final String colName = (String) pkCol.getKey(2); monitor.worked(1); if (uk == null) { uk = new UniqueKeyConstraint(pkName, "", table); //$NON-NLS-1$ uk.setConstraintType(ConstraintType.PRIMARY); table.addConstraint(uk); keysMap.put(tabName.toUpperCase(), uk); } // Retrieving UK column and adding it to UK final String columnKey = CaptureHelper.getUniqueObjectName(tabName, colName); final IBasicColumn ukColumn = columnsMap.get(columnKey); if (ukColumn != null) { uk.addColumn(ukColumn); } else { LOGGER.warn(MessageFormat.format(MySQLMessages.getString("capturer.mysql.uniqueKeyNotFound"), //$NON-NLS-1$ columnKey)); } } } finally { CaptureHelper.safeClose(rset, null); } return keysMap; }
From source file:org.acre.analytics.SnapshotFactory.java
public synchronized Snapshot getSnapshot(String system, String version, Date timestamp) { MultiKey key = new MultiKey(system, version, timestamp); Snapshot ss = (Snapshot) snapshots.get(key); if (ss == null) { ss = new Snapshot(system, version, timestamp); snapshots.put(key, ss);// w w w .j a va 2s.c o m } return ss; }
From source file:org.compiere.model.MAssetAcct.java
/** * Get asset accounting./*w w w .j a v a2s . co m*/ * @param ctx context * @param A_Asset_ID asset * @param postingType Posting type * @param dateAcct check ValidFrom * @return asset accounting for the given asset */ public static MAssetAcct forA_Asset_ID(Properties ctx, int A_Asset_ID, String postingType, Timestamp dateAcct, String trxName) { MultiKey key = new MultiKey(A_Asset_ID, postingType, dateAcct); MAssetAcct acct = null; if (trxName == null) { // do not use cache //acct = s_cacheAsset.get(key); } if (acct != null) { return acct; } // ArrayList<Object> params = new ArrayList<Object>(); StringBuffer whereClause = new StringBuffer( COLUMNNAME_A_Asset_ID + "=? AND " + COLUMNNAME_PostingType + "=?"); params.add(A_Asset_ID); params.add(postingType); if (dateAcct != null) { whereClause.append(" AND " + COLUMNNAME_ValidFrom).append("<=?"); params.add(dateAcct); } acct = new Query(ctx, Table_Name, whereClause.toString(), trxName).setParameters(params) .setOrderBy(COLUMNNAME_ValidFrom + " DESC NULLS LAST").first(); if (trxName == null) { addToCache(acct, key); } return acct; }
From source file:org.compiere.process.RequisitionPOCreate.java
/**
 * Create new Order (or reuse a cached one) for the given requisition line.
 * Closes any order currently being built, resolves the business partner, then
 * looks up / creates a purchase order keyed by (partner, date required,
 * price list). Mutates instance state: m_order, m_bpartner, m_M_Requisition_ID.
 *
 * @param rLine request line
 * @param C_BPartner_ID b.partner
 * @throws Exception propagated from order close/save
 */
private void newOrder(MRequisitionLine rLine, int C_BPartner_ID) throws Exception {
    // Finish the previously accumulated order before starting a new one
    if (m_order != null) {
        closeOrder();
    }
    // BPartner - only reload when the partner actually changed
    if (m_bpartner == null || C_BPartner_ID != m_bpartner.get_ID()) {
        m_bpartner = MBPartner.get(getCtx(), C_BPartner_ID);
    }
    // Order - one order per (partner, date required, price list) triple
    Timestamp DateRequired = rLine.getDateRequired();
    int M_PriceList_ID = rLine.getParent().getM_PriceList_ID();
    MultiKey key = new MultiKey(C_BPartner_ID, DateRequired, M_PriceList_ID);
    m_order = m_cacheOrders.get(key);
    if (m_order == null) {
        m_order = new MOrder(getCtx(), 0, get_TrxName());
        m_order.setDatePromised(DateRequired);
        m_order.setIsSOTrx(false);
        m_order.setC_DocTypeTarget_ID(); // default po document type
        m_order.setBPartner(m_bpartner);
        m_order.setM_PriceList_ID(M_PriceList_ID);
        // When not consolidating, tag the order with its source requisition
        if (!p_ConsolidateDocument) {
            m_order.setDescription(
                    Msg.getElement(getCtx(), "M_Requisition_ID") + ": " + rLine.getParent().getDocumentNo());
        }
        // Prepare Save
        m_order.saveEx();
        // Put to cache
        m_cacheOrders.put(key, m_order);
    }
    m_M_Requisition_ID = rLine.getM_Requisition_ID();
}
From source file:org.intermine.bio.dataconversion.HpoConverter.java
protected void processAnnoFile(Reader reader) throws IOException, ObjectStoreException { BufferedReader br = new BufferedReader(reader); String line = null;/*from w ww.j a va2 s.c om*/ // loop through entire file while ((line = br.readLine()) != null) { String[] array = line.split("\t", -1); // keep trailing empty Strings // HPO Annotation File Format: // http://www.human-phenotype-ontology.org/contao/index.php/annotation-guide.html if (array.length < 9) { throw new IllegalArgumentException( "Not enough elements (should be > 8 not " + array.length + ") in line: " + line); } String db = array[0]; if (ignoreDbList.contains(db)) { continue; } String dbId = db + ":" + array[1]; String dbName = array[2]; if (dbName.contains(toDiscard)) { continue; } // Save id and namne to map for future use dbName = dbName.replaceAll(regex, "").replaceAll("@", ""); diseaseIdNameMap.put(dbId, dbName.trim()); String qualifier = array[3]; String hpoId = array[4]; String dbRef = array[5]; String eviCode = array[6]; String freq = array[8]; String assignedBy = array[13]; storeEvidenceCode(eviCode); String[] eviInfo = { dbRef, eviCode, freq, assignedBy }; if (annoMap.get(new MultiKey(dbId, hpoId, qualifier)) == null) { Set<String[]> eviInfoSet = new HashSet<String[]>(); eviInfoSet.add(eviInfo); annoMap.put(new MultiKey(dbId, hpoId, qualifier), eviInfoSet); } else { annoMap.get(new MultiKey(dbId, hpoId, qualifier)).add(eviInfo); } } }
From source file:org.intermine.bio.dataconversion.SequenceProcessor.java
/**
 * Reads the featureprop table and, for each property of an already-seen
 * feature, applies the configured actions: SetFieldConfigAction sets an
 * attribute on the stored object, CreateSynonymAction creates a synonym
 * (skipping duplicates).
 *
 * @param connection the chado database connection
 * @throws SQLException on query failure
 * @throws ObjectStoreException on store failure
 */
private void processFeaturePropTable(Connection connection) throws SQLException, ObjectStoreException {
    ResultSet res = getFeaturePropResultSet(connection);
    int count = 0;
    while (res.next()) {
        Integer featureId = new Integer(res.getInt("feature_id"));
        String identifier = res.getString("value");
        if (identifier == null) {
            continue;
        }
        String propTypeName = res.getString("type_name");
        // Only process features captured earlier in featureMap
        if (featureMap.containsKey(featureId)) {
            FeatureData fdat = featureMap.get(featureId);
            // Config is keyed by ("prop", InterMine type, property type name)
            MultiKey key = new MultiKey("prop", fdat.getInterMineType(), propTypeName);
            int taxonId = fdat.organismData.getTaxonId();
            List<ConfigAction> actionList = getConfig(taxonId).get(key);
            if (actionList == null) {
                // no actions configured for this prop
                continue;
            }
            // NOTE(review): fieldsSet is populated but never read in this
            // method - possibly vestigial; confirm before removing.
            Set<String> fieldsSet = new HashSet<String>();
            // First pass: set attributes
            for (ConfigAction action : actionList) {
                if (action instanceof SetFieldConfigAction) {
                    SetFieldConfigAction setAction = (SetFieldConfigAction) action;
                    if (setAction.isValidValue(identifier)) {
                        String newFieldValue = setAction.processValue(identifier);
                        setAttribute(fdat.getIntermineObjectId(), setAction.getFieldName(), newFieldValue);
                        fieldsSet.add(newFieldValue);
                        if ("primaryIdentifier".equals(setAction.getFieldName())) {
                            fdat.setFlag(FeatureData.IDENTIFIER_SET, true);
                        }
                    }
                }
            }
            // Second pass: create synonyms, skipping ones that already exist
            for (ConfigAction action : actionList) {
                if (action instanceof CreateSynonymAction) {
                    CreateSynonymAction synonymAction = (CreateSynonymAction) action;
                    if (!synonymAction.isValidValue(identifier)) {
                        continue;
                    }
                    String newFieldValue = synonymAction.processValue(identifier);
                    Set<String> existingSynonyms = fdat.getExistingSynonyms();
                    if (existingSynonyms.contains(newFieldValue)) {
                        continue;
                    }
                    Item synonym = createSynonym(fdat, newFieldValue);
                    if (synonym != null) {
                        getChadoDBConverter().store(synonym);
                        count++;
                    }
                }
            }
        }
    }
    LOG.info("created " + count + " synonyms from the featureprop table");
    res.close();
}
From source file:org.intermine.bio.dataconversion.SequenceProcessor.java
/**
 * This method isn't used yet, it takes up too much memory. We need to use a temporary table
 * instead.
 *
 * Reads the library_feature table and applies configured SetFieldConfigAction
 * actions (keyed by "library", InterMine type, property type name) to features
 * already present in featureMap.
 *
 * @param connection the chado database connection
 * @throws SQLException on query failure
 * @throws ObjectStoreException on store failure
 */
@SuppressWarnings("unused")
private void processLibraryFeatureTable(Connection connection) throws SQLException, ObjectStoreException {
    ResultSet res = getLibraryFeatureResultSet(connection);
    while (res.next()) {
        Integer featureId = new Integer(res.getInt("feature_id"));
        String identifier = res.getString("value");
        if (identifier == null) {
            continue;
        }
        String propTypeName = res.getString("type_name");
        if (featureMap.containsKey(featureId)) {
            FeatureData fdat = featureMap.get(featureId);
            MultiKey key = new MultiKey("library", fdat.getInterMineType(), propTypeName);
            int taxonId = fdat.organismData.getTaxonId();
            List<ConfigAction> actionList = getConfig(taxonId).get(key);
            if (actionList == null) {
                // no actions configured for this prop
                continue;
            }
            // NOTE(review): fieldsSet is written but never read here -
            // likely copied from processFeaturePropTable; confirm intent.
            Set<String> fieldsSet = new HashSet<String>();
            for (ConfigAction action : actionList) {
                if (action instanceof SetFieldConfigAction) {
                    SetFieldConfigAction setAction = (SetFieldConfigAction) action;
                    if (setAction.isValidValue(identifier)) {
                        String newFieldValue = setAction.processValue(identifier);
                        setAttribute(fdat.getIntermineObjectId(), setAction.getFieldName(), newFieldValue);
                        fieldsSet.add(newFieldValue);
                        if ("primaryIdentifier".equals(setAction.getFieldName())) {
                            fdat.setFlag(FeatureData.IDENTIFIER_SET, true);
                        }
                    }
                }
            }
        }
    }
    res.close();
}
From source file:org.intermine.bio.dataconversion.SequenceProcessor.java
/** * This method isn't used yet, it takes up too much memory. We need to use a temporary table * instead./* w w w .j av a 2 s. co m*/ */ @SuppressWarnings("unused") private void processLibraryCVTermTable(Connection connection) throws SQLException, ObjectStoreException { ResultSet res = getLibraryCVTermResultSet(connection); while (res.next()) { Integer featureId = new Integer(res.getInt("feature_id")); String identifier = res.getString("term_identifier"); if (identifier == null) { continue; } if (featureMap.containsKey(featureId)) { FeatureData fdat = featureMap.get(featureId); MultiKey key = new MultiKey("anatomyterm", fdat.getInterMineType(), null); int taxonId = fdat.organismData.getTaxonId(); List<ConfigAction> actionList = getConfig(taxonId).get(key); if (actionList == null) { // no actions configured for this prop continue; } for (ConfigAction action : actionList) { if (action instanceof SetFieldConfigAction) { SetFieldConfigAction setAction = (SetFieldConfigAction) action; if (setAction.isValidValue(identifier)) { Reference termReference = new Reference(); termReference.setName(setAction.getFieldName()); String termRefId = makeAnatomyTerm(identifier); if (termRefId == null) { continue; } termReference.setRefId(termRefId); getChadoDBConverter().store(termReference, fdat.getIntermineObjectId()); } } } } } res.close(); }
From source file:org.intermine.bio.dataconversion.SequenceProcessor.java
/**
 * Read the feature, feature_cvterm and cvterm tables, then set fields, create synonyms or
 * create objects based on the cvterms.
 * Rows are assumed grouped by feature_id: collected reference/collection data
 * for a feature is flushed via processCVTermRefCols whenever the feature id
 * changes, and once more after the loop for the final feature.
 * @param connection the Connection
 * @throws SQLException on query failure
 * @throws ObjectStoreException on store failure
 */
private void processFeatureCVTermTable(Connection connection) throws SQLException, ObjectStoreException {
    ResultSet res = getFeatureCVTermResultSet(connection);
    int count = 0;
    Integer previousFeatureId = null;
    // map from reference/collection name to list of Items to store in the reference or
    // collection
    Map<String, List<Item>> dataMap = new HashMap<String, List<Item>>();
    while (res.next()) {
        Integer featureId = new Integer(res.getInt("feature_id"));
        String cvtermName = res.getString("cvterm_name");
        String cvName = res.getString("cv_name");
        FeatureData fdat = featureMap.get(featureId);
        if (fdat == null) {
            continue;
        }
        // Feature changed: flush the accumulated data for the previous one
        if (!featureId.equals(previousFeatureId) && previousFeatureId != null) {
            processCVTermRefCols(previousFeatureId, dataMap);
            dataMap = new HashMap<String, List<Item>>();
        }
        // Config is keyed by ("cvterm", InterMine type, controlled-vocabulary name)
        MultiKey key = new MultiKey("cvterm", fdat.getInterMineType(), cvName);
        int taxonId = fdat.organismData.getTaxonId();
        List<ConfigAction> actionList = getConfig(taxonId).get(key);
        if (actionList == null) {
            // no actions configured for this prop
            continue;
        }
        // NOTE(review): fieldsSet is populated but never read in this method.
        Set<String> fieldsSet = new HashSet<String>();
        for (ConfigAction action : actionList) {
            if (action instanceof SetFieldConfigAction) {
                SetFieldConfigAction setAction = (SetFieldConfigAction) action;
                if (setAction.isValidValue(cvtermName)) {
                    String newFieldValue = setAction.processValue(cvtermName);
                    setAttribute(fdat.getIntermineObjectId(), setAction.getFieldName(), newFieldValue);
                    fieldsSet.add(newFieldValue);
                    if ("primaryIdentifier".equals(setAction.getFieldName())) {
                        fdat.setFlag(FeatureData.IDENTIFIER_SET, true);
                    }
                }
            } else {
                if (action instanceof CreateSynonymAction) {
                    CreateSynonymAction synonymAction = (CreateSynonymAction) action;
                    if (!synonymAction.isValidValue(cvtermName)) {
                        continue;
                    }
                    String newFieldValue = synonymAction.processValue(cvtermName);
                    Set<String> existingSynonyms = fdat.getExistingSynonyms();
                    if (existingSynonyms.contains(newFieldValue)) {
                        continue;
                    }
                    Item synonym = createSynonym(fdat, newFieldValue);
                    if (synonym != null) {
                        getChadoDBConverter().store(synonym);
                        count++;
                    }
                } else {
                    // TODO fixme - disabled CreateCollectionAction handling, kept
                    // for reference until the collection-creation path is finished:
                    // if (action instanceof CreateCollectionAction) {
                    //     CreateCollectionAction cca = (CreateCollectionAction) action;
                    //
                    //     Item item = null;
                    //     String fieldName = cca.getFieldName();
                    //     String className = cca.getClassName();
                    //     if (cca.createSingletons()) {
                    //         MultiKey singletonKey =
                    //             new MultiKey(className, fieldName, cvtermName);
                    //         item = (Item) singletonMap.get(singletonKey);
                    //     }
                    //     if (item == null) {
                    //         item = getChadoDBConverter().createItem(className);
                    //         item.setAttribute(fieldName, cvtermName);
                    //         getChadoDBConverter().store(item);
                    //         if (cca.createSingletons()) {
                    //             singletonMap.put(key, item);
                    //         }
                    //     }
                    //
                    //     String referenceName = cca.getReferenceName();
                    //     List<Item> itemList;
                    //     // creating collection, already seen this ref
                    //     if (dataMap.containsKey(referenceName)) {
                    //         itemList = dataMap.get(referenceName);
                    //     // new collection
                    //     } else {
                    //         itemList = new ArrayList<Item>();
                    //         dataMap.put(referenceName, itemList);
                    //     }
                    //     itemList.add(item);
                    // }
                }
            }
        }
        previousFeatureId = featureId;
    }
    // Flush the data accumulated for the last feature
    if (previousFeatureId != null) {
        processCVTermRefCols(previousFeatureId, dataMap);
    }
    LOG.info("created " + count + " synonyms from the feature_cvterm table");
    res.close();
}