List of usage examples for org.apache.commons.collections.map.LRUMap
public LRUMap(Map map)
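Before the project examples, a minimal standalone sketch of the behavior they all rely on: once the fixed capacity is reached, the least recently used entry is evicted, and the map must be wrapped externally for thread safety. Class and variable names below are illustrative only, not taken from any of the source files that follow.

import java.util.Collections;
import java.util.Map;
import org.apache.commons.collections.map.LRUMap;

public class LRUMapExample {
    public static void main(String[] args) {
        // Fixed-capacity map: exceeding the maximum size evicts the
        // least recently used entry.
        LRUMap cache = new LRUMap(2);
        cache.put("a", 1);
        cache.put("b", 2);
        cache.get("a");        // touching "a" makes "b" the LRU entry
        cache.put("c", 3);     // evicts "b"
        System.out.println(cache.containsKey("b")); // prints false

        // The idiom used in several of the examples below: wrap the map,
        // since LRUMap itself is not synchronized.
        Map syncCache = Collections.synchronizedMap(new LRUMap(128));
        syncCache.put("key", "value");
    }
}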
From source file:org.lexevs.dao.database.operation.transitivity.DefaultTransitivityBuilder.java
public void computeTransitivityTable(String codingSchemeUri, String version) {
    BatchInsertController batchController = new BatchInsertController(codingSchemeUri, version);

    List<String> transitiveAssociations = this.getTransitiveAssociationPredicateIds(codingSchemeUri, version);

    for (String associationPredicateId : transitiveAssociations) {
        // make a hashset that holds the entire current set of
        // relations.
        String sourceECNS = null;
        String sourceEC = null;
        String targetECNS = null;
        String targetEC = null;

        LRUMap insertedCache = new LRUMap(10000000);

        TripleIterator tripleIterator = new TripleIterator(databaseServiceManager, codingSchemeUri, version,
                associationPredicateId);

        for (Triple triple : tripleIterator) {
            sourceECNS = triple.getSourceEntityNamespace();
            sourceEC = triple.getSourceEntityCode();
            targetECNS = triple.getTargetEntityNamespace();
            targetEC = triple.getTargetEntityCode();

            if (!sourceEC.equals("@") && !targetEC.equals("@@")) {
                StringTuple sourceCode = new StringTuple();
                sourceCode.namespace = sourceECNS;
                sourceCode.code = sourceEC;
                StringTuple targetCode = new StringTuple();
                targetCode.namespace = targetECNS;
                targetCode.code = targetEC;

                insertIntoTransitiveClosure(associationPredicateId, sourceCode, targetCode, insertedCache,
                        sourceCode.code + CODE_NAMESPACE_DELIMITER + sourceCode.namespace + PATH_DELIMITER
                                + targetCode.code + CODE_NAMESPACE_DELIMITER + targetCode.namespace,
                        batchController);
            }
        }

        // get the unique source codes for this relationship - and
        // get all of the codes.
        logger.info("ComputeTransitive - Processing " + associationPredicateId);

        List<Node> distinctSourceTriples = getDistinctSourceTriples(codingSchemeUri, version,
                associationPredicateId);

        ArrayList<StringTuple> sourceCodes = new ArrayList<StringTuple>();
        sourceECNS = null;
        sourceEC = null;
        targetECNS = null;
        targetEC = null;

        for (Node sourceNode : distinctSourceTriples) {
            sourceECNS = sourceNode.getEntityCodeNamespace();
            sourceEC = sourceNode.getEntityCode();
            if (!sourceEC.equals("@")) {
                StringTuple temp = new StringTuple();
                temp.namespace = sourceECNS;
                temp.code = sourceEC;
                sourceCodes.add(temp);
            }
        }

        // Now I have all of the top source codes for this relationship.
        // Need to recurse down the tree, adding nodes to the transitive
        // table as necessary.
        for (int j = 0; j < sourceCodes.size(); j++) {
            List<Node> targetNodes = getTargetTriples(codingSchemeUri, version, associationPredicateId,
                    sourceCodes.get(j).code, sourceCodes.get(j).namespace);

            ArrayList<StringTuple> targetCodes = new ArrayList<StringTuple>();
            sourceECNS = null;
            sourceEC = null;
            targetECNS = null;
            targetEC = null;

            for (Node targetNode : targetNodes) {
                targetECNS = targetNode.getEntityCodeNamespace();
                targetEC = targetNode.getEntityCode();

                if (!targetEC.equals("@@")) {
                    StringTuple temp = new StringTuple();
                    temp.namespace = targetECNS;
                    temp.code = targetEC;
                    targetCodes.add(temp);
                }
            }

            String path = sourceCodes.get(j).code + CODE_NAMESPACE_DELIMITER + sourceCodes.get(j).namespace
                    + PATH_DELIMITER;

            processTransitive(codingSchemeUri, version, associationPredicateId, sourceCodes.get(j), targetCodes,
                    insertedCache, batchController, path);
        }
    }

    batchController.flush();
}
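The body of insertIntoTransitiveClosure is not shown above, but the call pattern suggests the LRUMap serves as a bounded "already inserted" set keyed by the delimited source/target path, so repeated pairs are usually skipped without the set growing without limit the way a HashSet would. A hypothetical sketch of that kind of check (key format, class, and method names are assumptions, not code from DefaultTransitivityBuilder):

import org.apache.commons.collections.map.LRUMap;

public class DedupeSketch {
    // Bounded "seen" cache: old entries eventually fall out, trading a few
    // duplicate inserts for a hard cap on memory use.
    private final LRUMap insertedCache = new LRUMap(10000000);

    public boolean markIfUnseen(String sourceKey, String targetKey) {
        String key = sourceKey + "->" + targetKey; // illustrative delimiter only
        if (insertedCache.containsKey(key)) {
            return false;            // very likely inserted already; skip
        }
        insertedCache.put(key, Boolean.TRUE);
        return true;                 // caller performs the batch insert
    }
}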
From source file:org.lexevs.system.ResourceManager.java
/**
 * Initializes the resource manager.
 *
 * @throws Exception the exception
 */
public void init() throws Exception {
    cache_ = Collections.synchronizedMap(new LRUMap(systemVars_.getCacheSize()));

    // This increases the ability of Lucene to do queries against
    // large indexes like the MetaThesaurus without getting errors.
    BooleanQuery.setMaxClauseCount(systemVars_.getLuceneMaxClauseCount());

    codingSchemeToServerMap_ = new Hashtable<String, String>();
    sqlServerInterfaces_ = new Hashtable<String, SQLInterface>();
    historySqlServerInterfaces_ = new Hashtable<String, SQLHistoryInterface>();
    codingSchemeLocalNamesToInternalNameMap_ = new Hashtable<String, Hashtable<String, String>>();
    internalCodingSchemeNameUIDMap_ = new Hashtable<String, List<LocalCodingScheme>>();
    supportedCodingSchemeToInternalMap_ = new Hashtable<String, String>();

    // populate the registry
    // registry_ = new XmlRegistry(systemVars_.getAutoLoadRegistryPath());

    // connect to the histories
    readHistories();

    // go through all of the sql servers and read all of the available code systems.
    // initialize the SQL connections to each server.
    org.lexevs.registry.service.XmlRegistry.DBEntry[] entries = registry_.getDBEntries();
    for (int i = 0; i < entries.length; i++) {
        SQLConnectionInfo temp = new SQLConnectionInfo();
        temp.driver = systemVars_.getAutoLoadDBDriver();
        temp.password = systemVars_.getAutoLoadDBPassword();
        temp.server = entries[i].dbURL;
        temp.prefix = entries[i].prefix;
        temp.username = systemVars_.getAutoLoadDBUsername();
        readTerminologiesFromServer(temp);
    }
    logger_.debug("Reading available terminologies from SQL servers.");

    // same thing as above, this time for pre-configured servers
    Hashtable<String, SQLConnectionInfo> servers = systemVars_.getSqlServers();
    Enumeration<SQLConnectionInfo> e = servers.elements();
    while (e.hasMoreElements()) {
        SQLConnectionInfo server = e.nextElement();
        readTerminologiesFromServer(server);
    }
    logger_.debug("Reading available terminologies from the lucene index locations");

    // go through all of the index locations, finding the right index for each code system.
    // initialize the index readers.
    HashSet<String> indexLocations = systemVars_.getIndexLocations();
    Iterator<String> iterator = indexLocations.iterator();

    indexInterfaces_ = new Hashtable<String, IndexInterface>();
    codingSchemeToIndexMap_ = new Hashtable<String, String>();

    while (iterator.hasNext()) {
        String location = iterator.next();
        File temp = new File(location);
        if (!temp.exists() || !temp.isDirectory()) {
            logger_.error("Bad index location " + location);
        } else {
            IndexInterface is = new IndexInterface(location);
            indexInterfaces_.put(location, is);
            ArrayList<String> keys = is.getCodeSystemKeys();
            for (int i = 0; i < keys.size(); i++) {
                codingSchemeToIndexMap_.put(keys.get(i), location);
            }
        }
    }

    // Start up a thread to handle scheduled deactivations
    fdt_ = new FutureDeactivatorThread();
    deactivatorThread_ = new Thread(fdt_);
    // This allows the JVM to exit while this thread is still active.
    deactivatorThread_.setDaemon(true);
    deactivatorThread_.start();
}
From source file:org.LexGrid.LexBIG.Impl.Extensions.GenericExtensions.LexBIGServiceConvenienceMethodsImpl.java
/**
 * Return the map used to cache codingScheme copyright information, which
 * maps from key (derived from request parameters) to copyright String.
 * <p>
 * Note: Methods requiring the cache should invoke this method rather than
 * directly referencing the class variable in order to allow lazy
 * initialization.
 */
@LgClientSideSafe
protected Map getCache_CopyRights() {
    if (cache_copyRights_ == null)
        cache_copyRights_ = Collections.synchronizedMap(new LRUMap(128));
    return cache_copyRights_;
}
From source file:org.LexGrid.LexBIG.Impl.Extensions.GenericExtensions.LexBIGServiceConvenienceMethodsImpl.java
/**
 * Return the map used to cache codingScheme information, which maps from
 * key (derived from request parameters) to CodingScheme object.
 * <p>
 * Note: Methods requiring the cache should invoke this method rather than
 * directly referencing the class variable in order to allow lazy
 * initialization.
 */
@LgClientSideSafe
protected Map getCache_CodingSchemes() {
    if (cache_codingSchemes_ == null)
        cache_codingSchemes_ = Collections.synchronizedMap(new LRUMap(16));
    return cache_codingSchemes_;
}
From source file:org.LexGrid.LexBIG.Impl.Extensions.GenericExtensions.LexBIGServiceConvenienceMethodsImpl.java
/**
 * Return the map used to cache hierarchy ID information, which maps from
 * key (derived from request parameters) to an array of hierarchy IDs
 * (String[]).
 * <p>
 * Note: Methods requiring the cache should invoke this method rather than
 * directly referencing the class variable in order to allow lazy
 * initialization.
 */
@LgClientSideSafe
protected Map getCache_HIDs() {
    if (cache_hIDs_ == null)
        cache_hIDs_ = Collections.synchronizedMap(new LRUMap(128));
    return cache_hIDs_;
}
From source file:org.LexGrid.LexBIG.Impl.Extensions.GenericExtensions.LexBIGServiceConvenienceMethodsImpl.java
/**
 * Return the map used to cache hierarchy root information, which maps from
 * key (derived from request parameters) to a ResolvedConceptReferenceList.
 * <p>
 * Note: Methods requiring the cache should invoke this method rather than
 * directly referencing the class variable in order to allow lazy
 * initialization.
 */
@LgClientSideSafe
protected Map getCache_HRoots() {
    if (cache_hRoots_ == null)
        cache_hRoots_ = Collections.synchronizedMap(new LRUMap(128));
    return cache_hRoots_;
}
From source file:org.LexGrid.LexBIG.Impl.Extensions.GenericExtensions.LexBIGServiceConvenienceMethodsImpl.java
/**
 * Return the map used to cache hierarchy root code information, which maps
 * from key (derived from request parameters) to an array of root codes
 * (String[]).
 * <p>
 * Note: Methods requiring the cache should invoke this method rather than
 * directly referencing the class variable in order to allow lazy
 * initialization.
 */
@LgClientSideSafe
protected Map getCache_HRootCodes() {
    if (cache_hRootCodes_ == null)
        cache_hRootCodes_ = Collections.synchronizedMap(new LRUMap(128));
    return cache_hRootCodes_;
}
From source file:org.LexGrid.LexBIG.Impl.Extensions.GenericExtensions.LexBIGServiceConvenienceMethodsImpl.java
/**
 * Return the map used to cache hierarchy root existence, which maps from
 * key (derived from request parameters) to a Boolean value.
 * <p>
 * Note: Methods requiring the cache should invoke this method rather than
 * directly referencing the class variable in order to allow lazy
 * initialization.
 */
@LgClientSideSafe
protected Map getCache_HPathToRootExists() {
    if (cache_hPathToRootExists_ == null)
        cache_hPathToRootExists_ = Collections.synchronizedMap(new LRUMap(2048));
    return cache_hPathToRootExists_;
}
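The six cache getters above all follow the same idiom: a lazily created, size-bounded LRUMap wrapped with Collections.synchronizedMap, so callers never touch the raw field. A condensed, hypothetical sketch of that pattern (class and field names, and the added synchronization on the getter, are illustrative and not taken from LexBIGServiceConvenienceMethodsImpl):

import java.util.Collections;
import java.util.Map;
import org.apache.commons.collections.map.LRUMap;

public class LazyLruCacheHolder {
    // Created on first use, so an unused cache costs nothing.
    private Map cache_;

    // Synchronizing the getter is an addition in this sketch; the getters
    // above rely on the class's own client-side-safety conventions instead.
    protected synchronized Map getCache() {
        if (cache_ == null) {
            // LRUMap itself is not thread safe, so wrap it before sharing.
            cache_ = Collections.synchronizedMap(new LRUMap(128));
        }
        return cache_;
    }
}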
From source file:org.LexGrid.util.sql.lgTables.SQLTableUtilities.java
public void computeTransitivityTable(String codingScheme, LgMessageDirectorIF md) throws SQLException {
    Connection conn = getConnection();
    try {
        // now, the fun part...
        PreparedStatement getTransitiveAssociations = conn.prepareStatement("Select "
                + stc_.containerNameOrContainerDC + ", " + SQLTableConstants.TBLCOL_ENTITYCODENAMESPACE + ", "
                + stc_.entityCodeOrAssociationId + " from " + stc_.getTableName(SQLTableConstants.ASSOCIATION)
                + " Where " + SQLTableConstants.TBLCOL_ISTRANSITIVE + " = ? AND " + stc_.codingSchemeNameOrId
                + " = ?");
        DBUtility.setBooleanOnPreparedStatment(getTransitiveAssociations, 1, new Boolean(true));
        getTransitiveAssociations.setString(2, codingScheme);

        ArrayList<StringTriple> transitiveAssociations = new ArrayList<StringTriple>();
        ResultSet results = getTransitiveAssociations.executeQuery();
        while (results.next()) {
            StringTriple temp = new StringTriple();
            temp.a = results.getString(stc_.containerNameOrContainerDC);
            temp.b = results.getString(stc_.entityCodeOrAssociationId);
            temp.c = results.getString(SQLTableConstants.TBLCOL_ENTITYCODENAMESPACE);
            transitiveAssociations.add(temp);
        }
        results.close();
        getTransitiveAssociations.close();

        PreparedStatement getAllRelations = conn.prepareStatement("Select " + stc_.sourceCSIdOrEntityCodeNS
                + ", " + stc_.sourceEntityCodeOrId + ", " + stc_.targetCSIdOrEntityCodeNS + ", "
                + stc_.targetEntityCodeOrId + " from "
                + stc_.getTableName(SQLTableConstants.ENTITY_ASSOCIATION_TO_ENTITY) + " where "
                + stc_.codingSchemeNameOrId + " = ? and " + stc_.containerNameOrContainerDC + " = ? and "
                + stc_.entityCodeOrAssociationId + " = ?");

        PreparedStatement insertIntoTransitive = conn.prepareStatement(
                stc_.getInsertStatementSQL(SQLTableConstants.ENTITY_ASSOCIATION_TO_ENTITY_TRANSITIVE));

        PreparedStatement getTargetsOfSource = conn.prepareStatement("SELECT " + stc_.targetCSIdOrEntityCodeNS
                + ", " + stc_.targetEntityCodeOrId + " FROM "
                + stc_.getTableName(SQLTableConstants.ENTITY_ASSOCIATION_TO_ENTITY) + " WHERE "
                + stc_.codingSchemeNameOrId + " = ? and " + stc_.containerNameOrContainerDC + " = ? and "
                + stc_.entityCodeOrAssociationId + " = ? and " + stc_.sourceCSIdOrEntityCodeNS + " = ? and "
                + stc_.sourceEntityCodeOrId + " = ?");

        PreparedStatement getSourceCodes = conn.prepareStatement("SELECT Distinct "
                + stc_.sourceCSIdOrEntityCodeNS + ", " + stc_.sourceEntityCodeOrId + " FROM "
                + stc_.getTableName(SQLTableConstants.ENTITY_ASSOCIATION_TO_ENTITY) + " WHERE "
                + stc_.codingSchemeNameOrId + " = ? and " + stc_.containerNameOrContainerDC + " = ? and "
                + stc_.entityCodeOrAssociationId + " = ?");

        try {
            for (int i = 0; i < transitiveAssociations.size(); i++) {
                // make a hashset that holds the entire current set of
                // relations.
                getAllRelations.setString(1, codingScheme);
                getAllRelations.setString(2, transitiveAssociations.get(i).a);
                getAllRelations.setString(3, transitiveAssociations.get(i).b);

                String sourceECNS = null;
                String sourceEC = null;
                String targetECNS = null;
                String targetEC = null;

                results = getAllRelations.executeQuery();
                LRUMap insertedCache = new LRUMap(50000);

                while (results.next()) {
                    sourceECNS = results.getString(stc_.sourceCSIdOrEntityCodeNS);
                    sourceEC = results.getString(stc_.sourceEntityCodeOrId);
                    targetECNS = results.getString(stc_.targetCSIdOrEntityCodeNS);
                    targetEC = results.getString(stc_.targetEntityCodeOrId);

                    if (!sourceEC.equals("@") && !targetEC.equals("@@")) {
                        StringTriple sourceCode = new StringTriple();
                        sourceCode.a = sourceECNS;
                        sourceCode.c = sourceEC;
                        StringTriple targetCode = new StringTriple();
                        targetCode.a = targetECNS;
                        targetCode.c = targetEC;

                        insertIntoTransitiveClosure(codingScheme, insertIntoTransitive,
                                transitiveAssociations.get(i), sourceCode, targetCode, insertedCache);
                    }
                }
                results.close();

                // get the unique source codes for this relationship - and
                // get all of the codes.
                md.info("ComputeTransitive - Processing " + (transitiveAssociations.get(i)).b);

                getSourceCodes.setString(1, codingScheme);
                getSourceCodes.setString(2, transitiveAssociations.get(i).a);
                getSourceCodes.setString(3, transitiveAssociations.get(i).b);
                results = getSourceCodes.executeQuery();

                ArrayList<StringTriple> sourceCodes = new ArrayList<StringTriple>();
                sourceECNS = null;
                sourceEC = null;
                targetECNS = null;
                targetEC = null;

                while (results.next()) {
                    sourceECNS = results.getString(stc_.sourceCSIdOrEntityCodeNS);
                    sourceEC = results.getString(stc_.sourceEntityCodeOrId);
                    if (!sourceEC.equals("@")) {
                        StringTriple temp = new StringTriple();
                        temp.a = sourceECNS;
                        temp.c = sourceEC;
                        sourceCodes.add(temp);
                    }
                }
                results.close();

                // Now I have all of the top source codes for this relationship.
                // Need to recurse down the tree, adding nodes to the transitive
                // table as necessary.
                for (int j = 0; j < sourceCodes.size(); j++) {
                    getTargetsOfSource.setString(1, codingScheme);
                    getTargetsOfSource.setString(2, transitiveAssociations.get(i).a);
                    getTargetsOfSource.setString(3, transitiveAssociations.get(i).b);
                    getTargetsOfSource.setString(4, sourceCodes.get(j).a);
                    getTargetsOfSource.setString(5, sourceCodes.get(j).c);
                    results = getTargetsOfSource.executeQuery();

                    ArrayList<StringTriple> targetCodes = new ArrayList<StringTriple>();
                    sourceECNS = null;
                    sourceEC = null;
                    targetECNS = null;
                    targetEC = null;

                    while (results.next()) {
                        targetECNS = results.getString(stc_.targetCSIdOrEntityCodeNS);
                        targetEC = results.getString(stc_.targetEntityCodeOrId);

                        if (!targetEC.equals("@@")) {
                            StringTriple temp = new StringTriple();
                            temp.a = targetECNS;
                            temp.c = targetEC;
                            targetCodes.add(temp);
                        }
                    }
                    results.close();

                    processTransitive(codingScheme, transitiveAssociations.get(i), sourceCodes.get(j),
                            targetCodes, getTargetsOfSource, insertIntoTransitive, insertedCache);
                }
            }
        } finally {
            getAllRelations.close();
            insertIntoTransitive.close();
            getTargetsOfSource.close();
            getSourceCodes.close();
        }
    } finally {
        returnConnection(conn);
    }
}
From source file:org.lockss.crawler.FollowLinkCrawler.java
protected void setCrawlConfig(Configuration config) {
    super.setCrawlConfig(config);

    // Do *not* require that maxDepth be greater than refetchDepth. Plugin
    // writers set refetchDepth high to mean infinite.
    maxDepth = config.getInt(PARAM_MAX_CRAWL_DEPTH, DEFAULT_MAX_CRAWL_DEPTH);

    excludedUrlCache = new LRUMap(config.getInt(PARAM_EXCLUDED_CACHE_SIZE, DEFAULT_EXCLUDED_CACHE_SIZE));

    crawlEndReportEmail = config.get(PARAM_CRAWL_END_REPORT_EMAIL, DEFAULT_CRAWL_END_REPORT_EMAIL);
    crawlEndReportHashAlg = config.get(PARAM_CRAWL_END_REPORT_HASH_ALG, DEFAULT_CRAWL_END_REPORT_HASH_ALG);

    isRefetchEmptyFiles = config.getBoolean(PARAM_REFETCH_EMPTY_FILES, DEFAULT_REFETCH_EMPTY_FILES)
            && !isIgnoredException(
                    AuUtil.mapException(au, null, new ContentValidationException.EmptyFile(), null));
}