List of usage examples for the org.apache.commons.collections.map.LRUMap constructor
public LRUMap(Map map)
From source file:edu.mayo.informatics.lexgrid.convert.directConversions.UMLSToSQL.java
/**
 * Loads a single concept and all of its text presentations into the target SQL
 * tables.
 *
 * <p>The presentations are sorted so the "best" presentation for each language
 * comes first (default language at the very top); the first presentation of
 * each language is then flagged as preferred. The best overall presentation
 * seeds the concepts table, each presentation becomes an entity property, and
 * finally definitions and other properties are loaded.
 *
 * @param UMLSCodingSchemeName the source UMLS coding scheme name
 * @param codingSchemeName the target coding scheme name
 * @param conceptPresentations presentations for one concept; must contain at
 *            least one element
 * @throws SQLException if a database operation other than the initial concept
 *             insert fails (a failed concept insert is logged and skipped)
 */
private void loadConcept(String UMLSCodingSchemeName, String codingSchemeName,
        ConceptPresentation[] conceptPresentations) throws SQLException {
    // Order them - this will organize them by language, and put the "best"
    // presentation first for each language. The default language will be at
    // the very top.
    Arrays.sort(conceptPresentations, new ConceptPresentationSorter());

    // Now set isPreferred to true for the first entry of each language.
    // (Boolean.TRUE/Boolean.FALSE replace the deprecated new Boolean(...)
    // boxing constructors - behavior is unchanged.)
    String prevLang = conceptPresentations[0].language;
    conceptPresentations[0].isPreferred = Boolean.TRUE;
    for (int i = 1; i < conceptPresentations.length; i++) {
        if (!conceptPresentations[i].language.equals(prevLang)) {
            conceptPresentations[i].isPreferred = Boolean.TRUE;
        }
        prevLang = conceptPresentations[i].language;
    }

    // Load the entry into the concepts table - must be at least 1 row. The
    // first one will be the best text presentation to use here.
    try {
        addConceptToConcepts(codingSchemeName, conceptPresentations[0].conceptCode, null, null, null,
                Boolean.TRUE, null, Boolean.FALSE, SQLTableConstants.TBLCOL_ISACTIVE, Boolean.FALSE,
                conceptPresentations[0].value);
    } catch (SQLException e) {
        // Recover from a failure adding a new code. Likely caused by trying
        // to insert a duplicate code - some code systems are case sensitive
        // - and most of our databases aren't.
        log.error("Problem inserting new code " + conceptPresentations[0].conceptCode, e);
        messages_.info("ERROR - Problem inserting new code " + conceptPresentations[0].conceptCode);
        return;
    }

    // Maps presentation AUI -> generated property id so later property loads
    // can reference the matching presentation. LRU-bounded to 20 entries.
    LRUMap TValues = new LRUMap(20);

    // Load its properties - one textual-presentation property per entry.
    for (int i = 0; i < conceptPresentations.length; i++) {
        String propertyId = "T-" + presentationCounter_++;
        addToEntityProperty(codingSchemeName, SQLTableConstants.ENTITYTYPE_CONCEPT,
                conceptPresentations[i].conceptCode, propertyId, SQLTableConstants.TBLCOLVAL_PRESENTATION,
                SQLTableConstants.TBLCOLVAL_TEXTUALPRESENTATION, conceptPresentations[i].language,
                conceptPresentations[i].presentationFormat, conceptPresentations[i].isPreferred, null, null,
                conceptPresentations[i].representationForm, conceptPresentations[i].value);
        TValues.put(conceptPresentations[i].AUI, propertyId);
    }

    // Load the definitions.
    addToDefinitions(UMLSCodingSchemeName, conceptPresentations[0].cui, codingSchemeName,
            conceptPresentations[0].conceptCode);

    // Load the other properties.
    loadOtherProperties(UMLSCodingSchemeName, conceptPresentations[0].cui, codingSchemeName,
            conceptPresentations[0].conceptCode, TValues);

    // TODO dig the history out - add the modVersion (not now)
    // addConceptToConceptsMultiAttributes(codingSchemeName, conceptCode,
    // "modVersion", "");
    // This is where the usageContext would be loaded - but we don't have any.
    // Also, if we were to load the UMLS as a coding scheme (instead of
    // individual coding schemes) this is where we would put the source
    // information:
    // addConceptToConceptPropertyMultiAttributes(codingSchemeName,
    // conceptCode, propertyId, "", "", "", "");
}
From source file:edu.mayo.informatics.lexgrid.convert.directConversions.MetaThesaurusToSQL.java
/** * High level method with responsibility to map relationship content from * UMLS RRF-based format to LexGrid model. * //from w ww . j a v a2 s.c o m * @param codingSchemeName * @throws SQLException */ protected void loadRelations(String codingSchemeName) throws SQLException { // Sort the associations, loading RELA's first and keeping // primary names (potentially qualified by different SABs) // together. Arrays.sort(supportedAssociations_, new AssociationSorter()); for (int i = 0; i < supportedAssociations_.length; i++) { Association assoc = (Association) supportedAssociations_[i]; if (lastAssociation == null || !lastAssociation.equals(assoc.name)) { // Moving on to a new association name. lastAssociation = assoc.name; // Reset the map used to track source/target combinations // loaded. // 100,000 should be more than big enough for the other // associations, // items are ordered by source code. alreadyLoadedAssociations.clear(); alreadyLoadedAssociations = new LRUMap(100000); } int count = loadRelationsHelper(assoc, codingSchemeName); if (count == 0) { messages_.info("No relations were found for " + assoc.toShortString()); log.warn("No relations were found for " + assoc.toShortString()); } } // Now we know all the supported coding schemes (some may have been // identified as source or target while processing relationships) // and association qualifiers. Add them now to the coding scheme // metadata. loadSupportedCodingSchemes(codingSchemeName); loadSupportedAssociationQualifiers(codingSchemeName); loadSupportedPropertyLinks(codingSchemeName); }
From source file:net.sourceforge.vulcan.core.support.BuildOutcomeCache.java
/**
 * Builds a size-bounded map of build outcomes that evicts the
 * least-recently-used entry once {@code maxEntries} is exceeded.
 *
 * @param maxEntries maximum number of cached outcomes to retain
 * @return a new LRU-backed map, unchecked-cast to the typed interface
 */
@SuppressWarnings("unchecked")
private static Map<UUID, ProjectStatusDto> createLRUMap(int maxEntries) {
    final Map<UUID, ProjectStatusDto> cache = new LRUMap(maxEntries);
    return cache;
}
From source file:org.agnitas.util.TimeoutLRUMap.java
/**
 * Creates a new TimeoutLRUMap with default settings: the backing LRU map
 * holds at most 1000 entries and entries expire after 5000 milliseconds.
 */
public TimeoutLRUMap() {
    internalMap = new LRUMap(1000); // default capacity
    timeoutInMillis = 5000; // default timeout, in milliseconds
}
From source file:org.agnitas.util.TimeoutLRUMap.java
/**
 * Creates a new instance of TimeoutLRUMap.
 *
 * @param capacity maximum number of entries held by the backing LRU map
 * @param timeoutInMillis time in milliseconds after which an entry expires
 *            (validated/applied via {@code setTimeout})
 */
public TimeoutLRUMap(int capacity, long timeoutInMillis) {
    setTimeout(timeoutInMillis);
    internalMap = new LRUMap(capacity);
}
From source file:org.alfresco.repo.workflow.MemoryCache.java
/**
 * Instantiates a new memory cache bounded to {@code size} entries; once the
 * bound is reached the least-recently-used entry is evicted.
 *
 * @param size maximum number of entries to retain
 */
@SuppressWarnings("unchecked")
public MemoryCache(int size) {
    maxSize = size;
    map = new LRUMap(size); // same value just stored in maxSize
}
From source file:org.apache.accumulo.core.client.mapred.AccumuloFileOutputFormat.java
/**
 * Builds a RecordWriter that streams key/value pairs into a single Accumulo
 * file under the job's work output path, validating each distinct column
 * visibility expression once via an LRU cache.
 */
@Override
public RecordWriter<Key, Value> getRecordWriter(FileSystem ignored, JobConf job, String name,
        Progressable progress) throws IOException {
    // get the path of the temporary output file
    final Configuration conf = job;
    final AccumuloConfiguration acuConf = FileOutputConfigurator.getAccumuloConfiguration(CLASS, job);
    final String extension = acuConf.get(Property.TABLE_FILE_TYPE);
    final Path file = new Path(getWorkOutputPath(job), getUniqueName(job, "part") + "." + extension);
    // LRU cache of visibility expressions already parsed successfully,
    // bounded by the configured visibility cache size.
    final LRUMap validVisibilities = new LRUMap(ConfiguratorBase.getVisibilityCacheSize(conf));
    return new RecordWriter<Key, Value>() {
        // Created lazily on the first write() so an empty task produces no file.
        FileSKVWriter out = null;

        @Override
        public void close(Reporter reporter) throws IOException {
            if (out != null)
                out.close();
        }

        @Override
        public void write(Key key, Value value) throws IOException {
            // Parse each distinct column visibility only once; the
            // ColumnVisibility constructor throws if it is malformed.
            Boolean wasChecked = (Boolean) validVisibilities.get(key.getColumnVisibilityData());
            if (wasChecked == null) {
                byte[] cv = key.getColumnVisibilityData().toArray();
                new ColumnVisibility(cv);
                // Cache under a defensive copy of the raw bytes.
                validVisibilities.put(new ArrayByteSequence(Arrays.copyOf(cv, cv.length)), Boolean.TRUE);
            }
            if (out == null) {
                // First record: open the file writer and start the default
                // locality group.
                out = FileOperations.getInstance().newWriterBuilder()
                        .forFile(file.toString(), file.getFileSystem(conf), conf)
                        .withTableConfiguration(acuConf).build();
                out.startDefaultLocalityGroup();
            }
            out.append(key, value);
        }
    };
}
From source file:org.apache.accumulo.core.client.mapreduce.AccumuloFileOutputFormat.java
/**
 * Builds a RecordWriter that streams key/value pairs into a single Accumulo
 * file under the task's default work file, validating each distinct column
 * visibility expression once via an LRU cache.
 */
@Override
public RecordWriter<Key, Value> getRecordWriter(TaskAttemptContext context) throws IOException {
    // get the path of the temporary output file
    final Configuration conf = context.getConfiguration();
    final AccumuloConfiguration acuConf = FileOutputConfigurator.getAccumuloConfiguration(CLASS,
            context.getConfiguration());
    final String extension = acuConf.get(Property.TABLE_FILE_TYPE);
    final Path file = this.getDefaultWorkFile(context, "." + extension);
    // NOTE(review): the visibility cache size is hardcoded to 1000 here,
    // while the mapred counterpart derives it from
    // ConfiguratorBase.getVisibilityCacheSize(conf) - confirm whether this
    // one should be configurable as well.
    final LRUMap validVisibilities = new LRUMap(1000);
    return new RecordWriter<Key, Value>() {
        // Created lazily on the first write() so an empty task produces no file.
        FileSKVWriter out = null;

        @Override
        public void close(TaskAttemptContext context) throws IOException {
            if (out != null)
                out.close();
        }

        @Override
        public void write(Key key, Value value) throws IOException {
            // Parse each distinct column visibility only once; the
            // ColumnVisibility constructor throws if it is malformed.
            Boolean wasChecked = (Boolean) validVisibilities.get(key.getColumnVisibilityData());
            if (wasChecked == null) {
                byte[] cv = key.getColumnVisibilityData().toArray();
                new ColumnVisibility(cv);
                // Cache under a defensive copy of the raw bytes.
                validVisibilities.put(new ArrayByteSequence(Arrays.copyOf(cv, cv.length)), Boolean.TRUE);
            }
            if (out == null) {
                // First record: open the file writer and start the default
                // locality group.
                out = FileOperations.getInstance().newWriterBuilder()
                        .forFile(file.toString(), file.getFileSystem(conf), conf)
                        .withTableConfiguration(acuConf).build();
                out.startDefaultLocalityGroup();
            }
            out.append(key, value);
        }
    };
}
From source file:org.apache.accumulo.core.client.rfile.RFileWriter.java
/**
 * Wraps the given low-level file writer and prepares an LRU cache, bounded
 * to {@code visCacheSize} entries, for visibility expressions that have
 * already been validated.
 */
RFileWriter(FileSKVWriter fileSKVWriter, int visCacheSize) {
    this.validVisibilities = new LRUMap(visCacheSize);
    this.writer = fileSKVWriter;
}
From source file:org.apache.accumulo.core.iterators.filter.VisibilityFilter.java
/**
 * Creates a filter that evaluates column visibilities against the supplied
 * authorizations, caching the outcome for up to 1000 distinct expressions.
 *
 * @param authorizations the authorizations to evaluate visibilities against
 * @param defaultVisibility visibility applied to entries with an empty one
 */
public VisibilityFilter(Authorizations authorizations, byte[] defaultVisibility) {
    this.tmpVis = new Text();
    this.cache = new LRUMap(1000); // LRU-bounded evaluation results
    this.defaultVisibility = new Text(defaultVisibility);
    this.ve = new VisibilityEvaluator(authorizations);
}