List of usage examples for org.apache.commons.lang.time StopWatch start
public void start()
Start the stopwatch.
This method starts a new timing session, clearing any previous values.
From source file:edu.utah.further.i2b2.hook.further.FurtherInterceptionFilter.java
/** * Send the i2b2 query request message to the FURTHeR FQE. Must be run after the i2b2 * processing chain, because it depends on the i2b2 query ID generated by the i2b2 * server.//from w ww .j a va2s . c o m * * @param request * @param i2b2QueryId * i2b2 query ID, obtained from the i2b2 response */ private void spawnFurtherRequest(final HttpServletRequest request, final long i2b2QueryId) { if (log.isDebugEnabled()) { log.debug("Read i2b2QueryId from request: " + i2b2QueryId); } try { // Need to read create from request.getInputStream() multiple times // in this method ==> save a copy in a buffer first // inputStream is already at the end of the file. final InputStream inputStream = request.getInputStream(); final byte[] buffer = CoreUtil.readBytesFromStream(inputStream); inputStream.close(); // Decide whether to fork or not if (StringUtil.isValidLong(i2b2QueryId) && isProcessRequest(buffer)) { // Read the FURTHeR section of the i2b2 request body final String requestXml = new String(buffer); // Request contains an FQE processing flag, send to FURTHeR if (log.isDebugEnabled()) { ServletUtil.printRequestHeaders(request); ServletUtil.printRequestParameters(request); ServletUtil.printRequestAttributes(request); } // TODO: read query instance id from i2b2 response and pass to the // following call final QueryContextIdentifier id = furtherServices.i2b2QueryRequest(requestXml, i2b2QueryId); // Make available to response through the request, ensures thread safety // instead of using instance var request.setAttribute(QUERY_ID, id); QueryState state = furtherServices.getQueryState(id).getState(); final StopWatch stopWatch = new StopWatch(); final int interval = 10; int i = 0; stopWatch.start(); // Poll state every sec final long maxQueryTimeMillis = furtherServices.getMaxQueryTime() * 1000; while (state != QueryState.COMPLETED && state != QueryState.STOPPED && state != QueryState.FAILED && state != QueryState.INVALID && state != null && stopWatch.getTime() < 
maxQueryTimeMillis) { Thread.yield(); state = furtherServices.getQueryState(id).getState(); if (log.isDebugEnabled() && ((i % interval) == 0)) { log.debug("QueryState for query " + id.getId() + ": " + state); } i++; } stopWatch.stop(); } else { if (log.isDebugEnabled()) { log.info("Ignoring unrecognized/irrelvant requestXml"); } } } catch (final Throwable throwable) { if (log.isDebugEnabled()) { log.error("Caught " + throwable + ", ignoring", throwable); } } }
From source file:com.qualogy.qafe.mgwt.server.RPCServiceImpl.java
public GDataObject executeEvent(EventDataGVO eventData) throws GWTServiceException { StopWatch stopWatch = new StopWatch(); try {//from ww w . j a v a 2 s .com stopWatch.start(); String appIdUUID = eventData.getUuid().substring(eventData.getUuid().lastIndexOf('|') + 1);// uuid.substring(uuid.lastIndexOf('|')+1); ApplicationIdentifier appId = service.getApplicationId(appIdUUID); GDataObject gDataObject = null; if (appId != null) { eventData.setContext(appId.toString()); resolveRequest(eventData); gDataObject = eventProcessor.execute(eventData, appId, new SessionContainer(getLocale(), eventData.getParameters())); } stopWatch.stop(); if (gDataObject != null) { gDataObject.setTime(Long.valueOf(stopWatch.getTime())); } return gDataObject; } catch (Exception e) { GWTServiceException gWTServiceException = handleException(e); gWTServiceException.setGDataObject(ExceptionProcessor.handle(eventData, e)); stopWatch.stop(); if (gWTServiceException.getGDataObject() != null) { gWTServiceException.getGDataObject().setTime(Long.valueOf(stopWatch.getTime())); } throw gWTServiceException; } }
From source file:com.liferay.journal.util.impl.JournalContentImpl.java
/**
 * Returns the rendered display for the given journal article, serving it from
 * the portal cache when a cached render for the same cache key exists.
 *
 * Cache entries are keyed on group, article id/version, template, layout set,
 * view mode, language, page and security mode; entries are only written (and
 * only trusted) during the RENDER portlet phase.
 *
 * @param article the article to render
 * @param ddmTemplateKey template key; falls back to the article's own template
 *        when blank
 * @param viewMode portlet view mode
 * @param languageId language to render in
 * @param page page number within the article
 * @param portletRequestModel current portlet request model, may be null
 * @param themeDisplay theme display, may be null (skips the permission check)
 * @return the article display, or null when the current user lacks VIEW
 *         permission
 */
@Override
public JournalArticleDisplay getDisplay(JournalArticle article, String ddmTemplateKey, String viewMode,
        String languageId, int page, PortletRequestModel portletRequestModel, ThemeDisplay themeDisplay) {
    StopWatch stopWatch = new StopWatch();
    stopWatch.start();
    long groupId = article.getGroupId();
    String articleId = article.getArticleId();
    double version = article.getVersion();
    // Normalize keys so cache lookups are case-insensitive
    articleId = StringUtil.toUpperCase(GetterUtil.getString(articleId));
    ddmTemplateKey = StringUtil.toUpperCase(GetterUtil.getString(ddmTemplateKey));
    long layoutSetId = 0;
    boolean secure = false;
    if (themeDisplay != null) {
        try {
            if (!JournalArticlePermission.contains(themeDisplay.getPermissionChecker(), article,
                    ActionKeys.VIEW)) {
                return null;
            }
        } catch (Exception e) {
            // NOTE(review): a failed permission check is swallowed and the
            // article is rendered anyway - confirm this is intentional.
        }
        LayoutSet layoutSet = themeDisplay.getLayoutSet();
        layoutSetId = layoutSet.getLayoutSetId();
        secure = themeDisplay.isSecure();
    }
    if (Validator.isNull(ddmTemplateKey)) {
        ddmTemplateKey = article.getDDMTemplateKey();
    }
    JournalContentKey journalContentKey = new JournalContentKey(groupId, articleId, version, ddmTemplateKey,
            layoutSetId, viewMode, languageId, page, secure);
    JournalArticleDisplay articleDisplay = _portalCache.get(journalContentKey);
    boolean lifecycleRender = false;
    if (portletRequestModel != null) {
        lifecycleRender = RenderRequest.RENDER_PHASE.equals(portletRequestModel.getLifecycle());
    }
    // Re-render on cache miss, or whenever we are outside the RENDER phase
    // (cached output is only valid for render-phase requests)
    if ((articleDisplay == null) || !lifecycleRender) {
        articleDisplay = getArticleDisplay(article, ddmTemplateKey, viewMode, languageId, page,
                portletRequestModel, themeDisplay);
        // Only cache render-phase output that declares itself cacheable
        if ((articleDisplay != null) && articleDisplay.isCacheable() && lifecycleRender) {
            _portalCache.put(journalContentKey, articleDisplay);
        }
    }
    if (_log.isDebugEnabled()) {
        _log.debug(StringBundler.concat("getDisplay for {", String.valueOf(groupId), ", ", articleId, ", ",
                ddmTemplateKey, ", ", viewMode, ", ", languageId, ", ", String.valueOf(page), "} takes ",
                String.valueOf(stopWatch.getTime()), " ms"));
    }
    return articleDisplay;
}
From source file:ch.systemsx.cisd.openbis.generic.server.business.bo.samplelister.SampleListingWorker.java
/**
 * Load the samples defined by the criteria given to the constructor. The samples will be
 * enriched with sample properties and dependencies to parents and container will be resolved.
 *
 * Side effects: populates {@code groupMap}, {@code sampleMap} and
 * {@code sampleList} as it goes; the retrieval helpers read from those maps.
 * Statement order therefore matters throughout.
 *
 * @return the list of primary samples matching the criteria
 */
public List<Sample> load() {
    final StopWatch watch = new StopWatch();
    watch.start();
    final Experiment expOrNull = tryLoadExperiment();
    final boolean oneGroupPerSample = isOneGroupPerSamples();
    final Group groupOrNull;
    if (oneGroupPerSample) {
        // Samples may live in different groups: preload every group of the
        // database instance into groupMap for later per-sample resolution.
        groupOrNull = null;
        final Group[] groups = referencedEntityDAO.getAllGroups(databaseInstanceId);
        for (Group group : groups) {
            group.setInstance(databaseInstance);
            groupMap.put(group.getId(), group);
        }
    } else {
        // All samples share one group (possibly derived from the experiment).
        groupOrNull = tryLoadGroup(expOrNull);
        if (groupOrNull != null) {
            // For use by dependent samples.
            groupMap.put(groupOrNull.getId(), groupOrNull);
        }
    }
    loadSampleTypes();
    // Retrieve the primary samples from every supported criterion source; each
    // iterator may be null when its criterion is not set.
    retrievePrimaryBasicSamples(tryGetIteratorForSamplesByIds(), groupOrNull, oneGroupPerSample);
    retrievePrimaryBasicSamples(tryGetIteratorForGroupSamples(), groupOrNull, oneGroupPerSample);
    retrievePrimaryBasicSamples(tryGetIteratorForSharedSamples(), groupOrNull, oneGroupPerSample);
    retrievePrimaryBasicSamples(tryGetIteratorForExperimentSamples(), groupOrNull, oneGroupPerSample);
    retrievePrimaryBasicSamples(tryGetIteratorForContainedSamples(), groupOrNull, oneGroupPerSample);
    if (operationLog.isDebugEnabled()) {
        // The watch is stopped/reset only in debug mode; timing is purely
        // diagnostic.
        watch.stop();
        operationLog.debug(
                String.format("Basic retrieval of %d samples took %s s", sampleList.size(), watch.toString()));
        watch.reset();
        watch.start();
    }
    // Only enrich the "primary" samples (matching the criteria) with properties, not
    // dependent samples.
    if (samplePropertiesEnricherOrNull != null) {
        samplePropertiesEnricherOrNull.enrich(sampleMap.keySet(), new IEntityPropertiesHolderResolver() {
            // Resolves a sample id back to the already-loaded Sample instance.
            public Sample get(long id) {
                return sampleMap.get(id);
            }
        });
        if (operationLog.isDebugEnabled()) {
            watch.stop();
            operationLog.debug(String.format("Enrichment with properties took %s s", watch.toString()));
        }
    }
    // Must run after enrichment: pulls in parents/containers referenced by the
    // primary samples, then wires up the object graph.
    retrieveDependentSamplesRecursively();
    resolveParents();
    resolveContainers();
    return sampleList;
}
From source file:com.microsoft.exchange.integration.AbstractIntegrationTest.java
/**
 * Create a single {@link CalendarItemType} and submit it with
 * {@link ExchangeWebServicesClient#createItem(CreateItem)}. Verifies the
 * response reports NO_ERROR and always deletes the created item afterwards.
 *
 * @throws JAXBException on response capture failure
 */
@Test
public void testCreateCalendarItem() throws JAXBException {
    NonEmptyArrayOfBaseItemIdsType createdIds = new NonEmptyArrayOfBaseItemIdsType();
    try {
        initializeCredentials();

        // Build a one-hour event on 2012-11-09.
        final Date eventStart = DateHelp.parseDateTimePhrase("20121109-1200");
        final Date eventEnd = DateHelp.parseDateTimePhrase("20121109-1300");
        CalendarItemType event = new CalendarItemType();
        event.setStart(DateHelp.convertDateToXMLGregorianCalendar(eventStart));
        event.setEnd(DateHelp.convertDateToXMLGregorianCalendar(eventEnd));
        event.setSubject("integration test: testCreateCalendarItem");
        event.setLocation("test location");
        BodyType eventBody = new BodyType();
        eventBody.setBodyType(BodyTypeType.TEXT);
        eventBody.setValue("test ran at " + new Date());
        event.setBody(eventBody);

        // Target the default Calendar folder.
        DistinguishedFolderIdType calendarFolder = new DistinguishedFolderIdType();
        calendarFolder.setId(DistinguishedFolderIdNameType.CALENDAR);
        TargetFolderIdType targetFolder = new TargetFolderIdType();
        targetFolder.setDistinguishedFolderId(calendarFolder);

        // Assemble the CreateItem request around the single event.
        NonEmptyArrayOfAllItemsType itemsToCreate = new NonEmptyArrayOfAllItemsType();
        itemsToCreate.getItemsAndMessagesAndCalendarItems().add(event);
        CreateItem createRequest = new CreateItem();
        createRequest.setSendMeetingInvitations(CalendarItemCreateOrDeleteOperationType.SEND_TO_ALL_AND_SAVE_COPY);
        createRequest.setItems(itemsToCreate);
        createRequest.setSavedItemFolderId(targetFolder);

        // Submit, timing the round trip.
        StopWatch timer = new StopWatch();
        timer.start();
        CreateItemResponse createResponse = ewsClient.createItem(createRequest);
        timer.stop();
        Assert.assertNotNull(createResponse);
        String responseXml = capture(createResponse);
        log.debug("CreateItem request (1 CalendarItem) completed in " + timer + ", response: " + responseXml);

        // Exactly one response message, reporting success.
        ArrayOfResponseMessagesType messages = createResponse.getResponseMessages();
        Assert.assertNotNull(messages);
        Assert.assertEquals(1, messages
                .getCreateItemResponseMessagesAndDeleteItemResponseMessagesAndGetItemResponseMessages().size());
        JAXBElement<? extends ResponseMessageType> responseMessage = messages
                .getCreateItemResponseMessagesAndDeleteItemResponseMessagesAndGetItemResponseMessages().get(0);
        Assert.assertEquals(ResponseCodeType.NO_ERROR, responseMessage.getValue().getResponseCode());

        // Remember the created item id so the finally block can clean it up.
        ItemInfoResponseMessageType infoMessage = (ItemInfoResponseMessageType) responseMessage.getValue();
        ArrayOfRealItemsType realItems = infoMessage.getItems();
        ItemType createdItem = realItems.getItemsAndMessagesAndCalendarItems().get(0);
        createdIds.getItemIdsAndOccurrenceItemIdsAndRecurringMasterItemIds().add(createdItem.getItemId());
    } finally {
        deleteItems(createdIds);
    }
}
From source file:com.microsoft.exchange.integration.AbstractIntegrationTest.java
/**
 * Create 3 {@link CalendarItemType}s and submit with 1
 * {@link ExchangeWebServicesClient#createItem(CreateItem)} invocation.
 * Verifies one NO_ERROR response message per item and always deletes the
 * created items afterwards.
 */
@Test
public void testCreate3CalendarItems() {
    NonEmptyArrayOfBaseItemIdsType createdIds = new NonEmptyArrayOfBaseItemIdsType();
    try {
        initializeCredentials();
        // Three consecutive one-hour events on 2012-11-09.
        CalendarItemType item1 = constructCalendarItem(DateHelp.parseDateTimePhrase("20121109-1300"),
                DateHelp.parseDateTimePhrase("20121109-1400"),
                "integration test: testCreate3CalendarItems, item1", "test location",
                "test ran at " + new Date());
        CalendarItemType item2 = constructCalendarItem(DateHelp.parseDateTimePhrase("20121109-1400"),
                DateHelp.parseDateTimePhrase("20121109-1500"),
                "integration test: testCreate3CalendarItems, item2", "test location",
                "test ran at " + new Date());
        CalendarItemType item3 = constructCalendarItem(DateHelp.parseDateTimePhrase("20121109-1500"),
                DateHelp.parseDateTimePhrase("20121109-1600"),
                "integration test: testCreate3CalendarItems, item3", "test location",
                "test ran at " + new Date());
        // Single CreateItem request carrying all three items.
        CreateItem request = new CreateItem();
        request.setSendMeetingInvitations(CalendarItemCreateOrDeleteOperationType.SEND_TO_ALL_AND_SAVE_COPY);
        NonEmptyArrayOfAllItemsType arrayOfItems = new NonEmptyArrayOfAllItemsType();
        arrayOfItems.getItemsAndMessagesAndCalendarItems().add(item1);
        arrayOfItems.getItemsAndMessagesAndCalendarItems().add(item2);
        arrayOfItems.getItemsAndMessagesAndCalendarItems().add(item3);
        request.setItems(arrayOfItems);
        // Save into the default Calendar folder.
        DistinguishedFolderIdType folder = new DistinguishedFolderIdType();
        folder.setId(DistinguishedFolderIdNameType.CALENDAR);
        TargetFolderIdType target = new TargetFolderIdType();
        target.setDistinguishedFolderId(folder);
        request.setSavedItemFolderId(target);
        // Submit, timing the round trip.
        StopWatch stopWatch = new StopWatch();
        stopWatch.start();
        CreateItemResponse response = ewsClient.createItem(request);
        stopWatch.stop();
        log.debug("CreateItem request (3 CalendarItems) completed in " + stopWatch);
        Assert.assertNotNull(response);
        // Expect one response message per submitted item, each NO_ERROR.
        ArrayOfResponseMessagesType responseMessages = response.getResponseMessages();
        Assert.assertNotNull(responseMessages);
        Assert.assertEquals(3, responseMessages
                .getCreateItemResponseMessagesAndDeleteItemResponseMessagesAndGetItemResponseMessages().size());
        for (JAXBElement<? extends ResponseMessageType> m : responseMessages
                .getCreateItemResponseMessagesAndDeleteItemResponseMessagesAndGetItemResponseMessages()) {
            Assert.assertEquals(ResponseCodeType.NO_ERROR, m.getValue().getResponseCode());
            // Collect each created item id so the finally block can clean up.
            ItemInfoResponseMessageType itemType = (ItemInfoResponseMessageType) m.getValue();
            ArrayOfRealItemsType itemArray = itemType.getItems();
            ItemType item = itemArray.getItemsAndMessagesAndCalendarItems().get(0);
            createdIds.getItemIdsAndOccurrenceItemIdsAndRecurringMasterItemIds().add(item.getItemId());
        }
    } finally {
        deleteItems(createdIds);
    }
}
From source file:chibi.gemmaanalysis.ExperimentMetaDataExtractorCli.java
public void generateExperimentMetaData(Collection<BioAssaySet> expressionExperiments) throws IOException { File file = getOutputFile(this.viewFile); try (Writer writer = new OutputStreamWriter(new GZIPOutputStream(new FileOutputStream(file)));) { String[] colNames = { "ShortName", "Taxon", "DateUpload", "IsPublic", "NumPlatform", "Platform", "Channel", "IsExonArray", "QtIsRatio", "QtIsNormalized", "QtScale", "NumProfiles", "NumFilteredProfiles", "NumSamples", "NumConditions", "NumReplicatesPerCondition", "PossibleOutliers", "CuratedOutlier", "IsTroubled", "PubTroubled", "PubYear", "PubJournal", "Batch.PC1.Var", "Batch.PC2.Var", "Batch.PC3.Var", "Batch.PC1.Pval", "Batch.PC2.Pval", "Batch.PC3.Pval", "NumFactors", "FactorNames", "FactorCategories", "NumFactorValues" }; // log.info( StringUtils.join( colNames, "\t" ) + "\n" ); writer.write(StringUtils.join(colNames, "\t") + "\n"); int i = 0; Collection<String> failedEEs = new ArrayList<>(); StopWatch timer = new StopWatch(); timer.start(); for (BioAssaySet bas : expressionExperiments) { /*//from ww w. j a v a2 s . co m * Skip subsets */ if (bas instanceof ExpressionExperimentSubSet) return; try { ExpressionExperiment ee = (ExpressionExperiment) bas; ee = eeService.thawLite(ee); ExpressionExperimentValueObject vo = eeService.loadValueObject(ee.getId()); vo.setIsPublic(!securityService.isPrivate(ee)); log.info("Processing (" + ++i + "/" + expressionExperiments.size() + ") : " + ee); BibliographicReference primaryPublication = ee.getPrimaryPublication(); Status pubStatus = primaryPublication != null ? statusService.getStatus(primaryPublication) : null; Collection<ArrayDesign> arrayDesignsUsed = eeService.getArrayDesignsUsed(ee); Collection<String> arrayDesignIsExon = new ArrayList<>(); Collection<String> arrayDesignTechTypes = new ArrayList<>(); Collection<String> arrayDesignShortNames = new ArrayList<>(); // for multiple platforms e.g. 
GSE5949 for (ArrayDesign ad : arrayDesignsUsed) { arrayDesignShortNames.add(ad.getShortName()); arrayDesignTechTypes.add(ad.getTechnologyType().getValue()); arrayDesignIsExon.add(ad.getName().toLowerCase().contains("exon") + ""); } QuantitationType qt = null; for (QuantitationType q : ee.getQuantitationTypes()) { if (q.getIsPreferred().booleanValue()) { qt = q; break; } } int manualOutlierCount = 0; for (BioAssay ba : ee.getBioAssays()) { if (ba.getIsOutlier().booleanValue()) { manualOutlierCount++; } } ExperimentalDesign experimentalDesign = edService.load(vo.getExperimentalDesign()); // Batch PCs int maxcomp = 3; BatchEffectDetails batchEffectPC1 = null; BatchEffectDetails batchEffectPC2 = null; BatchEffectDetails batchEffectPC3 = null; Collection<BatchEffectDetails> batchEffects = getBatchEffect(ee, maxcomp); Iterator<BatchEffectDetails> batchEffectsIterator; if (batchEffects == null || batchEffects.size() == 0) { log.warn("No batch effect info"); } else { batchEffectsIterator = batchEffects.iterator(); if (batchEffectsIterator.hasNext()) { batchEffectPC1 = batchEffectsIterator.next(); } if (batchEffectsIterator.hasNext()) { batchEffectPC2 = batchEffectsIterator.next(); } if (batchEffectsIterator.hasNext()) { batchEffectPC3 = batchEffectsIterator.next(); } } // eeService.getExperimentsWithOutliers(); StopWatch timerOutlier = new StopWatch(); timerOutlier.start(); // log.info( "Outlier detection service started " + timer.getTime() + "ms" ); Collection<OutlierDetails> possibleOutliers = outlierDetectionService.identifyOutliers(ee); if (timerOutlier.getTime() > 10000) { log.info("Automatic outlier detection took " + timerOutlier.getTime() + "ms"); } // Collection<OutlierDetails> possibleOutliers = null; // samples per condition boolean removeBatchFactor = false; Collection<String> samplesPerConditionCount = new ArrayList<>(); CountingMap<FactorValueVector> assayCount = ExperimentalDesignUtils.getDesignMatrix(ee, removeBatchFactor); List<FactorValueVector> keys = 
assayCount.sortedKeyList(true); for (FactorValueVector key : keys) { samplesPerConditionCount.add(Integer.toString(assayCount.get(key).intValue())); } // factor names Collection<ExperimentalFactor> factors = ee.getExperimentalDesign().getExperimentalFactors(); Collection<String> factorNames = new ArrayList<>(); Collection<String> factorCategories = new ArrayList<>(); Collection<Integer> factorValues = new ArrayList<>(); for (ExperimentalFactor f : factors) { factorNames.add(f.getName()); String cat = f.getCategory() != null ? f.getCategory().getCategory() : NA; factorCategories.add(cat); factorValues.add(Integer.valueOf(f.getFactorValues().size())); } int filteredProfilesCount = -1; try { FilterConfig filterConfig = new FilterConfig(); filterConfig.setIgnoreMinimumSampleThreshold(true); filteredProfilesCount = expressionDataMatrixService.getFilteredMatrix(ee, filterConfig, expressionDataMatrixService.getProcessedExpressionDataVectors(ee)).rows(); } catch (Exception e) { log.error(e.getMessage(), e); } String val[] = { vo.getShortName(), vo.getTaxon(), DateFormat.getDateInstance(DateFormat.MEDIUM).format(vo.getDateCreated()), vo != null ? Boolean.toString(vo.getIsPublic()) : NA, Integer.toString(arrayDesignsUsed.size()), StringUtils.join(arrayDesignShortNames, ','), StringUtils.join(arrayDesignTechTypes, ','), // arrayDesign.getTechnologyType().getValue(), // ONE-COLOR, TWO-COLOR, NONE (RNA-seq // GSE37646), DUAL-MODE (one or two color) StringUtils.join(arrayDesignIsExon, ','), // exon GSE28383 qt != null ? Boolean.toString(qt.getIsRatio().booleanValue()) : NA, qt != null ? Boolean.toString(qt.getIsNormalized().booleanValue()) : NA, qt != null ? 
qt.getScale().getValue() : NA, Integer.toString(vo.getProcessedExpressionVectorCount().intValue()), // NumProfiles Integer.toString(filteredProfilesCount), // NumFilteredProfiles Integer.toString(vo.getBioAssayCount().intValue()), // NumSamples Integer.toString(assayCount.size()), // NumConditions StringUtils.join(samplesPerConditionCount, ","), possibleOutliers != null ? Integer.toString(possibleOutliers.size()) : NA, Integer.toString(manualOutlierCount), Boolean.toString(vo.getTroubled()), pubStatus != null ? Boolean.toString(pubStatus.getTroubled().booleanValue()) : NA, primaryPublication != null ? DateFormat.getDateInstance(DateFormat.MEDIUM) .format(primaryPublication.getPublicationDate()) : NA, primaryPublication != null ? primaryPublication.getPublication() : NA, batchEffectPC1 != null ? Double.toString(batchEffectPC1.getComponentVarianceProportion()) : NA, batchEffectPC2 != null ? Double.toString(batchEffectPC2.getComponentVarianceProportion()) : NA, batchEffectPC3 != null ? Double.toString(batchEffectPC3.getComponentVarianceProportion()) : NA, batchEffectPC1 != null ? Double.toString(batchEffectPC1.getPvalue()) : NA, batchEffectPC2 != null ? Double.toString(batchEffectPC2.getPvalue()) : NA, batchEffectPC3 != null ? Double.toString(batchEffectPC3.getPvalue()) : NA, // factors factors != null ? Integer.toString(factors.size()) : NA, // NumFactors factorNames != null ? StringUtils.join(factorNames, ",") : NA, factorCategories != null ? StringUtils.join(factorCategories, ",") : NA, factorValues != null ? StringUtils.join(factorValues, ",") : NA, }; // log.info( StringUtils.join( val, "\t" ) + "\n" ); writer.write(StringUtils.join(val, "\t") + "\n"); } catch (Exception e) { failedEEs.add(((ExpressionExperiment) bas).getShortName()); StringWriter sw = new StringWriter(); e.printStackTrace(new PrintWriter(sw)); log.error(sw.toString()); } } log.info("Finished processing " + expressionExperiments.size() + " datasets in " + timer.getTime() + " ms. 
"); log.info("Writen to " + file); log.info("Number of failed experiment metadata extraction(s): " + failedEEs.size() + " / " + expressionExperiments.size()); if (failedEEs.size() > 0) { log.info("Skipped experiments:"); log.info(StringUtils.join(failedEEs, ",")); } } }
From source file:eu.annocultor.tagger.vocabularies.AbstractVocabulary.java
private void loadTerms(String query, boolean isSesame, File cacheDir, String baseDirName, String... filePatterns) throws Exception { VocabularySerializer handler = new VocabularySerializer(this, this.getVocabularyName()); File baseDir = new File(baseDirName); List<String> locations = new ArrayList<String>(); // normalizing files to prevent problems with hashes in cache file names // when run from a different environment for (String pattern : filePatterns) { for (File file : Utils.expandFileTemplateFrom(baseDir, pattern)) { locations.add(file.getCanonicalPath()); }//from ww w . ja va 2s . c o m } String locationDescriptionForExceptionLogging = "Expanded from dir " + baseDirName + " pattern " + StringUtils.join(filePatterns, ",") + " to " + StringUtils.join(locations, ";"); if (baseDirName.startsWith("http://") && filePatterns.length == 0) { locations.add(baseDirName); locationDescriptionForExceptionLogging += "Loading from SPARQL endpoint " + baseDirName; } loadMap(locations, locationDescriptionForExceptionLogging, this.name, query, isSesame, cacheDir, handler); reconstructParents(handler); if (codeSet().size() == 0) throw new Exception("No terms loaded to vocabulary " + name); StopWatch timeElapsed = new StopWatch(); timeElapsed.start(); timeElapsed.stop(); // log.info("Parsed " + name + " in " + timeElapsed + " ms"); }
From source file:eu.europeana.enrichment.tagger.vocabularies.AbstractVocabulary.java
private void loadTerms(String query, boolean isSesame, File cacheDir, String baseDirName, String... filePatterns) throws Exception { VocabularySerializer handler = new VocabularySerializer(this, this.getVocabularyName()); File baseDir = new File(baseDirName); List<String> locations = new ArrayList<String>(); // normalizing files to prevent problems with hashes in cache file names // when run from a different environment for (String pattern : filePatterns) { for (File file : Utils.expandFileTemplateFrom(baseDir, pattern)) { locations.add(file.getCanonicalPath()); }/*from w ww . ja v a2s. com*/ } String locationDescriptionForExceptionLogging = "Expanded from dir " + baseDirName + " pattern " + StringUtils.join(filePatterns, ",") + " to " + StringUtils.join(locations, ";"); if (baseDirName.startsWith("http://") && filePatterns.length == 0) { locations.add(baseDirName); locationDescriptionForExceptionLogging += "Loading from SPARQL endpoint " + baseDirName; } loadMap(locations, locationDescriptionForExceptionLogging, this.name, query, isSesame, cacheDir, handler); reconstructParents(handler); // if (codeSet().size() == 0) // throw new Exception("No terms loaded to vocabulary " + name); StopWatch timeElapsed = new StopWatch(); timeElapsed.start(); timeElapsed.stop(); // log.info("Parsed " + name + " in " + timeElapsed + " ms"); }
From source file:eu.annocultor.tagger.vocabularies.AbstractVocabulary.java
boolean loadFromCache(List<String> locations, String locationDescriptionForExceptionLogging, VocabularySerializer handler, File cacheFileTerms, File cacheFileParents) throws Exception { long lastModified = 0; for (String location : locations) { File file = new File(location); if (file.lastModified() > lastModified) { lastModified = file.lastModified(); }//from ww w . j ava 2s . co m } if (cacheFileTerms.exists() && cacheFileTerms.lastModified() > lastModified && cacheFileParents.exists()) { logMessage("Recovered cached query result from file " + cacheFileTerms); StopWatch timeElapsed = new StopWatch(); timeElapsed.start(); VocabularySerializer.SerializedProperties props = new VocabularySerializer.SerializedProperties(); props.getTerms().load(new BufferedInputStream(new FileInputStream(cacheFileTerms), 1024 * 10)); props.getParents().load(new BufferedInputStream(new FileInputStream(cacheFileParents), 1024 * 10)); handler.deserializeFromProperties(props); timeElapsed.stop(); logMessage("Loaded " + props.getTerms().size() + " term codes in " + timeElapsed + " ms"); return true; } if (!cacheFileTerms.exists() || !cacheFileParents.exists()) { logMessage("No cache file " + cacheFileTerms.getCanonicalPath() + " found, loading RDF from the following files"); logMessage(locationDescriptionForExceptionLogging); } if (cacheFileTerms.exists() && !(cacheFileTerms.lastModified() > lastModified)) { logMessage("Cache file " + cacheFileTerms.getCanonicalPath() + " is older than RDF files, loading RDF from the following files"); } return false; }