List of usage examples for the org.springframework.util.StopWatch constructor
public StopWatch()
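Before the project examples below, here is a minimal sketch of the typical lifecycle that starts with this constructor. It is not taken from any of the listed projects; the task name "parse" and the surrounding code are illustrative only.

StopWatch stopWatch = new StopWatch();
stopWatch.start("parse");                  // begin timing a named task
// ... code being measured goes here ...
stopWatch.stop();                          // end the current task
System.out.println(stopWatch.getTotalTimeMillis() + " ms");   // total time across all tasks
System.out.println(stopWatch.prettyPrint());                  // per-task breakdown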
From source file:org.jsmiparser.AbstractMibTestCase.java
protected SmiMib getMib() {
    // this is a rather ugly hack to mimic JUnit4 @BeforeClass, without
    // having to annotate all test methods:
    if (m_mib.get() == null || m_testClass.get() != getClass()) {
        try {
            SmiParser parser = createParser();
            StopWatch stopWatch = new StopWatch();
            stopWatch.start();
            SmiMib mib = parser.parse();
            stopWatch.stop();
            m_log.info("Parsing time: " + stopWatch.getTotalTimeSeconds() + " s");
            m_mib.set(mib);
            m_testClass.set(getClass());
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
    return m_mib.get();
}
From source file:com.persistent.cloudninja.scheduler.TenantCreationTask.java
@Override
public boolean execute() {
    boolean retval = true;
    try {
        TenantCreationQueue tntCreationQueue = (TenantCreationQueue) getWorkQueue();
        String message = tntCreationQueue.dequeue(SchedulerSettings.MessageVisibilityTimeout);
        if (message == null) {
            LOGGER.debug("Msg is null");
            retval = false;
        } else {
            StopWatch watch = new StopWatch();
            watch.start();
            ProvisioningTenantDTO provisioningTenantDTO = createDTOfromMessage(message);
            boolean tenantCreationSuccess = provisioningService.provisionTenant(provisioningTenantDTO);
            if (tenantCreationSuccess) {
                LOGGER.debug("tenant created :" + provisioningTenantDTO.getTenantId());
            } else {
                LOGGER.debug("tenant creation for tenant ID :" + provisioningTenantDTO.getTenantId() + " failed.");
            }
            watch.stop();
            taskCompletionDao.updateTaskCompletionDetails(watch.getTotalTimeSeconds(), "ProvisionTenantWork",
                    "Tenant " + provisioningTenantDTO.getTenantId() + " is created.");
        }
    } catch (StorageException e) {
        retval = false;
        LOGGER.error(e.getMessage(), e);
    }
    return retval;
}
From source file:org.olegz.uuid.TimeBasedUUIDGeneratorTests.java
@Test
public void performanceTestSynch() {
    StopWatch stopWatch = new StopWatch();
    stopWatch.start();
    for (int i = 0; i < 1000000; i++) {
        TimeBasedUUIDGenerator.generateId();
    }
    stopWatch.stop();
    System.out.println("Generated 1000000 UUID (sync) via TimeBasedUUIDGenerator.generateId(): in "
            + stopWatch.getTotalTimeSeconds() + " seconds");

    stopWatch = new StopWatch();
    stopWatch.start();
    for (int i = 0; i < 1000000; i++) {
        UUID.randomUUID();
    }
    stopWatch.stop();
    System.out.println("Generated 1000000 UUID (sync) via UUID.randomUUID(): in "
            + stopWatch.getTotalTimeSeconds() + " seconds");
}
From source file:com.hd123.oauth2.config.SwaggerConfiguration.java
/**
 * Swagger Springfox configuration.
 */
@Bean
@Role(ROLE_SUPPORT)
@Profile(UN_PRODUCTION)
@Description("Heading OAuth2 API Documentation")
public Docket swaggerSpringfoxDocket() {
    final boolean debugAble = logger.isDebugEnabled();
    if (debugAble) {
        logger.debug("Starting Swagger");
    }
    final StopWatch watch = new StopWatch();
    watch.start();
    final ApiInfo apiInfo = apiInfo();
    final Docket docket = new Docket(SWAGGER_2).apiInfo(apiInfo).enable(!profileUtil.isProd())
            .enableUrlTemplating(false).forCodeGeneration(true).genericModelSubstitutes(ResponseEntity.class)
            .ignoredParameterTypes(Pageable.class)
            .directModelSubstitute(java.time.LocalDate.class, String.class)
            .directModelSubstitute(java.time.ZonedDateTime.class, Date.class)
            .directModelSubstitute(java.time.LocalDateTime.class, Date.class)
            .useDefaultResponseMessages(false)
            .alternateTypeRules(newRule(
                    typeResolver.resolve(DeferredResult.class,
                            typeResolver.resolve(ResponseEntity.class, WildcardType.class)),
                    typeResolver.resolve(WildcardType.class)))
            .globalResponseMessage(GET,
                    newArrayList(new ResponseMessageBuilder().code(500).message("Internal Server Error")
                            .responseModel(new ModelRef("Error")).build()))
            .select().apis(any()).paths(regex(appProperties.getSwagger().getApiPattern())).build();
    watch.stop();
    if (debugAble) {
        logger.debug("Started Swagger in {} ms", watch.getTotalTimeMillis());
    }
    return docket;
}
From source file:com.persistent.cloudninja.scheduler.TenantBlobSizeProcessor.java
/**
 * Calculates the blob sizes of the private and public containers.
 */
@Override
public boolean execute() {
    boolean retVal = true;
    String tenantId = null;
    long blobSize = 0;
    try {
        LOGGER.debug("In Processor");
        TenantBlobSizeQueue queue = (TenantBlobSizeQueue) getWorkQueue();
        tenantId = queue.dequeue(SchedulerSettings.MessageVisibilityTimeout);
        if (tenantId == null) {
            retVal = false;
            LOGGER.debug("Processor : msg is null");
        } else {
            StopWatch watch = new StopWatch();
            watch.start();
            // get the size of blobs in the private container
            blobSize = storageUtility.getContainerSize("tntp-" + tenantId.toLowerCase());
            // get the size of blobs in the public container
            blobSize = blobSize + storageUtility.getContainerSize("tnts-" + tenantId.toLowerCase());
            LOGGER.debug("Processor : msg is " + tenantId);
            MeteringEntity metering = new MeteringEntity();
            metering.setTenantId(tenantId);
            Calendar calendar = Calendar.getInstance();
            SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.S z");
            dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
            String date = dateFormat.format(calendar.getTime());
            metering.setSnapshotTime(dateFormat.parse(date));
            // set the calculated size
            blobSize = blobSize / 1024;
            metering.setBlobStoreUsage(blobSize);
            meteringDao.add(metering);
            LOGGER.info("Processor : blobSize is " + blobSize);
            watch.stop();
            taskCompletionDao.updateTaskCompletionDetails(watch.getTotalTimeSeconds(), "ProcessMeteringBlobSizes",
                    "Measured " + blobSize + " kb for tenant " + tenantId);
        }
    } catch (Exception e) {
        retVal = false;
        LOGGER.error(e.getMessage(), e);
    }
    return retVal;
}
From source file:com.api.foobar.config.apidoc.SwaggerConfiguration.java
/**
 * Swagger Springfox configuration.
 *
 * @param swaggerProperties the properties of the application
 * @return the Swagger Springfox configuration
 */
@Bean
public Docket swaggerSpringfoxDocket(SwaggerProperties swaggerProperties) {
    log.debug("Starting Swagger");
    StopWatch watch = new StopWatch();
    watch.start();
    Contact contact = getContact(swaggerProperties);
    ApiInfo apiInfo = getApiInfo(swaggerProperties, contact);
    Docket docket = new Docket(DocumentationType.SWAGGER_2).apiInfo(apiInfo).forCodeGeneration(true)
            // .genericModelSubstitutes(ResponseEntity.class)
            // .ignoredParameterTypes(Pageable.class)
            // .ignoredParameterTypes(java.sql.Date.class)
            // .directModelSubstitute(java.time.LocalDate.class, java.sql.Date.class)
            // .directModelSubstitute(java.time.ZonedDateTime.class, Date.class)
            // .directModelSubstitute(java.time.LocalDateTime.class, Date.class)
            .select().apis(RequestHandlerSelectors.any()).paths(apiPaths()).build().pathMapping("/")
            .directModelSubstitute(DateTime.class, String.class).genericModelSubstitutes(ResponseEntity.class)
            .alternateTypeRules(newRule(
                    typeResolver.resolve(DeferredResult.class,
                            typeResolver.resolve(ResponseEntity.class, WildcardType.class)),
                    typeResolver.resolve(WildcardType.class)))
            .globalOperationParameters(globalOperationParameters())
            .useDefaultResponseMessages(true) // flag to indicate if default http response codes need to be used or not
            .securitySchemes(newArrayList(apiKey())).securityContexts(newArrayList(securityContext()))
            .enableUrlTemplating(false); // if true, use 'from style query' for generated paths
    watch.stop();
    log.debug("Started Swagger in {} ms", watch.getTotalTimeMillis());
    return docket;
}
From source file:org.ameba.aop.IntegrationLayerAspect.java
/**
 * Around intercepted methods do some logging and exception translation.
 * <ul>
 * <li>Set log level of {@link LoggingCategories#INTEGRATION_LAYER_ACCESS} to INFO to enable method tracing.</li>
 * <li>Set log level of {@link LoggingCategories#INTEGRATION_LAYER_EXCEPTION} to ERROR to enable exception logging.</li>
 * </ul>
 *
 * @param pjp The joinpoint
 * @return Method return value
 * @throws Throwable in case of errors
 */
@Around("org.ameba.aop.Pointcuts.integrationPointcut()")
public Object measure(ProceedingJoinPoint pjp) throws Throwable {
    StopWatch sw = null;
    if (P_LOGGER.isInfoEnabled()) {
        sw = new StopWatch();
        sw.start();
        P_LOGGER.info("[I]>> {}#{}", pjp.getTarget().getClass().getSimpleName(), pjp.getSignature().getName());
    }
    try {
        return pjp.proceed();
    } catch (Exception ex) {
        throw translateException(ex);
    } finally {
        if (P_LOGGER.isInfoEnabled() && sw != null) {
            sw.stop();
            P_LOGGER.info("[I]<< {}#{} took {} [ms]", pjp.getTarget().getClass().getSimpleName(),
                    pjp.getSignature().getName(), sw.getTotalTimeMillis());
        }
    }
}
From source file:com.googlecode.flyway.core.validation.DbValidator.java
/**
 * Validates the checksum of all existing sql migrations in the metadata table against the checksum of the sql
 * migrations in the classpath.
 *
 * @param resolvedMigrations All migrations available on the classpath, sorted by version, newest first.
 * @return description of the validation error or NULL if no validation error was found
 */
public String validate(List<Migration> resolvedMigrations) {
    if (ValidationMode.NONE.equals(validationMode)) {
        return null;
    }

    LOG.debug(String.format("Validating (mode %s) migrations ...", validationMode));
    StopWatch stopWatch = new StopWatch();
    stopWatch.start();

    final List<MetaDataTableRow> appliedMigrations = new ArrayList<MetaDataTableRow>(
            metaDataTable.allAppliedMigrations());
    if (appliedMigrations.isEmpty()) {
        LOG.info("No migrations applied yet. No validation necessary.");
        return null;
    }

    List<Migration> migrations = new ArrayList<Migration>(resolvedMigrations);
    // migrations now with newest last
    Collections.reverse(migrations);

    final MetaDataTableRow firstAppliedMigration = appliedMigrations.get(0);
    if (MigrationType.INIT.equals(firstAppliedMigration.getMigrationType())) {
        // if the first migration is INIT, just check the checksum of the following migrations
        final SchemaVersion initVersion = firstAppliedMigration.getVersion();
        appliedMigrations.remove(firstAppliedMigration);

        Iterator<Migration> iterator = migrations.iterator();
        while (iterator.hasNext()) {
            Migration migration = iterator.next();
            if (migration.getVersion().compareTo(initVersion) <= 0) {
                iterator.remove();
            }
        }
    }

    if (appliedMigrations.size() > migrations.size()) {
        List<SchemaVersion> schemaVersions = new ArrayList<SchemaVersion>();
        for (MetaDataTableRow metaDataTableRow : appliedMigrations) {
            schemaVersions.add(metaDataTableRow.getVersion());
        }
        for (Migration migration : migrations) {
            schemaVersions.remove(migration.getVersion());
        }
        String diff = StringUtils.collectionToCommaDelimitedString(schemaVersions);
        return String.format(
                "More applied migrations than classpath migrations: DB=%s, Classpath=%s, Missing migrations=(%s)",
                appliedMigrations.size(), migrations.size(), diff);
    }

    for (int i = 0; i < appliedMigrations.size(); i++) {
        MetaDataTableRow appliedMigration = appliedMigrations.get(i);
        // Migrations are sorted in the opposite order: newest first.
        Migration classpathMigration = migrations.get(i);

        if (!appliedMigration.getVersion().equals(classpathMigration.getVersion())) {
            return String.format("Version mismatch for migration %s: DB=%s, Classpath=%s",
                    appliedMigration.getScript(), appliedMigration.getVersion(), classpathMigration.getVersion());
        }
        if (!appliedMigration.getMigrationType().equals(classpathMigration.getMigrationType())) {
            return String.format("Migration Type mismatch for migration %s: DB=%s, Classpath=%s",
                    appliedMigration.getScript(), appliedMigration.getMigrationType(),
                    classpathMigration.getMigrationType());
        }
        final Integer appliedChecksum = appliedMigration.getChecksum();
        final Integer classpathChecksum = classpathMigration.getChecksum();
        if (!ObjectUtils.nullSafeEquals(appliedChecksum, classpathChecksum)) {
            return String.format("Checksum mismatch for migration %s: DB=%s, Classpath=%s",
                    appliedMigration.getScript(), appliedChecksum, classpathMigration.getChecksum());
        }
    }

    stopWatch.stop();
    if (appliedMigrations.size() == 1) {
        LOG.info(String.format("Validated 1 migration (mode: %s) (execution time %s)", validationMode,
                TimeFormat.format(stopWatch.getTotalTimeMillis())));
    } else {
        LOG.info(String.format("Validated %d migrations (mode: %s) (execution time %s)",
                appliedMigrations.size(), validationMode, TimeFormat.format(stopWatch.getTotalTimeMillis())));
    }
    return null;
}
From source file:com.persistent.cloudninja.scheduler.TenantDBSizeProcessor.java
/**
 * Calculates DB size of tenant DB.
 */
@Override
public boolean execute() {
    boolean retVal = true;
    String tenantId = null;
    try {
        LOGGER.debug("In Processor");
        long dbSize = 0;
        TenantDBSizeQueue queue = (TenantDBSizeQueue) getWorkQueue();
        tenantId = queue.dequeue(SchedulerSettings.MessageVisibilityTimeout);
        if (tenantId == null) {
            retVal = false;
            LOGGER.debug("Processor : msg is null");
        } else {
            StopWatch watch = new StopWatch();
            watch.start();
            LOGGER.debug("Processor : msg is " + tenantId);
            dbSize = partitionStatsAndBWUsageDao.getDBSize(tenantId);
            MeteringEntity metering = new MeteringEntity();
            metering.setTenantId(tenantId);
            Calendar calendar = Calendar.getInstance();
            SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.S z");
            dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
            String date = dateFormat.format(calendar.getTime());
            metering.setSnapshotTime(dateFormat.parse(date));
            metering.setDatabaseSize(dbSize);
            meteringDao.add(metering);
            LOGGER.info("Processor : dbSize is " + dbSize);
            watch.stop();
            taskCompletionDao.updateTaskCompletionDetails(watch.getTotalTimeSeconds(),
                    "ProcessMeteringTenantDatabaseSize",
                    "Measured " + dbSize + " for tenant " + tenantId + " database");
        }
    } catch (StorageException e) {
        retVal = false;
        LOGGER.error(e.getMessage(), e);
    } catch (ParseException e) {
        retVal = false;
        LOGGER.error(e.getMessage(), e);
    }
    return retVal;
}
From source file:de.codecentric.batch.metrics.AbstractBatchMetricsAspect.java
private StopWatch startStopWatch() {
    StopWatch stopWatch = new StopWatch();
    stopWatch.start();
    return stopWatch;
}