List of usage examples for com.google.common.net MediaType CSV_UTF_8
MediaType CSV_UTF_8
To view the source code for com.google.common.net MediaType CSV_UTF_8, click the Source Link below.
From source file:org.haiku.haikudepotserver.security.job.AuthorizationRulesSpreadsheetJobRunner.java
@Override public void run(JobService jobService, AuthorizationRulesSpreadsheetJobSpecification specification) throws IOException, JobRunnerException { final ObjectContext context = serverRuntime.newContext(); DateTimeFormatter dateTimeFormatter = DateTimeHelper.createStandardDateTimeFormat(); // this will register the outbound data against the job. JobDataWithByteSink jobDataWithByteSink = jobService.storeGeneratedData(specification.getGuid(), "download", MediaType.CSV_UTF_8.toString()); try (OutputStream outputStream = jobDataWithByteSink.getByteSink().openBufferedStream(); OutputStreamWriter outputStreamWriter = new OutputStreamWriter(outputStream); CSVWriter writer = new CSVWriter(outputStreamWriter, ',')) { writer.writeNext(new String[] { "create-timestamp", "user-nickname", "user-active", "permission-code", "permission-name", "pkg-name" }); ObjectSelect<PermissionUserPkg> objectSelect = ObjectSelect.query(PermissionUserPkg.class).orderBy( PermissionUserPkg.USER.dot(User.NICKNAME).asc(), PermissionUserPkg.PERMISSION.dot(Permission.CODE).asc()); try (ResultBatchIterator<PermissionUserPkg> batchIterator = objectSelect.batchIterator(context, 50)) { batchIterator.forEach((pups) -> pups.forEach((pup) -> writer.writeNext(new String[] { dateTimeFormatter.format(Instant.ofEpochMilli(pup.getCreateTimestamp().getTime())), pup.getUser().getNickname(), Boolean.toString(pup.getUser().getActive()), pup.getPermission().getCode(), pup.getPermission().getName(), null != pup.getPkg() ? pup.getPkg().getName() : "" }))); }/*from w w w .j av a 2 s . c o m*/ writer.flush(); outputStreamWriter.flush(); } }
From source file:org.haiku.haikudepotserver.pkg.job.PkgScreenshotSpreadsheetJobRunner.java
@Override public void run(JobService jobService, PkgScreenshotSpreadsheetJobSpecification specification) throws IOException { Preconditions.checkArgument(null != jobService); Preconditions.checkArgument(null != specification); final ObjectContext context = serverRuntime.newContext(); // this will register the outbound data against the job. JobDataWithByteSink jobDataWithByteSink = jobService.storeGeneratedData(specification.getGuid(), "download", MediaType.CSV_UTF_8.toString()); try (OutputStream outputStream = jobDataWithByteSink.getByteSink().openBufferedStream(); OutputStreamWriter outputStreamWriter = new OutputStreamWriter(outputStream); CSVWriter writer = new CSVWriter(outputStreamWriter, ',')) { String[] headings = new String[] { "pkg-name", "repository-codes", "screenshot-count", "screenshot-bytes" }; writer.writeNext(headings);/*from ww w . j a v a 2s . c o m*/ String[] cells = new String[4]; // stream out the packages. long startMs = System.currentTimeMillis(); LOGGER.info("will produce spreadsheet spreadsheet report"); long count = pkgService.eachPkg(context, false, pkg -> { cells[0] = pkg.getName(); cells[1] = repositoryService.getRepositoriesForPkg(context, pkg).stream().map(Repository::getCode) .collect(Collectors.joining(";")); cells[2] = Integer.toString(pkg.getPkgScreenshots().size()); cells[3] = Integer .toString(pkg.getPkgScreenshots().stream().mapToInt(_PkgScreenshot::getLength).sum()); writer.writeNext(cells); return true; }); LOGGER.info("did produce spreadsheet report for {} packages in {}ms", count, System.currentTimeMillis() - startMs); } }
From source file:org.haiku.haikudepotserver.pkg.job.PkgCategoryCoverageExportSpreadsheetJobRunner.java
@Override public void run(JobService jobService, PkgCategoryCoverageExportSpreadsheetJobSpecification specification) throws IOException { Preconditions.checkArgument(null != jobService); Preconditions.checkArgument(null != specification); final ObjectContext context = serverRuntime.newContext(); // this will register the outbound data against the job. JobDataWithByteSink jobDataWithByteSink = jobService.storeGeneratedData(specification.getGuid(), "download", MediaType.CSV_UTF_8.toString()); try (OutputStream outputStream = jobDataWithByteSink.getByteSink().openBufferedStream(); OutputStreamWriter outputStreamWriter = new OutputStreamWriter(outputStream); CSVWriter writer = new CSVWriter(outputStreamWriter, ',')) { // headers final List<String> pkgCategoryCodes = getPkgCategoryCodes(); String[] headings = getHeadingRow(pkgCategoryCodes); long startMs = System.currentTimeMillis(); writer.writeNext(headings);/*from w ww .jav a 2 s .c om*/ // stream out the packages. LOGGER.info("will produce category coverage spreadsheet report"); long count = pkgService.eachPkg(context, false, pkg -> { List<String> cols = new ArrayList<>(); Optional<PkgVersionLocalization> locOptional = Optional.empty(); if (null != pkg) { locOptional = PkgVersionLocalization.getAnyPkgVersionLocalizationForPkg(context, pkg); } cols.add(pkg.getName()); cols.add(repositoryService.getRepositoriesForPkg(context, pkg).stream().map(Repository::getCode) .collect(Collectors.joining(";"))); cols.add(locOptional.isPresent() ? locOptional.get().getSummary().orElse("") : ""); cols.add(pkg.getPkgPkgCategories().isEmpty() ? AbstractJobRunner.MARKER : ""); for (String pkgCategoryCode : pkgCategoryCodes) { cols.add(pkg.getPkgPkgCategory(pkgCategoryCode).isPresent() ? AbstractJobRunner.MARKER : ""); } cols.add(""); // no action writer.writeNext(cols.toArray(new String[cols.size()])); return true; // keep going! 
}); LOGGER.info("did produce category coverage spreadsheet report for {} packages in {}ms", count, System.currentTimeMillis() - startMs); } }
From source file:org.haiku.haikudepotserver.pkg.job.PkgProminenceAndUserRatingSpreadsheetJobRunner.java
@Override public void run(JobService jobService, PkgProminenceAndUserRatingSpreadsheetJobSpecification specification) throws IOException { Preconditions.checkArgument(null != jobService); Preconditions.checkArgument(null != specification); final ObjectContext context = serverRuntime.newContext(); // this will register the outbound data against the job. JobDataWithByteSink jobDataWithByteSink = jobService.storeGeneratedData(specification.getGuid(), "download", MediaType.CSV_UTF_8.toString()); try (OutputStream outputStream = jobDataWithByteSink.getByteSink().openBufferedStream(); OutputStreamWriter outputStreamWriter = new OutputStreamWriter(outputStream); CSVWriter writer = new CSVWriter(outputStreamWriter, ',')) { writer.writeNext(new String[] { "pkg-name", "repository-code", "prominence-name", "prominence-ordering", "derived-rating", "derived-rating-sample-size" }); // stream out the packages. long startMs = System.currentTimeMillis(); LOGGER.info("will produce prominence spreadsheet report"); long count = pkgService.eachPkg(context, false, pkg -> { List<PkgProminence> pkgProminences = PkgProminence.findByPkg(context, pkg); List<PkgUserRatingAggregate> pkgUserRatingAggregates = PkgUserRatingAggregate.findByPkg(context, pkg);/*from w w w. 
ja va 2 s .c o m*/ List<Repository> repositories = Stream .concat(pkgProminences.stream().map(PkgProminence::getRepository), pkgUserRatingAggregates.stream().map(PkgUserRatingAggregate::getRepository)) .distinct().sorted().collect(Collectors.toList()); if (repositories.isEmpty()) { writer.writeNext(new String[] { pkg.getName(), "", "", "", "", "" }); } else { for (Repository repository : repositories) { Optional<PkgProminence> pkgProminenceOptional = pkgProminences.stream() .filter(pp -> pp.getRepository().equals(repository)) .collect(SingleCollector.optional()); Optional<PkgUserRatingAggregate> pkgUserRatingAggregateOptional = pkgUserRatingAggregates .stream().filter(pura -> pura.getRepository().equals(repository)) .collect(SingleCollector.optional()); writer.writeNext(new String[] { pkg.getName(), repository.getCode(), pkgProminenceOptional.map(p -> p.getProminence().getName()).orElse(""), pkgProminenceOptional.map(p -> p.getProminence().getOrdering().toString()) .orElse(""), pkgUserRatingAggregateOptional.map(p -> p.getDerivedRating().toString()).orElse(""), pkgUserRatingAggregateOptional.map(p -> p.getDerivedRatingSampleSize().toString()) .orElse(""), }); } } return true; }); LOGGER.info("did produce prominence spreadsheet report for {} packages in {}ms", count, System.currentTimeMillis() - startMs); } }
From source file:org.haiku.haikudepotserver.pkg.job.PkgCategoryCoverageImportSpreadsheetJobRunner.java
@Override public void run(JobService jobService, PkgCategoryCoverageImportSpreadsheetJobSpecification specification) throws IOException, JobRunnerException { Preconditions.checkArgument(null != jobService); Preconditions.checkArgument(null != specification); Preconditions.checkArgument(null != specification.getInputDataGuid(), "missing imput data guid on specification"); // this will register the outbound data against the job. JobDataWithByteSink jobDataWithByteSink = jobService.storeGeneratedData(specification.getGuid(), "download", MediaType.CSV_UTF_8.toString()); // if there is input data then feed it in and process it to manipulate the packages' // categories. Optional<JobDataWithByteSource> jobDataWithByteSourceOptional = jobService .tryObtainData(specification.getInputDataGuid()); if (!jobDataWithByteSourceOptional.isPresent()) { throw new IllegalStateException( "the job data was not able to be found for guid; " + specification.getInputDataGuid()); }// w ww.j av a 2 s . c o m try (OutputStream outputStream = jobDataWithByteSink.getByteSink().openBufferedStream(); OutputStreamWriter outputStreamWriter = new OutputStreamWriter(outputStream); CSVWriter writer = new CSVWriter(outputStreamWriter, ','); InputStream inputStream = jobDataWithByteSourceOptional.get().getByteSource().openStream(); InputStreamReader inputStreamReader = new InputStreamReader(inputStream); CSVReader reader = new CSVReader(inputStreamReader);) { // headers List<String> pkgCategoryCodes = getPkgCategoryCodes(); String[] headings = getHeadingRow(pkgCategoryCodes); // read in the first row of the input and check the headings are there to quasi-validate // that the input is not some random rubbish. 
String[] headerRow = reader.readNext(); if (headings.length != headerRow.length) { throw new JobRunnerException("wrong number of header columns in input"); } if (!Arrays.equals(headerRow, headings)) { throw new JobRunnerException("mismatched input headers"); } writer.writeNext(headings); serverRuntime.performInTransaction(() -> { try { String[] row; while (null != (row = reader.readNext())) { if (0 != row.length) { ObjectContext rowContext = serverRuntime.newContext(); Action action = Action.NOACTION; if (row.length < headings.length - 1) { // -1 because it is possible to omit the action column. action = Action.INVALID; LOGGER.warn("inconsistent number of cells on line"); } else { String pkgName = row[0]; // 1; display boolean isNone = AbstractJobRunner.MARKER.equals(row[COLUMN_NONE]); Optional<Pkg> pkgOptional = Pkg.tryGetByName(rowContext, pkgName); List<String> selectedPkgCategoryCodes = new ArrayList<>(); if (pkgOptional.isPresent()) { for (int i = 0; i < pkgCategoryCodes.size(); i++) { if (AbstractJobRunner.MARKER.equals(row[COLUMN_NONE + 1 + i].trim())) { if (isNone) { action = Action.INVALID; LOGGER.warn( "line for package {} has 'none' marked as well as an actual category", row[0]); } selectedPkgCategoryCodes.add(pkgCategoryCodes.get(i)); } } if (action == Action.NOACTION) { List<PkgCategory> selectedPkgCategories = PkgCategory.getByCodes(rowContext, selectedPkgCategoryCodes); if (selectedPkgCategories.size() != selectedPkgCategoryCodes.size()) { throw new IllegalStateException( "one or more of the package category codes was not able to be found"); } if (pkgService.updatePkgCategories(rowContext, pkgOptional.get(), selectedPkgCategories)) { action = Action.UPDATED; rowContext.commitChanges(); LOGGER.debug("did update for package {}", row[0]); } } } else { action = Action.NOTFOUND; LOGGER.debug("unable to find the package for {}", row[0]); } } // copy the row back verbatim, but with the action result at the // end. 
List<String> rowOutput = new ArrayList<>(); Collections.addAll(rowOutput, row); while (rowOutput.size() < headings.length) { rowOutput.add(""); } rowOutput.remove(rowOutput.size() - 1); rowOutput.add(action.name()); writer.writeNext(rowOutput.toArray(new String[rowOutput.size()])); } } } catch (Throwable th) { LOGGER.error("a problem has arisen importing package categories from a spreadsheet", th); } return null; }); } }
From source file:org.opentestsystem.delivery.testreg.rest.TemplateDownloadController.java
private MediaType determineMediaType(String fileName) { switch (FileType.findByFilename(fileName)) { case XLS://ww w .ja v a 2s . c om case XLSX: return MediaType.MICROSOFT_EXCEL; case CSV: return MediaType.CSV_UTF_8; case TXT: return MediaType.TSV_UTF_8; } return null; }
From source file:org.haiku.haikudepotserver.pkg.job.PkgIconSpreadsheetJobRunner.java
@Override public void run(JobService jobService, PkgIconSpreadsheetJobSpecification specification) throws IOException { Preconditions.checkArgument(null != jobService); Preconditions.checkArgument(null != specification); final ObjectContext context = serverRuntime.newContext(); // this will register the outbound data against the job. JobDataWithByteSink jobDataWithByteSink = jobService.storeGeneratedData(specification.getGuid(), "download", MediaType.CSV_UTF_8.toString()); try (OutputStream outputStream = jobDataWithByteSink.getByteSink().openBufferedStream(); OutputStreamWriter outputStreamWriter = new OutputStreamWriter(outputStream); CSVWriter writer = new CSVWriter(outputStreamWriter, ',')) { final List<PkgIconConfiguration> pkgIconConfigurations = pkgIconService .getInUsePkgIconConfigurations(context); {// w w w. j av a 2 s .c om List<String> headings = new ArrayList<>(); headings.add("pkg-name"); headings.add("repository-codes"); headings.add("no-icons"); for (PkgIconConfiguration pkgIconConfiguration : pkgIconConfigurations) { StringBuilder heading = new StringBuilder(); heading.append(pkgIconConfiguration.getMediaType().getCode()); if (null != pkgIconConfiguration.getSize()) { heading.append("@"); heading.append(pkgIconConfiguration.getSize().toString()); } headings.add(heading.toString()); } writer.writeNext(headings.toArray(new String[headings.size()])); } // stream out the packages. long startMs = System.currentTimeMillis(); LOGGER.info("will produce icon spreadsheet report"); long count = pkgService.eachPkg(context, false, pkg -> { List<String> cells = new ArrayList<>(); cells.add(pkg.getName()); cells.add(repositoryService.getRepositoriesForPkg(context, pkg).stream().map(Repository::getCode) .collect(Collectors.joining(";"))); cells.add(pkg.getPkgIcons().isEmpty() ? 
MARKER : ""); for (PkgIconConfiguration pkgIconConfiguration : pkgIconConfigurations) { cells.add(pkg.getPkgIcon(pkgIconConfiguration.getMediaType(), pkgIconConfiguration.getSize()) .isPresent() ? MARKER : ""); } writer.writeNext(cells.toArray(new String[cells.size()])); return true; }); LOGGER.info("did produce icon report for {} packages in {}ms", count, System.currentTimeMillis() - startMs); } }
From source file:org.haiku.haikudepotserver.userrating.job.UserRatingSpreadsheetJobRunner.java
@Override public void run(JobService jobService, UserRatingSpreadsheetJobSpecification specification) throws IOException { Preconditions.checkArgument(null != jobService); Preconditions.checkArgument(null != specification); final ObjectContext context = serverRuntime.newContext(); // this will register the outbound data against the job. JobDataWithByteSink jobDataWithByteSink = jobService.storeGeneratedData(specification.getGuid(), "download", MediaType.CSV_UTF_8.toString()); try (OutputStream outputStream = jobDataWithByteSink.getByteSink().openBufferedStream(); OutputStreamWriter outputStreamWriter = new OutputStreamWriter(outputStream); CSVWriter writer = new CSVWriter(outputStreamWriter, ',')) { Optional<Pkg> paramPkgOptional = Optional.empty(); Optional<User> paramUserOptional = Optional.empty(); Optional<Repository> paramRepositoryOptional = Optional.empty(); if (!Strings.isNullOrEmpty(specification.getRepositoryCode())) { paramRepositoryOptional = Repository.tryGetByCode(context, specification.getRepositoryCode()); if (!paramRepositoryOptional.isPresent()) { throw new IllegalStateException( "unable to find the repository; " + specification.getRepositoryCode()); }// www. j a v a 2 s . 
com } if (!Strings.isNullOrEmpty(specification.getUserNickname())) { paramUserOptional = User.tryGetByNickname(context, specification.getUserNickname()); if (!paramUserOptional.isPresent()) { throw new IllegalStateException("unable to find the user; " + specification.getUserNickname()); } } if (!Strings.isNullOrEmpty(specification.getPkgName())) { paramPkgOptional = Pkg.tryGetByName(context, specification.getPkgName()); if (!paramPkgOptional.isPresent()) { throw new IllegalStateException("unable to find the package; " + specification.getPkgName()); } } writer.writeNext(new String[] { "pkg-name", "repository-code", "architecture-code", "version-coordinates", "user-nickname", "create-timestamp", "modify-timestamp", "rating", "stability-code", "natural-language-code", "comment", "code" }); // stream out the packages. long startMs = System.currentTimeMillis(); LOGGER.info("will user rating spreadsheet report"); final DateTimeFormatter dateTimeFormatter = DateTimeHelper.createStandardDateTimeFormat(); UserRatingSearchSpecification spec = new UserRatingSearchSpecification(); spec.setPkg(paramPkgOptional.orElse(null)); spec.setUser(paramUserOptional.orElse(null)); spec.setRepository(paramRepositoryOptional.orElse(null)); // TODO; provide a prefetch tree into the user, pkgversion. int count = userRatingService.each(context, spec, userRating -> { writer.writeNext(new String[] { userRating.getPkgVersion().getPkg().getName(), userRating.getPkgVersion().getRepositorySource().getRepository().getCode(), userRating.getPkgVersion().getArchitecture().getCode(), userRating.getPkgVersion().toVersionCoordinates().toString(), userRating.getUser().getNickname(), dateTimeFormatter.format(Instant.ofEpochMilli(userRating.getCreateTimestamp().getTime())), dateTimeFormatter.format(Instant.ofEpochMilli(userRating.getModifyTimestamp().getTime())), null != userRating.getRating() ? userRating.getRating().toString() : "", null != userRating.getUserRatingStability() ? 
userRating.getUserRatingStability().getCode() : "", userRating.getNaturalLanguage().getCode(), userRating.getComment(), userRating.getCode() }); return true; }); LOGGER.info("did produce user rating spreadsheet report for {} user ratings in {}ms", count, System.currentTimeMillis() - startMs); } }
From source file:reflex.file.CSVFileReadAdapterHeader.java
/**
 * Returns the media type of the content this adapter reads: UTF-8 encoded CSV.
 */
@Override
public MediaType getMimeType() {
    return MediaType.CSV_UTF_8;
}
From source file:org.haiku.haikudepotserver.pkg.job.PkgLocalizationCoverageExportSpreadsheetJobRunner.java
@Override public void run(JobService jobService, PkgLocalizationCoverageExportSpreadsheetJobSpecification specification) throws IOException, JobRunnerException { Preconditions.checkArgument(null != jobService); Preconditions.checkArgument(null != specification); final ObjectContext context = serverRuntime.newContext(); final List<NaturalLanguage> naturalLanguages = getNaturalLanguages(context); if (naturalLanguages.isEmpty()) { throw new RuntimeException("there appear to be no natural languages in the system"); }/*w w w. ja v a 2s . c o m*/ // this will register the outbound data against the job. JobDataWithByteSink jobDataWithByteSink = jobService.storeGeneratedData(specification.getGuid(), "download", MediaType.CSV_UTF_8.toString()); try (OutputStream outputStream = jobDataWithByteSink.getByteSink().openBufferedStream(); OutputStreamWriter outputStreamWriter = new OutputStreamWriter(outputStream); CSVWriter writer = new CSVWriter(outputStreamWriter, ',')) { final String[] cells = new String[1 + naturalLanguages.size()]; // headers { int c = 0; cells[c++] = "pkg-name"; for (NaturalLanguage naturalLanguage : naturalLanguages) { cells[c++] = naturalLanguage.getCode(); } } long startMs = System.currentTimeMillis(); writer.writeNext(cells); // stream out the packages. final long expectedTotal = pkgService.totalPkg(context, false); final AtomicLong counter = new AtomicLong(0); LOGGER.info("will produce package localization report for {} packages", expectedTotal); long count = pkgService.eachPkg(context, false, // allow source only. pkg -> { int c = 0; cells[c++] = pkg.getName(); for (NaturalLanguage naturalLanguage : naturalLanguages) { cells[c++] = pkg.getPkgLocalization(naturalLanguage).isPresent() ? MARKER : ""; } writer.writeNext(cells); jobService.setJobProgressPercent(specification.getGuid(), (int) ((100 * counter.incrementAndGet()) / expectedTotal)); return true; // keep going! 
}); LOGGER.info("did produce pkg localization coverage spreadsheet report for {} packages in {}ms", count, System.currentTimeMillis() - startMs); } }