Usage examples for the Guava method com.google.common.io.Files.write:
public static void write(CharSequence from, File to, Charset charset) throws IOException
From source file:org.corpus_tools.annis.benchmark.generator.QuerySetPersistance.java
private static void writeQuery(File parentDir, Query q) throws IOException { Preconditions.checkNotNull(q.getName()); Preconditions.checkNotNull(q.getAql()); String name = q.getName();//from ww w .ja va2s.co m File fAQL = new File(parentDir, name + ".aql"); Files.write(q.getAql() + "\n", fAQL, StandardCharsets.UTF_8); if (q.getJson() != null) { File fJSON = new File(parentDir, name + ".json"); Files.write(q.getJson(), fJSON, StandardCharsets.UTF_8); } if (q.getCount().isPresent()) { File fCount = new File(parentDir, name + ".count"); Files.write("" + q.getCount().get(), fCount, StandardCharsets.UTF_8); } if (q.getExecutionTime().isPresent()) { File fTime = new File(parentDir, name + ".time"); Files.write("" + q.getExecutionTime().get(), fTime, StandardCharsets.UTF_8); } if (q.getCorpora() != null && !q.getCorpora().isEmpty()) { File fCorpora = new File(parentDir, name + ".corpora"); Files.write(Joiner.on(",").join(q.getCorpora()), fCorpora, StandardCharsets.UTF_8); } }
From source file:org.primefaces.extensions.optimizerplugin.ClosureCompilerOptimizer.java
/**
 * Optimizes the JavaScript resources described by {@code rsa} with the Google
 * Closure Compiler.
 *
 * <p>Three modes, chosen from the adapter:
 * <ul>
 *   <li>no aggregation: each file is compiled on its own, either into a
 *       suffixed sibling file or back over the original via a temp file;</li>
 *   <li>aggregation with compression: all files are compiled together and the
 *       result appended to one output file (optionally after a prepended
 *       file);</li>
 *   <li>aggregation without compression: files are concatenated as-is.</li>
 * </ul>
 *
 * @param rsa describes the files, encoding, aggregation and compiler settings
 * @param log Maven log for progress reporting
 * @throws MojoExecutionException wrapping any failure during optimization
 */
@Override
public void optimize(final ResourcesSetAdapter rsa, final Log log) throws MojoExecutionException {
    // Configure the Closure Compiler from the adapter's compilation / warning levels.
    CompilationLevel compLevel = rsa.getCompilationLevel();
    CompilerOptions options = new CompilerOptions();
    compLevel.setOptionsForCompilationLevel(options);
    WarningLevel warnLevel = rsa.getWarningLevel();
    warnLevel.setOptionsForWarningLevel(options);
    com.google.javascript.jscomp.Compiler.setLoggingLevel(Level.WARNING);
    try {
        Charset cset = Charset.forName(rsa.getEncoding());
        if (rsa.getAggregation() == null) {
            // No aggregation: compile every file individually.
            for (File file : rsa.getFiles()) {
                log.info("Optimize JS file " + file.getName() + " ...");
                addToOriginalSize(file);

                JSSourceFile jsSourceFile = JSSourceFile.fromFile(file, cset);
                List<JSSourceFile> interns = new ArrayList<JSSourceFile>();
                interns.add(jsSourceFile);

                // compile
                Compiler compiler = compile(log, interns, options, rsa.isFailOnWarning());

                // generate output
                String path = file.getCanonicalPath();
                if (StringUtils.isNotBlank(rsa.getSuffix())) {
                    // write compiled content into the new (suffixed) file
                    File outputFile = getFileWithSuffix(path, rsa.getSuffix());
                    Files.write(compiler.toSource(), outputFile, cset);

                    // statistic
                    addToOptimizedSize(outputFile);
                } else {
                    // path of the temp. file
                    String pathOptimized = FileUtils.removeExtension(path) + OPTIMIZED_FILE_EXTENSION;

                    // create a new temp. file
                    File outputFile = new File(pathOptimized);
                    Files.touch(outputFile);

                    // write compiled content into the temp. file and rename it
                    // (overwrite the original file)
                    Files.write(compiler.toSource(), outputFile, cset);
                    FileUtils.rename(outputFile, file);

                    // statistic
                    addToOptimizedSize(file);
                }
            }
        } else if (rsa.getAggregation().getOutputFile() != null) {
            // aggregation to one output file
            File outputFile;
            if (!rsa.getAggregation().isWithoutCompress()) {
                // with compressing before aggregation
                List<JSSourceFile> interns = new ArrayList<JSSourceFile>();
                for (File file : rsa.getFiles()) {
                    log.info("Optimize JS file " + file.getName() + " ...");
                    addToOriginalSize(file);
                    interns.add(JSSourceFile.fromFile(file, cset));
                }

                // compile all inputs together
                Compiler compiler = compile(log, interns, options, rsa.isFailOnWarning());

                int filesCount = rsa.getFiles().size();
                if (rsa.getAggregation().getPrependedFile() != null) {
                    filesCount++;
                }
                if (filesCount > 1) {
                    log.info("Aggregation is running ...");
                }

                // get right output file
                outputFile = getOutputFile(rsa);

                // remember the size before appending so only the delta is counted below
                long sizeBefore = outputFile.length();

                if (rsa.getAggregation().getPrependedFile() != null) {
                    // write / append to be prepended file into / to the output file
                    prependFile(rsa.getAggregation().getPrependedFile(), outputFile, cset, rsa.getEncoding());
                }

                // write / append compiled content into / to the output file
                Files.append(compiler.toSource(), outputFile, cset);

                // statistic
                addToOptimizedSize(outputFile.length() - sizeBefore);

                if (filesCount > 1) {
                    log.info(filesCount + " files were successfully aggregated.");
                }
            } else {
                // only aggregation without compressing
                outputFile = aggregateFiles(rsa, cset, log);
            }

            // delete single files if necessary
            deleteFilesIfNecessary(rsa, log);

            // rename aggregated file if necessary
            renameOutputFileIfNecessary(rsa, outputFile);
        } else {
            // should not happen: aggregation configured but no output file given
            log.error("Wrong plugin's internal state.");
        }
    } catch (Exception e) {
        throw new MojoExecutionException("Resources optimization failure: " + e.getLocalizedMessage(), e);
    }
}
From source file:org.apache.sentry.binding.hive.SentryIniPolicyFileFormatter.java
/** * Write the sentry mapping data to ini file. * /*from w ww.j a v a 2s. c om*/ * @param resourcePath * The path of the output file * @param sentryMappingData * The map for sentry mapping data, eg: * for the following mapping data: * group1=role1,role2 * group2=role2,role3 * role1=server=server1->db=db1 * role2=server=server1->db=db1->table=tbl1,server=server1->db=db1->table=tbl2 * role3=server=server1->url=hdfs://localhost/path * * The sentryMappingData will be inputed as: * { * groups={[group1={role1, role2}], group2=[role2, role3]}, * roles={role1=[server=server1->db=db1], * role2=[server=server1->db=db1->table=tbl1,server=server1->db=db1->table=tbl2], * role3=[server=server1->url=hdfs://localhost/path] * } * } */ @Override public void write(String resourcePath, Map<String, Map<String, Set<String>>> sentryMappingData) throws Exception { File destFile = new File(resourcePath); if (destFile.exists() && !destFile.delete()) { throw new IllegalStateException("Unable to delete " + destFile); } String contents = Joiner.on(NL).join( generateSection(PolicyFileConstants.GROUPS, sentryMappingData.get(PolicyFileConstants.GROUPS)), generateSection(PolicyFileConstants.ROLES, sentryMappingData.get(PolicyFileConstants.ROLES)), ""); LOGGER.info("Writing policy file to " + destFile + ":\n" + contents); Files.write(contents, destFile, Charsets.UTF_8); }
From source file:fr.xebia.workshop.caching.DocumentationGenerator.java
public void generateDocs(WorkshopInfrastructure workshopInfrastructure, String baseWikiFolder) throws IOException { File wikiBaseFolder = new File(baseWikiFolder); if (wikiBaseFolder.exists()) { logger.debug("Delete wiki folder {}", wikiBaseFolder); wikiBaseFolder.delete();//from w ww .j a v a 2 s . c o m } wikiBaseFolder.mkdirs(); Map<String, String> wikiPageNamesByTeamIdentifier = Maps.newHashMap(); for (String teamIdentifier : workshopInfrastructure.getTeamIdentifiers()) { Map<String, Object> rootMap = Maps.newHashMap(); rootMap.put("infrastructure", workshopInfrastructure); rootMap.put("teamIdentifier", teamIdentifier); String templatePath = "/fr/xebia/workshop/caching/lab.md.ftl"; rootMap.put("generator", "This page has been generaterd by '{{{" + getClass() + "}}}' with template '{{{" + templatePath + "}}}' on the " + new DateTime()); String page = FreemarkerUtils.generate(rootMap, templatePath); String wikiPageName = "Lab_team_" + teamIdentifier; wikiPageNamesByTeamIdentifier.put(teamIdentifier, wikiPageName); File wikiPageFile = new File(wikiBaseFolder, wikiPageName + ".md"); Files.write(page, wikiPageFile, Charsets.UTF_8); logger.debug("Generated file {}", wikiPageFile.getAbsoluteFile()); } StringWriter indexPageStringWriter = new StringWriter(); PrintWriter indexPageWriter = new PrintWriter(indexPageStringWriter); indexPageWriter.println("# Labs Per Team"); for (String teamIdentifier : new TreeSet<String>(workshopInfrastructure.getTeamIdentifiers())) { indexPageWriter.println("* [Lab for team " + teamIdentifier + "]" + "(https://github.com/xebia-france/workshop-web-caching-cocktail/wiki/" + wikiPageNamesByTeamIdentifier.get(teamIdentifier) + ")"); } String indexPageName = "Home"; Files.write(indexPageStringWriter.toString(), new File(baseWikiFolder, indexPageName + ".md"), Charsets.UTF_8); System.out.println("GENERATED WIKI PAGES TO BE COMMITTED IN XEBIA-FRANCE GITHUB"); 
System.out.println("================================================================="); System.out.println(); System.out.println("Base folder: " + baseWikiFolder); System.out.println("All the files in " + baseWikiFolder + " must be committed in https://xebia-france.googlecode.com/svn/wiki"); System.out.println("Index page: " + indexPageName); System.out.println("Per team pages: \n\t" + Joiner.on("\n\t").join(new TreeSet<String>(wikiPageNamesByTeamIdentifier.values()))); }
From source file:org.apache.flink.test.util.AbstractTestBase.java
/**
 * Creates a temp file (registered for cleanup), fills it with
 * {@code contents} encoded as UTF-8 and returns its location.
 *
 * @param fileName name of the temp file to create
 * @param contents text to write into the file
 * @return the file's location as a URI string
 * @throws IOException if the file cannot be written
 */
public String createTempFile(String fileName, String contents) throws IOException {
    final File tempFile = createAndRegisterTempFile(fileName);
    Files.write(contents, tempFile, Charsets.UTF_8);
    final String location = tempFile.toURI().toString();
    return location;
}
From source file:net.minecraftforge.gradle.tasks.CreateStartTask.java
/**
 * Extracts the GradleStart resources, applies the configured token
 * replacements, and — when {@code compile} is set — compiles the result with
 * javac via Ant and copies the sources next to the classes for debugging.
 *
 * @throws IOException if a resource cannot be written or a path resolved
 */
@TaskAction
public void doStuff() throws IOException {
    // Resolve each replacement value to a plain String via resolveString.
    // NOTE: put() on an existing key does not structurally modify the map,
    // so overwriting values while iterating the entry set is safe here.
    for (Entry<String, Object> entry : replacements.entrySet()) {
        replacements.put(entry.getKey(), resolveString(entry.getValue()));
    }

    // Output location: a temp dir when compiling, otherwise the final start dir.
    File resourceDir = compile ? new File(getTemporaryDir(), "extracted") : getStartOut();

    // Apply the replacements to each resource and write it out.
    for (Entry<String, String> resEntry : resources.entrySet()) {
        String out = resEntry.getValue();
        for (Entry<String, Object> replacement : replacements.entrySet()) {
            out = out.replace(replacement.getKey(), (String) replacement.getValue());
        }

        // Splice the extra lines in at the EXTRA_LINES marker, if any.
        if (!extraLines.isEmpty()) {
            String replacement = Joiner.on('\n').join(extraLines);
            out = out.replace(EXTRA_LINES, replacement);
        }

        // Write the processed resource, creating parent dirs as needed.
        File outFile = new File(resourceDir, resEntry.getKey());
        outFile.getParentFile().mkdirs();
        Files.write(out, outFile, Charsets.UTF_8);
    }

    // Compile the extracted sources, if requested.
    if (compile) {
        final File compiled = getStartOut();
        compiled.mkdirs();

        // Build the compile classpath from the named Gradle configurations.
        FileCollection col = null;
        for (String s : classpath) {
            FileCollection config = getProject().getConfigurations().getByName(s);
            if (col == null)
                col = config;
            else
                col = col.plus(config);
        }

        // Suppress compiler noise on normal runs: raise the log threshold to
        // ERROR unless the user asked for more verbose output.
        LoggingManager log = getLogging();
        LogLevel startLevel = getProject().getGradle().getStartParameter().getLogLevel();
        if (startLevel.compareTo(LogLevel.LIFECYCLE) >= 0) {
            log.setLevel(LogLevel.ERROR);
        }

        // Invoke javac through Ant.
        this.getAnt().invokeMethod("javac",
                ImmutableMap.builder().put("srcDir", resourceDir.getCanonicalPath())
                        .put("destDir", compiled.getCanonicalPath()).put("failonerror", true)
                        .put("includeantruntime", false)
                        .put("classpath", col.getAsPath()) // ant knows how to render a file collection path
                        .put("encoding", "utf-8").put("source", "1.6").put("target", "1.6").put("debug", "true")
                        .build());

        // Copy the sources next to the classes, for debugging through GradleStart.
        getProject().fileTree(resourceDir).visit(new FileVisitor() {
            @Override
            public void visitDir(FileVisitDetails arg0) {
                // directories are ignored; only files are copied
            }

            @Override
            public void visitFile(FileVisitDetails arg0) {
                arg0.copyTo(arg0.getRelativePath().getFile(compiled));
            }
        });
    }
}
From source file:org.apache.gobblin.service.modules.orchestration.FSDagStateStore.java
/** * {@inheritDoc}//from w w w . j a v a 2s . co m */ @Override public synchronized void writeCheckpoint(Dag<JobExecutionPlan> dag) throws IOException { // write to a temporary name then rename to make the operation atomic when the file system allows a file to be // replaced String fileName = DagManagerUtils.generateDagId(dag) + DAG_FILE_EXTENSION; String serializedDag = serializeDag(dag); File checkpointDir = new File(this.dagCheckpointDir); if (!checkpointDir.exists()) { if (!checkpointDir.mkdirs()) { throw new IOException("Could not create dir - " + this.dagCheckpointDir); } } File tmpCheckpointFile = new File(this.dagCheckpointDir, fileName + ".tmp"); File checkpointFile = new File(this.dagCheckpointDir, fileName); Files.write(serializedDag, tmpCheckpointFile, Charsets.UTF_8); Files.move(tmpCheckpointFile, checkpointFile); }
From source file:org.sonar.plugins.python.pylint.PylintIssuesAnalyzer.java
public List<Issue> analyze(String path, Charset charset, File out) throws IOException { Command command = Command.create(pylint).addArguments(pylintArguments.arguments()).addArgument(path); if (pylintConfigParam != null) { command.addArgument(pylintConfigParam); }/*from w w w . j a v a 2 s. c o m*/ LOG.debug("Calling command: '{}'", command.toString()); long timeoutMS = 300000; // =5min CommandStreamConsumer stdOut = new CommandStreamConsumer(); CommandStreamConsumer stdErr = new CommandStreamConsumer(); CommandExecutor.create().execute(command, stdOut, stdErr, timeoutMS); // the error stream can contain a line like 'no custom config found, using default' // any bigger output on the error stream is likely a pylint malfunction if (stdErr.getData().size() > 1) { LOG.warn("Output on the error channel detected: this is probably due to a problem on pylint's side."); LOG.warn("Content of the error stream: \n\"{}\"", StringUtils.join(stdErr.getData(), "\n")); } Files.write(StringUtils.join(stdOut.getData(), "\n"), out, charset); return parseOutput(stdOut.getData()); }
From source file:com.github.rinde.jaamas17.ResultWriter.java
/**
 * Writes an {@code experiment-setup.txt} summary (counts and configuration
 * names) into the experiment directory when the computation starts.
 *
 * @throws IllegalStateException wrapping any {@link IOException} raised while
 *         creating or writing the summary file
 */
@Override
public void startComputing(int numberOfSimulations, ImmutableSet<MASConfiguration> configurations,
        ImmutableSet<Scenario> scenarios, int repetitions, int seedRepetitions) {
    final String nl = System.lineSeparator();
    final StringBuilder summary = new StringBuilder("Experiment summary");
    summary.append(nl);
    summary.append("Number of simulations: ").append(numberOfSimulations).append(nl);
    summary.append("Number of configurations: ").append(configurations.size()).append(nl);
    summary.append("Number of scenarios: ").append(scenarios.size()).append(nl);
    summary.append("Number of repetitions: ").append(repetitions).append(nl);
    summary.append("Number of seed repetitions: ").append(seedRepetitions).append(nl);
    summary.append("Configurations:").append(nl);
    for (final MASConfiguration configuration : configurations) {
        summary.append(configuration.getName()).append(nl);
    }

    final File setupFile = new File(experimentDirectory, "experiment-setup.txt");
    try {
        setupFile.createNewFile();
        Files.write(summary.toString(), setupFile, Charsets.UTF_8);
    } catch (final IOException e) {
        throw new IllegalStateException(e);
    }
}
From source file:com.android.build.gradle.tasks.GdbSetupTask.java
/**
 * Writes a {@code gdb.setup} file into {@code outputDir} containing the
 * solib search path plus the directory list gdb needs: the NDK sysroot
 * include dir followed by every C/C++ source directory and the STL sources.
 */
@TaskAction
public void taskAction() {
    final File gdbSetupFile = new File(outputDir, "gdb.setup");

    // Collect every source directory gdb should know about (C, C++, STL).
    final Set<String> sourceDirs = Sets.newHashSet();
    binary.getSource().withType(CSourceSet.class, new Action<CSourceSet>() {
        @Override
        public void execute(CSourceSet sourceSet) {
            for (File srcDir : sourceSet.getSource().getSrcDirs()) {
                sourceDirs.add(srcDir.toString());
            }
        }
    });
    binary.getSource().withType(CppSourceSet.class, new Action<CppSourceSet>() {
        @Override
        public void execute(CppSourceSet sourceSet) {
            for (File srcDir : sourceSet.getSource().getSrcDirs()) {
                sourceDirs.add(srcDir.toString());
            }
        }
    });
    sourceDirs.addAll(StlConfiguration.getStlSources(ndkHandler, extension.getStl()));

    // Assemble the gdb.setup contents.
    final StringBuilder content = new StringBuilder();
    content.append("set solib-search-path ").append(outputDir.toString()).append("\n");
    content.append("directory ")
            .append(ndkHandler.getSysroot(Abi.getByName(binary.getTargetPlatform().getName())))
            .append("/usr/include ");
    content.append(Joiner.on(' ').join(sourceDirs));

    if (!outputDir.exists()) {
        outputDir.mkdirs();
    }
    try {
        Files.write(content.toString(), gdbSetupFile, Charsets.UTF_8);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}