List of usage examples for com.google.common.io Files write
public static void write(CharSequence from, File to, Charset charset) throws IOException
From source file:org.jcruncher.hbs.HbsProcessor.java
/**
 * Precompiles every Handlebars template part of {@code item}, concatenates the
 * results into a single registration script, and writes it to {@code item.dest}
 * as UTF-8. Progress is reported on stdout.
 *
 * @param item the item whose template parts are precompiled and written out
 * @throws Exception if precompilation or writing the destination file fails
 */
private void processItem(Item item) throws Exception {
    // StringBuilder instead of StringBuffer: the buffer is method-local,
    // so the synchronization StringBuffer provides is pure overhead.
    StringBuilder sb = new StringBuilder();
    sb.append("Handlebars.templates = Handlebars.templates || {};\n\n");
    System.out.print("hbs - processing to " + item.dest.getName() + " ... ");
    for (Part part : item.parts) {
        String tmplpc = precompile(part.content);
        sb.append("\n// template --- ").append(part.name).append(" ---\n");
        sb.append("Handlebars.templates['").append(part.name).append("'] = Handlebars.template(");
        sb.append(tmplpc);
        sb.append("\n);\n");
    }
    // Create the destination directory lazily so callers need not prepare it.
    if (item.dest.getParentFile() != null && !item.dest.getParentFile().exists()) {
        item.dest.getParentFile().mkdirs();
    }
    Files.write(sb.toString(), item.dest, Charsets.UTF_8);
    System.out.println("DONE");
}
From source file:com.google.api.server.spi.tools.GetDiscoveryDocAction.java
/**
 * Generates the Discovery document for each API service class and writes one
 * "&lt;name&gt;-&lt;version&gt;-rest.discovery" file per API into the output directory.
 *
 * @param classPath class path to load service classes and their dependencies
 * @param outputDirPath directory to write output files into; must already exist
 * @param warPath directory or file containing a WAR layout
 * @param serviceClassNames service class names of the API
 * @param debug whether or not to output intermediate output files
 *        (NOTE(review): {@code debug} is not referenced in this method body — confirm intent)
 * @return map from each written discovery-doc file path to its JSON content
 * @throws IllegalArgumentException if {@code outputDirPath} is not a directory
 */
public Map<String, String> getDiscoveryDoc(URL[] classPath, String outputDirPath, String warPath,
        List<String> serviceClassNames, boolean debug)
        throws ClassNotFoundException, IOException, ApiConfigException {
    File outputDir = new File(outputDirPath);
    if (!outputDir.isDirectory()) {
        throw new IllegalArgumentException(outputDirPath + " is not a directory");
    }
    // Service classes are loaded from the caller-supplied class path, with this
    // tool's own class loader as parent.
    ClassLoader classLoader = new URLClassLoader(classPath, getClass().getClassLoader());
    ApiConfig.Factory configFactory = new ApiConfig.Factory();
    TypeLoader typeLoader = new TypeLoader(classLoader);
    DiscoveryGenerator discoveryGenerator = new DiscoveryGenerator(typeLoader);
    List<ApiConfig> apiConfigs = Lists.newArrayListWithCapacity(serviceClassNames.size());
    ApiConfigLoader configLoader = new ApiConfigLoader(configFactory, typeLoader,
            new ApiConfigAnnotationReader(typeLoader.getAnnotationTypes()));
    // The application id is derived from the WAR layout.
    ServiceContext serviceContext = ServiceContext.create(AppEngineUtil.getApplicationId(warPath),
            ServiceContext.DEFAULT_API_NAME);
    // One ApiConfig per service class, all sharing the same service context.
    for (Class<?> serviceClass : loadClasses(classLoader, serviceClassNames)) {
        apiConfigs.add(configLoader.loadConfiguration(serviceContext, serviceClass));
    }
    DiscoveryGenerator.Result result = discoveryGenerator.writeDiscovery(apiConfigs,
            new DiscoveryContext().setHostname(serviceContext.getAppHostName()));
    ObjectWriter writer = ObjectMapperUtil.createStandardObjectMapper().writer(new EndpointsPrettyPrinter());
    ImmutableMap.Builder<String, String> builder = ImmutableMap.builder();
    // Serialize each generated REST description and persist it to disk.
    for (Map.Entry<ApiKey, RestDescription> entry : result.discoveryDocs().entrySet()) {
        ApiKey key = entry.getKey();
        String discoveryDocFilePath = outputDir + "/" + key.getName() + "-" + key.getVersion() + "-rest.discovery";
        String docString = writer.writeValueAsString(entry.getValue());
        Files.write(docString, new File(discoveryDocFilePath), UTF_8);
        builder.put(discoveryDocFilePath, docString);
        System.out.println("API Discovery Document written to " + discoveryDocFilePath);
    }
    return builder.build();
}
From source file:com.opera.core.systems.OperaExtensions.java
/**
 * Creates the widgets directory and the extension configuration file
 * (widgets.dat) if either does not exist yet.
 *
 * @throws IOException if writing widgets.dat fails
 * @throws WebDriverException if the directory path cannot be created
 */
private void createInitialDirectoryIfNecessary() throws IOException {
    // mkdirs() is only attempted when the directory is missing; a failed
    // attempt is fatal because nothing below can proceed without it.
    if (!directory.exists() && !directory.mkdirs()) {
        throw new WebDriverException("Unable to create directory path: " + directory.getPath());
    }
    if (!widgetsDat.exists()) {
        Files.write(WIDGET_DAT_CONTENT, widgetsDat, Charsets.UTF_8);
    }
}
From source file:org.apache.whirr.service.hadoop.HadoopNameNodeClusterActionHandler.java
/**
 * Writes an executable hadoop-proxy.sh script into the cluster's config
 * directory so users can tunnel to the Hadoop namenode.
 *
 * @param clusterSpec specification of the cluster being launched
 * @param cluster the running cluster to proxy to
 */
private void createProxyScript(ClusterSpec clusterSpec, Cluster cluster) {
    File scriptFile = new File(getConfigDir(clusterSpec), "hadoop-proxy.sh");
    try {
        HadoopProxy proxy = new HadoopProxy(clusterSpec, cluster);
        InetAddress namenode = HadoopCluster.getNamenodePublicAddress(cluster);
        String banner = String.format(
                "echo 'Running proxy to Hadoop cluster at %s. " + "Use Ctrl-c to quit.'\n",
                namenode.getHostName());
        String command = Joiner.on(" ").join(proxy.getProxyCommand());
        Files.write(banner + command, scriptFile, Charsets.UTF_8);
        // The script is meant to be run directly by the user.
        scriptFile.setExecutable(true);
        LOG.info("Wrote Hadoop proxy script {}", scriptFile);
    } catch (IOException e) {
        // Best-effort: a missing proxy script is inconvenient, not fatal.
        LOG.error("Problem writing Hadoop proxy script {}", scriptFile, e);
    }
}
From source file:org.pshdl.model.utils.PSAbstractCompiler.java
/**
 * Writes the compiled code and any auxiliary side files of {@code result}
 * into {@code outDir}.
 *
 * @param outDir directory the output files are written under
 * @param result the compile result; nothing is written if it has errors
 * @return the files that were written (empty array on compile error)
 * @throws IOException if any file cannot be written
 * @throws IllegalArgumentException if a needed subdirectory cannot be created
 */
public static File[] writeFiles(File outDir, CompileResult result) throws FileNotFoundException, IOException {
    if (result.hasError()) {
        return new File[0];
    }
    // ArrayList: only appended to and converted to an array — no need for LinkedList.
    final List<File> res = new ArrayList<File>();
    final File target = new File(outDir, result.fileName);
    res.add(target);
    Files.write(result.code, target, StandardCharsets.UTF_8);
    if (result.sideFiles != null) {
        for (final AuxiliaryContent sd : result.sideFiles) {
            // File(parent, child) handles the path separator portably.
            final File file = new File(outDir, sd.relPath);
            res.add(file);
            final File parentFile = file.getParentFile();
            if ((parentFile != null) && !parentFile.exists()) {
                if (!parentFile.mkdirs()) {
                    throw new IllegalArgumentException("Failed to create directory:" + parentFile);
                }
            }
            if (sd.contents == AuxiliaryContent.THIS) {
                // Sentinel meaning "same content as the main result".
                Files.write(result.code, file, StandardCharsets.UTF_8);
            } else {
                Files.write(sd.contents, file);
            }
        }
    }
    // new File[0] lets the JVM size the array; preferred over new File[res.size()].
    return res.toArray(new File[0]);
}
From source file:scoutdoc.main.fetch.ScoutDocFetch.java
/**
 * Fetches all wiki pages changed since the last recorded run and processes
 * them (with their categories, images, templates and links), then records the
 * newest change timestamp back into info.txt for the next run.
 *
 * @param pageFilter decides which changed pages are kept for processing
 */
public void executeRecentChanges(IPageFilter pageFilter) {
    String lastTimestamp = null;
    // info.txt stores the timestamp of the newest change seen by a previous run.
    File f = new File(ProjectProperties.getFolderWikiSource(), "info.txt");
    if (f.exists() && f.canRead()) {
        try {
            String line = Files.readFirstLine(f, Charsets.UTF_8);
            lastTimestamp = line;
        } catch (IOException e) {
            e.printStackTrace();
        } catch (NumberFormatException e) {
            e.printStackTrace();
        }
    }
    if (lastTimestamp == null) {
        // No recorded timestamp: fall back to the newest timestamp found in the
        // already-downloaded pages (or the epoch-like sentinel if none exist).
        lastTimestamp = "0000-00-00T00:00:00Z";
        Collection<Page> pages;
        try {
            pages = PageUtility.loadPages(ProjectProperties.getFolderWikiSource());
            for (Page page : pages) {
                String timestamp = ApiFileUtility.readTimestamp(PageUtility.toApiFile(page));
                // Timestamps are ISO-8601, so lexicographic compare orders them chronologically.
                if (timestamp.compareTo(lastTimestamp) > 0) {
                    lastTimestamp = timestamp;
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    List<Page> pages = new ArrayList<Page>();
    // Page through the MediaWiki recentchanges API, 50 entries at a time,
    // following the query-continue cursor until it is exhausted.
    String queryContinue = lastTimestamp;
    while (queryContinue != null) {
        Map<String, String> parameters = new LinkedHashMap<String, String>();
        parameters.put("action", "query");
        parameters.put("list", "recentchanges");
        parameters.put("rcprop", Joiner.on("|").join("title", "timestamp", "ids"));
        parameters.put("rcdir", "newer");
        parameters.put("rclimit", "50");
        parameters.put("rcstart", queryContinue);
        parameters.put("format", "xml");
        try {
            // NOTE(review): "downlaod" is a typo in the helper's name (defined elsewhere
            // in this class) — kept as-is since renaming it is outside this method.
            String queryContent = downlaod(
                    UrlUtility.createFullUrl(ProjectProperties.getWikiApiUrl(), parameters));
            List<Page> rcPages = ApiFileUtility.createPages(queryContent, "//recentchanges/rc");
            for (Page page : rcPages) {
                if (pageFilter.keepPage(page)) {
                    pages.add(page);
                }
            }
            List<String> pageTimestamps = ApiFileUtility.readValues(queryContent,
                    "//recentchanges/rc/@timestamp");
            // Track the newest timestamp seen so it can be persisted at the end.
            lastTimestamp = Collections.max(pageTimestamps);
            // Absent @rcstart means no more pages: readValue yields null and the loop ends.
            queryContinue = ApiFileUtility.readValue(queryContent,
                    "//query-continue/recentchanges/@rcstart");
        } catch (IOException e) {
            e.printStackTrace();
            queryContinue = null;
        } catch (TransformerException e) {
            e.printStackTrace();
            queryContinue = null;
        }
    }
    execute(pages, RelatedPagesStrategy.CATEGORIES_IMAGES_TEMPLATES_AND_LINKS);
    // Persist the newest timestamp for the next incremental run.
    try {
        Files.write(lastTimestamp, f, Charsets.UTF_8);
    } catch (IOException e) {
        e.printStackTrace();
    }
}
From source file:es.ehu.si.ixa.pipe.convert.Convert.java
/** * Calls the ancorat2treebank function to generate Penn Treebank trees from * Ancora XML constituent parsing./* w w w. ja va2 s.c o m*/ * * @param dir * the directory containing the documents * @throws IOException * if io problems */ public void processAncoraConstituentXMLCorpus(File dir) throws IOException { // process one file if (dir.isFile()) { File outfile = new File(Files.getNameWithoutExtension(dir.getPath()) + ".th"); String outTree = ancora2treebank(dir); Files.write(outTree, outfile, Charsets.UTF_8); System.err.println(">> Wrote XML ancora file to Penn Treebank in " + outfile); } else { // recursively process directories File listFile[] = dir.listFiles(); if (listFile != null) { for (int i = 0; i < listFile.length; i++) { if (listFile[i].isDirectory()) { processAncoraConstituentXMLCorpus(listFile[i]); } else { try { File outfile = new File(Files.getNameWithoutExtension((listFile[i].getPath()) + ".th")); String outTree = ancora2treebank(listFile[i]); Files.write(outTree, outfile, Charsets.UTF_8); System.err.println(">> Wrote XML Ancora file Penn treebank format in " + outfile); } catch (FileNotFoundException noFile) { continue; } } } } } }
From source file:com.android.build.gradle.tasks.JackTask.java
private File computeEcjOptionFile() throws IOException { File folder = getTempFolder(); //noinspection ResultOfMethodCallIgnored folder.mkdirs();//from w w w.j a v a 2s . c o m File file = new File(folder, "ecj-options.txt"); StringBuilder sb = new StringBuilder(); for (File sourceFile : getSource().getFiles()) { sb.append(sourceFile.getAbsolutePath()).append("\n"); } //noinspection ResultOfMethodCallIgnored file.getParentFile().mkdirs(); Files.write(sb.toString(), file, Charsets.UTF_8); return file; }
From source file:org.pshdl.model.simulation.codegenerator.JavaClassRuntimeLoader.java
public Class<?> compileClass(String mainClassFQN, String sourceCode) throws Exception { final String pathName = mainClassFQN.replace('.', File.separatorChar) + ".java"; final File sourceFile = new File(tempDir, pathName); final File pkgDir = sourceFile.getParentFile(); if (pkgDir == null) throw new IllegalArgumentException("Failed to get parent of:" + sourceFile); if (!pkgDir.exists() && !pkgDir.mkdirs()) throw new IllegalArgumentException("Failed to create package directories:" + pkgDir); Files.write(sourceCode, sourceFile, StandardCharsets.UTF_8); final StringWriter error = new StringWriter(); final ErrorCheckDiagnostic diagnostic = new ErrorCheckDiagnostic(); compiler.getTask(error, fileManager, diagnostic, null, null, fileManager.getJavaFileObjectsFromFiles(Arrays.asList(sourceFile))).call(); if (diagnostic.kind == Kind.ERROR) throw new DiagnosticsException(diagnostic.diagnostics); // Load and instantiate compiled class. return Class.forName(mainClassFQN, true, classLoader); }
From source file:io.airlift.airship.coordinator.HttpServiceInventory.java
private List<ServiceDescriptor> getServiceInventory(SlotStatus slotStatus) { Assignment assignment = slotStatus.getAssignment(); if (assignment == null) { return null; }//from w w w. j a v a 2s. com String config = assignment.getConfig(); File cacheFile = getCacheFile(config); if (cacheFile.canRead()) { try { String json = Files.asCharSource(cacheFile, Charsets.UTF_8).read(); List<ServiceDescriptor> descriptors = descriptorsJsonCodec.fromJson(json); invalidServiceInventory.remove(config); return descriptors; } catch (Exception ignored) { // delete the bad cache file cacheFile.delete(); } } ByteSource configFile = ConfigUtils.newConfigEntrySupplier(repository, config, "airship-service-inventory.json"); if (configFile == null) { return null; } try { String json; try { json = configFile.asCharSource(Charsets.UTF_8).read(); } catch (FileNotFoundException e) { // no service inventory in the config, so replace with json null so caching works json = "null"; } invalidServiceInventory.remove(config); // cache json cacheFile.getParentFile().mkdirs(); Files.write(json, cacheFile, Charsets.UTF_8); List<ServiceDescriptor> descriptors = descriptorsJsonCodec.fromJson(json); return descriptors; } catch (Exception e) { if (invalidServiceInventory.add(config)) { log.error(e, "Unable to read service inventory for %s" + config); } } return null; }