Usage examples for com.google.common.io.Files#write(CharSequence, File, Charset), collected from open-source projects.
public static void write(CharSequence from, File to, Charset charset) throws IOException
From source file:net.sf.maven.plugin.autotools.FileUtils.java
/** * Replace all occurences, in <code>files</code>, of <code>regex</code> with <code>replace</code> * @param regex the original text to replace * @param replace the placeholder/*from w w w.j a v a2s . co m*/ * @param files the files */ public static void replace(Log log, @Nonnull String regex, @Nullable String replace, File... files) throws IOException { if (files == null) return; Preconditions.checkArgument(!Strings.isNullOrEmpty(regex), "regex is empty"); replace = Strings.nullToEmpty(replace); for (File file : files) { log.debug(String.format("Replacing [%s] with [%s] in [%s]", regex, replace, file.getAbsolutePath())); String text = Files.toString(file, Charset.defaultCharset()); Files.write(text.replaceAll(regex, replace), file, Charset.defaultCharset()); } }
From source file:co.cask.cdap.explore.guice.LocalMapreduceClasspathSetter.java
/**
 * Generates an {@code explore_hadoop} wrapper script that prepends the Hive aux
 * jars and the hbase-protocol jars to HADOOP_CLASSPATH for local mapreduce jobs,
 * marks it executable, and points HiveConf's HADOOPBIN at it.
 *
 * @throws IOException if the wrapper script cannot be written
 * @throws RuntimeException if the script cannot be marked executable
 */
public void setupClasspathScript() throws IOException {
    // Nothing to do when no hbase-protocol jar was discovered.
    if (hbaseProtocolJarPaths.isEmpty()) {
        LOG.info("No HBase Protocol jar found. Not setting up HADOOP_CLASSPATH");
        return;
    }
    File exploreHadoopBin = new File(directory, "explore_hadoop");
    LOG.info("Adding {} to HADOOP_CLASSPATH", hbaseProtocolJarPaths);
    // The real hadoop binary we delegate to at the end of the generated script.
    String hadoopBin = hiveConf.get(HiveConf.ConfVars.HADOOPBIN.toString());
    // We over-ride HADOOPBIN setting in HiveConf to the script below, so that Hive uses this script to execute
    // map reduce jobs.
    // The below script updates HADOOP_CLASSPATH to contain hbase-protocol jar for RunJar commands,
    // so that the right version of protocol buffer jar gets loaded for HBase.
    // It also puts all the user jars, ie hive aux jars, in this classpath and in first position, so that
    // the right version of ASM jar gets loaded for Twill.
    // It then calls the real Hadoop bin with the same arguments.
    StringBuilder fileBuilder = new StringBuilder();
    fileBuilder.append("#!/usr/bin/env bash\n");
    fileBuilder.append("# This file is a hack to set HADOOP_CLASSPATH for Hive local mapreduce tasks.\n");
    fileBuilder.append("# This hack should go away when Twill supports setting of environmental variables for a ");
    fileBuilder.append("TwillRunnable.\n");
    fileBuilder.append("\n");
    // Shell helper that joins its remaining arguments with the first argument as separator.
    fileBuilder.append("function join { local IFS=\"$1\"; shift; echo \"$*\"; }\n");
    // Only rewrite the classpath for "hadoop jar ..." invocations.
    fileBuilder.append("if [ $# -ge 1 -a \"$1\" = \"jar\" ]; then\n");
    fileBuilder.append(" HADOOP_CLASSPATH=$(join ").append(File.pathSeparatorChar).append(" ")
            .append(Joiner.on(' ').join(hiveAuxJars)).append(" ${HADOOP_CLASSPATH} ")
            .append(Joiner.on(' ').join(hbaseProtocolJarPaths)).append(')').append("\n");
    fileBuilder.append(" # Put user jars first in Hadoop classpath so that the ASM jar needed by Twill has\n");
    fileBuilder.append(" # the right version, and not the one provided with the Hadoop libs.\n");
    fileBuilder.append(" export HADOOP_USER_CLASSPATH_FIRST=true\n");
    // NOTE(review): this literal was split by a line break in the archived copy
    // and has been rejoined here — confirm original in-script spacing upstream.
    fileBuilder.append(" export HADOOP_CLASSPATH\n");
    fileBuilder.append(" echo \"Explore modified HADOOP_CLASSPATH = $HADOOP_CLASSPATH\" 1>&2\n");
    fileBuilder.append("fi\n");
    fileBuilder.append("\n");
    // Delegate to the real hadoop binary with the original arguments.
    fileBuilder.append("exec ").append(hadoopBin).append(" \"$@\"\n");
    Files.write(fileBuilder.toString(), exploreHadoopBin, Charsets.UTF_8);
    // Executable by everyone, not just the owner, since Hive may run as another user.
    if (!exploreHadoopBin.setExecutable(true, false)) {
        throw new RuntimeException("Cannot set executable permission on " + exploreHadoopBin.getAbsolutePath());
    }
    LOG.info("Setting Hadoop bin to Explore Hadoop bin {}", exploreHadoopBin.getAbsolutePath());
    // Redirect Hive's HADOOPBIN to the wrapper we just wrote.
    System.setProperty(HiveConf.ConfVars.HADOOPBIN.toString(), exploreHadoopBin.getAbsolutePath());
}
From source file:com.comphenix.protocol.ProtocolConfig.java
/** * Store the given time stamp./*w ww. jav a 2 s.com*/ * * @param value - time stamp to store. */ private void saveLastUpdate(long value) { File dataFile = getLastUpdateFile(); // The data folder must exist dataFile.getParentFile().mkdirs(); if (dataFile.exists()) dataFile.delete(); try { Files.write(Long.toString(value), dataFile, Charsets.UTF_8); } catch (IOException e) { throw new RuntimeException("Cannot write " + dataFile, e); } }
From source file:me.hzhou.ext.jfinal.test.ControllerTestCase.java
/**
 * Runs the configured action through the JFinal handler and returns the
 * response body as a string.
 * <p>
 * When {@code bodyFile} is set, its content becomes the request body; when
 * {@code responseFile} is set, the response text is also written there.
 *
 * @return the text written to the mock response
 */
public String invoke() {
    if (bodyFile != null) {
        List<String> req = null;
        try {
            req = Files.readLines(bodyFile, Charsets.UTF_8);
        } catch (IOException e) {
            // IDIOM FIX: throw the result of propagate() (documented Guava usage)
            // so the compiler and readers see that control flow ends here.
            throw Throwables.propagate(e);
        }
        bodyData = Joiner.on("").join(req);
    }
    StringWriter resp = new StringWriter();
    request = new MockHttpRequest(bodyData);
    response = new MockHttpResponse(resp);
    // Invoke the (non-public) handle method reflectively.
    Reflect.on(handler).call("handle", getTarget(actionUrl, request), request, response, new boolean[] { true });
    // Renamed from 'response' to stop shadowing the MockHttpResponse field above.
    String responseText = resp.toString();
    if (responseFile != null) {
        try {
            Files.write(responseText, responseFile, Charsets.UTF_8);
        } catch (IOException e) {
            throw Throwables.propagate(e);
        }
    }
    return responseText;
}
From source file:me.emily.config.PlusConfig.java
/**
 * Configures Guice bindings for Google API access: JSON/HTTP plumbing, a
 * URL-shortener function, client-secrets loading, a disk-backed credential
 * store, and an eager OAuth2 native flow.
 */
@Override
protected void configure() {
    // Needed to bind a parameterized Function<String, String>.
    TypeLiteral<Function<String, String>> literal = new TypeLiteral<Function<String, String>>() {
    };
    final Gson gson = new GsonBuilder().setPrettyPrinting().create();
    final GsonFactory jsonFactory = new GsonFactory();
    final NetHttpTransport transport = new NetHttpTransport();
    bind(Gson.class).toInstance(gson);
    bind(JsonFactory.class).toInstance(jsonFactory);
    bind(HttpTransport.class).toInstance(transport);
    final Urlshortener shortener = new Urlshortener.Builder(transport, jsonFactory, null).build();
    // URL-shortening function; on failure it logs and falls back to the input URL.
    // (Annotation name "shortner" is a pre-existing typo kept for binding compatibility.)
    bind(literal).annotatedWith(Names.named("shortner")).toInstance(new Function<String, String>() {
        @Override
        public String apply(String input) {
            try {
                return shortener.url().insert(new Url().setLongUrl(input)).execute().getId();
            } catch (IOException e) {
                log.error(e.getMessage(), e);
                return input;
            }
        }
    });
    // Lazily loads client-secrets.json from the context directory on each injection.
    bind(GoogleClientSecrets.class).toProvider(new Provider<GoogleClientSecrets>() {
        @Override
        public GoogleClientSecrets get() {
            try {
                File secrets = new File(Context.get(Context.contextDirectory), "client-secrets.json");
                log.warn("Loading secrets from {}", secrets.getAbsolutePath());
                return GoogleClientSecrets.load(jsonFactory, new FileInputStream(secrets));
            } catch (Exception e) {
                // propagate() always throws; the return is only to satisfy the compiler.
                Throwables.propagate(e);
                return null;
            }
        }
    });
    // Credential store backed by an in-memory cache with a JSON-file fallback on disk.
    bind(CredentialStore.class).toInstance(new CredentialStore() {
        // Entries expire 300s after last access; misses fall back to loadFromDisc.
        Cache<String, SerializableCredentials> cache = CacheBuilder.newBuilder()
                .expireAfterAccess(300, TimeUnit.SECONDS).build();
        File directory = new File(Context.get(Context.contextDirectory), "/creds");
        {
            // Best effort: result ignored; store() will fail later if it didn't exist.
            directory.mkdir();
        }

        @Override
        public void store(String userId, Credential credential) throws IOException {
            SerializableCredentials permCreds = new SerializableCredentials(credential);
            cache.put(userId, permCreds);
            Files.write(gson.toJson(permCreds), credFile(userId), Charsets.UTF_8);
        }

        @Override
        public boolean load(String userId, Credential credential) throws IOException {
            // Try the cache first, then disk; push() copies stored state into 'credential'.
            SerializableCredentials cred = cache.getIfPresent(userId);
            if (cred != null) {
                cred.push(credential);
                return true;
            }
            cred = loadFromDisc(userId);
            if (cred != null) {
                cred.push(credential);
                cache.put(userId, cred);
                return true;
            }
            return false;
        }

        // Reads the user's credential JSON from disk, or returns null if absent.
        private SerializableCredentials loadFromDisc(String userId)
                throws JsonSyntaxException, JsonIOException, FileNotFoundException {
            File credfile = credFile(userId);
            if (credfile.exists()) {
                // NOTE(review): the FileReader is never closed and uses the platform
                // charset — confirm and consider fixing upstream.
                return gson.fromJson(new FileReader(credfile), SerializableCredentials.class);
            }
            return null;
        }

        @Override
        public void delete(String userId, Credential credential) throws IOException {
            cache.invalidate(userId);
            File credFile = credFile(userId);
            if (credFile.exists()) {
                credFile.delete();
            }
        }

        // Maps a user id to its credential file, sanitizing the id for use as a filename.
        private File credFile(String userId) {
            // NOTE(review): the regex "(\\|/)" matches only the literal two-character
            // sequence "|/"; if the intent was to replace either '|' or '/', the
            // pattern should be "[|/]" — confirm before changing.
            return new File(directory, userId.replaceAll("(\\|/)", ".") + ".cred.json");
        }
    });
    bind(OAuth2Native.class).asEagerSingleton();
}
From source file:org.sonar.plugins.delphi.pmd.DelphiPmdSensor.java
private File dumpXmlRuleSet(String repositoryKey, String rulesXml) { try {//from w ww. ja v a2 s .c o m File configurationFile = new File(delphiProjectHelper.workDir(), repositoryKey + ".xml"); Files.write(rulesXml, configurationFile, Charsets.UTF_8); DelphiUtils.LOG.info("PMD configuration: " + configurationFile.getAbsolutePath()); return configurationFile; } catch (IOException e) { throw new IllegalStateException("Fail to save the PMD configuration", e); } }
From source file:com.squareup.osstrich.JavadocPublisher.java
/**
 * Writes one {@code index.html} per major version listing that version's
 * artifacts as links, and stages each generated file with git add.
 *
 * @param groupId   group whose artifacts are being indexed (used as page title)
 * @param artifacts artifacts keyed by major version
 * @throws IOException if an index file cannot be written
 */
private void writeIndexFiles(String groupId, Multimap<String, Artifact> artifacts) throws IOException {
    for (String majorVersion : artifacts.keySet()) {
        StringBuilder html = new StringBuilder();
        html.append("<!DOCTYPE html>\n<html><head><title>").append(groupId)
                .append("</title></head>\n<body>\n<h1>").append(groupId).append("</h1>\n<ul>\n");
        for (Artifact artifact : artifacts.get(majorVersion)) {
            // BUG FIX: close the anchor tag — the original emitted
            // "<li><a ...>text</li>" with no "</a>", producing malformed HTML.
            html.append("<li><a href=\"").append(artifact.artifactId).append("\">").append(artifact.artifactId)
                    .append("</a></li>\n");
        }
        html.append("</ul>\n</body>\n</html>");
        File indexHtml = new File(directory + "/" + majorVersion + "/index.html");
        Files.write(html, indexHtml, UTF_8);
        gitAdd(indexHtml);
    }
}
From source file:org.jboss.maven.plugins.qstools.fixers.JavaSourcesFormatFixer.java
/**
 * Reformats every Java source file in the project with the Eclipse code
 * formatter, configured from the quickstart rules' formatter profile, and
 * rewrites each file in place.
 *
 * @param project the Maven project whose sources are formatted
 * @param doc     the project's pom document (unused here, part of the fixer contract)
 * @throws Exception if the profile cannot be read or a source cannot be formatted
 */
@SuppressWarnings("unchecked")
@Override
public void fixProject(MavenProject project, Document doc) throws Exception {
    Rules rules = getConfigurationProvider().getQuickstartsRules(project.getGroupId());
    // Read DefaultEclipseSettings
    Map<String, String> options = DefaultCodeFormatterConstants.getEclipseDefaultSettings();
    // initialize the compiler settings to be able to format 1.6 code
    String compilerSource = rules.getExpectedCompilerSource();
    options.put(JavaCore.COMPILER_COMPLIANCE, compilerSource);
    options.put(JavaCore.COMPILER_CODEGEN_TARGET_PLATFORM, compilerSource);
    options.put(JavaCore.COMPILER_SOURCE, compilerSource);
    // Configure CodeFormatter with Eclipse XML Formatter Profile
    InputStream xmlInputStream = resources
            .getExpirationalFileInputStream(new URL(rules.getEclipseFormatterProfileLocation()));
    Document formatterSettingsDoc = PositionalXMLReader.readXML(xmlInputStream);
    // Copy every <setting id=... value=...> from the profile into the options map.
    NodeList settingsNodes = formatterSettingsDoc.getElementsByTagName("setting");
    for (int i = 0; i < settingsNodes.getLength(); i++) {
        Node node = settingsNodes.item(i);
        String id = node.getAttributes().getNamedItem("id").getTextContent();
        String value = node.getAttributes().getNamedItem("value").getTextContent();
        options.put(id, value);
    }
    // Instantiate the default code formatter with the given options
    CodeFormatter codeFormatter = ToolFactory.createCodeFormatter(options);
    // Apply the formatter to every Java source under the project's folder
    List<File> javaSources = FileUtils.getFiles(project.getBasedir(), "**/*.java", "");
    for (File javaSource : javaSources) {
        getLog().debug("Formating " + javaSource);
        String source = Files.toString(javaSource, Charset.forName("UTF-8"));
        TextEdit edit = codeFormatter.format(CodeFormatter.K_COMPILATION_UNIT, // format a compilation unit
                source, // source to format
                0, // starting position
                source.length(), // length
                0, // initial indentation
                System.getProperty("line.separator") // line separator
        );
        // NOTE(review): CodeFormatter.format may return null when the source
        // cannot be formatted, which would NPE in edit.apply — confirm upstream.
        IDocument document = new org.eclipse.jface.text.Document(source);
        edit.apply(document);
        Files.write(document.get(), javaSource, Charset.forName("UTF-8"));
    }
}
From source file:com.eucalyptus.reporting.dw.commands.ReportCommand.java
/**
 * Generates a usage report from command-line arguments and either writes it
 * to the file given by {@code file} or prints it to standard output.
 * <p>
 * Supported arguments: {@code type}, {@code format}, {@code start}, {@code end},
 * {@code time-unit}, {@code size-unit}, {@code size-time-time-unit},
 * {@code size-time-size-unit}, {@code file}.
 *
 * @param arguments parsed command-line arguments
 */
@Override
protected void runCommand(final Arguments arguments) {
    // Defaults: report over the default period, as HTML, for instance usage.
    final Period defaultPeriod = Period.defaultPeriod();
    final String type = arguments.getArgument("type", "instance");
    final String format = arguments.getArgument("format", "html");
    final String start = arguments.getArgument("start", formatDate(defaultPeriod.getBeginningMs()));
    final String end = arguments.getArgument("end", formatDate(defaultPeriod.getEndingMs()));
    // Display units fall back to the system defaults when not given; the
    // size-time units further default to the plain time/size units chosen above.
    final TimeUnit timeUnit = TimeUnit.fromString(arguments.getArgument("time-unit", null),
            Units.getDefaultDisplayUnits().getTimeUnit());
    final SizeUnit sizeUnit = SizeUnit.fromString(arguments.getArgument("size-unit", null),
            Units.getDefaultDisplayUnits().getSizeUnit());
    final TimeUnit sizeTimeTimeUnit = TimeUnit.fromString(
            arguments.getArgument("size-time-time-unit", timeUnit.name()),
            Units.getDefaultDisplayUnits().getSizeTimeTimeUnit());
    final SizeUnit sizeTimeSizeUnit = SizeUnit.fromString(
            arguments.getArgument("size-time-size-unit", sizeUnit.name()),
            Units.getDefaultDisplayUnits().getSizeTimeSizeUnit());
    // Null file name means "print to stdout" below.
    final String reportFilename = arguments.getArgument("file", null);
    long startTime = parseDate(start, "start");
    long endTime = parseDate(end, "end");
    final String reportData;
    try {
        final Units units = new Units(timeUnit, sizeUnit, sizeTimeTimeUnit, sizeTimeSizeUnit);
        reportData = ReportGenerationFacade.generateReport(type, format, units, startTime, endTime);
    } catch (ReportGenerationArgumentException e) {
        // NOTE(review): only the message is kept; the cause is dropped — confirm
        // whether ArgumentException supports a cause and should carry it.
        throw new ArgumentException(e.getMessage());
    } catch (ReportGenerationException e) {
        throw Exceptions.toUndeclared(e);
    }
    if (reportFilename != null) {
        try {
            Files.write(reportData, new File(reportFilename), Charsets.UTF_8);
        } catch (IOException e) {
            throw Exceptions.toUndeclared(e);
        }
    } else {
        System.out.println(reportData);
    }
}
From source file:com.streamsets.datacollector.main.RuntimeInfo.java
/**
 * Returns this SDC's persistent unique id, creating {@code <dir>/sdc.id} with
 * a random UUID on first use.
 *
 * @param dir data directory that holds (or will hold) the id file
 * @return the id read from the first line of the file, trimmed
 * @throws RuntimeException if the directory or file cannot be created or read
 */
protected String getSdcId(String dir) {
    File dataDir = new File(dir);
    if (!dataDir.exists()) {
        if (!dataDir.mkdirs()) {
            throw new RuntimeException(Utils.format("Could not create data directory '{}'", dataDir));
        }
    }
    File idFile = new File(dataDir, "sdc.id");
    if (!idFile.exists()) {
        try {
            Files.write(UUID.randomUUID().toString(), idFile, StandardCharsets.UTF_8);
        } catch (IOException ex) {
            throw new RuntimeException(
                    Utils.format("Could not create SDC ID file '{}': {}", idFile, ex.toString(), ex));
        }
    }
    try {
        String id = Files.readFirstLine(idFile, StandardCharsets.UTF_8);
        // BUG FIX: Guava readFirstLine returns null for an empty file; the
        // original then NPE'd on trim() instead of reporting a usable error.
        if (id == null) {
            throw new IOException("SDC ID file is empty");
        }
        return id.trim();
    } catch (IOException ex) {
        throw new RuntimeException(
                Utils.format("Could not read SDC ID file '{}': {}", idFile, ex.toString(), ex));
    }
}