Example usage for com.google.common.io Files asCharSink

List of usage examples for com.google.common.io Files asCharSink

Introduction

This page collects real-world usage examples of com.google.common.io.Files.asCharSink.

Prototype

public static CharSink asCharSink(File to, Charset charset) 

Source Link

Usage

From source file:org.glowroot.agent.plugin.kafka.KafkaWrapper.java

/**
 * Downloads the Kafka distribution archive, extracts it under {@code baseDir},
 * and patches the bundled zookeeper config so its state lives under the build's
 * target directory.
 *
 * @param baseDir directory the archive is extracted into
 * @param kafkaDir extracted distribution root; its {@code config} subdirectory is patched
 * @throws IOException if the download, extraction, or config rewrite fails
 */
private static void downloadAndExtract(File baseDir, File kafkaDir) throws IOException {
    // using System.out to make sure user sees why there is a big delay here
    System.out.print("Downloading Kafka " + KAFKA_VERSION + "...");
    URL url = new URL("https://archive.apache.org/dist/kafka/" + KAFKA_VERSION + "/kafka_" + SCALA_VERSION + "-"
            + KAFKA_VERSION + ".tgz");
    File archiveFile = File.createTempFile("kafka_" + SCALA_VERSION + "-" + KAFKA_VERSION + "-", ".tar.gz");
    // try-with-resources so the download stream is closed even if writeFrom() throws
    try (InputStream in = url.openStream()) {
        Files.asByteSink(archiveFile).writeFrom(in);
    }
    Archiver archiver = ArchiverFactory.createArchiver(ArchiveFormat.TAR, CompressionType.GZIP);
    archiver.extract(archiveFile, baseDir);
    // best-effort cleanup; fall back to deletion at JVM exit if the file is still locked
    if (!archiveFile.delete()) {
        archiveFile.deleteOnExit();
    }
    System.out.println(" OK");

    File configDir = new File(kafkaDir, "config");
    // put zookeeper state under target directory so it can be cleaned up, otherwise it
    // maintains kafka connection info and can cause subsequent runs to fail with "Error while
    // creating ephemeral at /brokers/ids/0, node already exists and owner does not match
    // current session"
    File zookeeperPropertiesFile = new File(configDir, "zookeeper.properties");
    String contents = Files.asCharSource(zookeeperPropertiesFile, UTF_8).read();
    contents = contents.replace("dataDir=/tmp/zookeeper", "dataDir=../../target/zookeeper");
    Files.asCharSink(zookeeperPropertiesFile, UTF_8).write(contents);
}

From source file:org.glowroot.tests.CassandraWrapper.java

/**
 * Downloads the Cassandra distribution archive, extracts it under {@code baseDir},
 * quiets its stdout logging, and lengthens request timeouts for slow CI machines.
 *
 * @param baseDir directory the archive is extracted into
 * @throws IOException if the download, extraction, or config rewrite fails
 */
private static void downloadAndExtract(File baseDir) throws IOException {
    // using System.out to make sure user sees why there is a big delay here
    System.out.print("Downloading Cassandra " + CASSANDRA_VERSION + "...");
    URL url = new URL("http://www-us.apache.org/dist/cassandra/" + CASSANDRA_VERSION + "/apache-cassandra-"
            + CASSANDRA_VERSION + "-bin.tar.gz");
    File archiveFile = File.createTempFile("cassandra-" + CASSANDRA_VERSION + "-", ".tar.gz");
    // try-with-resources so the download stream is closed even if writeFrom() throws
    try (InputStream in = url.openStream()) {
        Files.asByteSink(archiveFile).writeFrom(in);
    }
    Archiver archiver = ArchiverFactory.createArchiver(ArchiveFormat.TAR, CompressionType.GZIP);
    archiver.extract(archiveFile, baseDir);
    // best-effort cleanup; fall back to deletion at JVM exit if the file is still locked
    if (!archiveFile.delete()) {
        archiveFile.deleteOnExit();
    }
    System.out.println(" OK");

    File cassandraDir = new File(baseDir, "apache-cassandra-" + CASSANDRA_VERSION);
    File confDir = new File(cassandraDir, "conf");
    // reduce logging to stdout
    File logbackXmlFile = new File(confDir, "logback.xml");
    String xml = Files.asCharSource(logbackXmlFile, UTF_8).read();
    xml = xml.replace("<root level=\"INFO\">", "<root level=\"ERROR\">");
    xml = xml.replace("<logger name=\"org.apache.cassandra\" level=\"DEBUG\"/>", "");
    Files.asCharSink(logbackXmlFile, UTF_8).write(xml);
    // long timeouts needed on slow travis ci machines
    File yamlFile = new File(confDir, "cassandra.yaml");
    String yaml = Files.asCharSource(yamlFile, UTF_8).read();
    yaml = yaml.replaceAll("(?m)^read_request_timeout_in_ms: .*$", "read_request_timeout_in_ms: 30000");
    yaml = yaml.replaceAll("(?m)^write_request_timeout_in_ms: .*$", "write_request_timeout_in_ms: 30000");
    Files.asCharSink(yamlFile, UTF_8).write(yaml);
}

From source file:org.glowroot.agent.plugin.elasticsearch.ElasticsearchWrapper.java

/**
 * Downloads the Elasticsearch distribution archive for the configured version
 * (2.x uses a different download host than 5.x/6.x), extracts it under
 * {@code baseDir}, and reduces the default stdout logging level.
 *
 * @param baseDir directory the archive is extracted into
 * @throws IOException if the download, extraction, or config rewrite fails
 * @throws IllegalStateException if {@code ELASTICSEARCH_VERSION} is not a supported 2.x/5.x/6.x version
 */
private static void downloadAndExtract(File baseDir) throws IOException {
    // using System.out to make sure user sees why there is a big delay here
    System.out.print("Downloading Elasticsearch " + ELASTICSEARCH_VERSION + "...");
    URL url;
    if (ELASTICSEARCH_VERSION.startsWith("5.") || ELASTICSEARCH_VERSION.startsWith("6.")) {
        url = new URL("https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-"
                + ELASTICSEARCH_VERSION + ".tar.gz");
    } else if (ELASTICSEARCH_VERSION.startsWith("2.")) {
        url = new URL("https://download.elastic.co/elasticsearch/release/org/elasticsearch"
                + "/distribution/tar/elasticsearch/" + ELASTICSEARCH_VERSION + "/elasticsearch-"
                + ELASTICSEARCH_VERSION + ".tar.gz");
    } else {
        throw new IllegalStateException("Unexpected Elasticsearch version: " + ELASTICSEARCH_VERSION);
    }
    File archiveFile = File.createTempFile("elasticsearch-" + ELASTICSEARCH_VERSION + "-", ".tar.gz");
    // try-with-resources so the download stream is closed even if writeFrom() throws
    try (InputStream in = url.openStream()) {
        Files.asByteSink(archiveFile).writeFrom(in);
    }
    Archiver archiver = ArchiverFactory.createArchiver(ArchiveFormat.TAR, CompressionType.GZIP);
    archiver.extract(archiveFile, baseDir);
    // best-effort cleanup; fall back to deletion at JVM exit if the file is still locked
    if (!archiveFile.delete()) {
        archiveFile.deleteOnExit();
    }
    System.out.println(" OK");

    File elasticsearchDir = new File(baseDir, "elasticsearch-" + ELASTICSEARCH_VERSION);
    File configDir = new File(elasticsearchDir, "config");
    // reduce logging to stdout (logging config file differs between major versions)
    if (ELASTICSEARCH_VERSION.startsWith("5.") || ELASTICSEARCH_VERSION.startsWith("6.")) {
        File log4j2PropertiesFile = new File(configDir, "log4j2.properties");
        String contents = Files.asCharSource(log4j2PropertiesFile, UTF_8).read();
        contents = contents.replace("rootLogger.level = info", "rootLogger.level = warn");
        Files.asCharSink(log4j2PropertiesFile, UTF_8).write(contents);
    } else if (ELASTICSEARCH_VERSION.startsWith("2.")) {
        File loggingYamlFile = new File(configDir, "logging.yml");
        String contents = Files.asCharSource(loggingYamlFile, UTF_8).read();
        contents = contents.replace("es.logger.level: INFO", "es.logger.level: WARN");
        contents = contents.replace("action: DEBUG", "action: INFO");
        Files.asCharSink(loggingYamlFile, UTF_8).write(contents);
    } else {
        throw new IllegalStateException("Unexpected Elasticsearch version: " + ELASTICSEARCH_VERSION);
    }
}

From source file:org.anarres.dblx.core.preset.PresetManager.java

/**
 * Persists all presets to the preset file, one serialized preset per line
 * (UTF-8).
 *
 * @throws IOException if the preset file cannot be written
 */
public void write() throws IOException {
    List<String> serialized = new ArrayList<>();
    for (Preset preset : presets) {
        serialized.add(preset.serialize());
    }
    Files.asCharSink(new File(FILENAME), StandardCharsets.UTF_8).writeLines(serialized);
}

From source file:org.glowroot.agent.plugin.cassandra.CassandraWrapper.java

/**
 * Downloads the Cassandra distribution archive, extracts it under {@code baseDir},
 * quiets its stdout logging, and lengthens request timeouts for slow CI machines.
 *
 * @param baseDir directory the archive is extracted into
 * @throws IOException if the download, extraction, or config rewrite fails
 */
private static void downloadAndExtract(File baseDir) throws IOException {
    // using System.out to make sure user sees why there is a big delay here
    System.out.print("Downloading Cassandra " + CASSANDRA_VERSION + "...");
    URL url = new URL("http://www-us.apache.org/dist/cassandra/" + CASSANDRA_VERSION + "/apache-cassandra-"
            + CASSANDRA_VERSION + "-bin.tar.gz");
    File archiveFile = File.createTempFile("cassandra-" + CASSANDRA_VERSION + "-", ".tar.gz");
    // try-with-resources so the download stream is closed even if writeFrom() throws
    try (InputStream in = url.openStream()) {
        Files.asByteSink(archiveFile).writeFrom(in);
    }
    Archiver archiver = ArchiverFactory.createArchiver(ArchiveFormat.TAR, CompressionType.GZIP);
    archiver.extract(archiveFile, baseDir);
    // best-effort cleanup; fall back to deletion at JVM exit if the file is still locked
    if (!archiveFile.delete()) {
        archiveFile.deleteOnExit();
    }
    System.out.println(" OK");

    File cassandraDir = new File(baseDir, "apache-cassandra-" + CASSANDRA_VERSION);
    File confDir = new File(cassandraDir, "conf");
    // reduce logging to stdout
    File logbackXmlFile = new File(confDir, "logback.xml");
    // asCharSource(...).read() replaces the deprecated Files.toString(File, Charset)
    String xml = Files.asCharSource(logbackXmlFile, UTF_8).read();
    xml = xml.replace("<root level=\"INFO\">", "<root level=\"ERROR\">");
    xml = xml.replace("<logger name=\"org.apache.cassandra\" level=\"DEBUG\"/>", "");
    Files.asCharSink(logbackXmlFile, UTF_8).write(xml);
    // long timeouts needed on slow travis ci machines
    File yamlFile = new File(confDir, "cassandra.yaml");
    String yaml = Files.asCharSource(yamlFile, UTF_8).read();
    yaml = yaml.replaceAll("(?m)^read_request_timeout_in_ms: .*$", "read_request_timeout_in_ms: 30000");
    yaml = yaml.replaceAll("(?m)^write_request_timeout_in_ms: .*$", "write_request_timeout_in_ms: 30000");
    Files.asCharSink(yamlFile, UTF_8).write(yaml);
}

From source file:uk.ac.ebi.fg.annotare2.magetabcheck.CheckListGenerator.java

/**
 * Writes the generated markdown checklist to {@code out} (UTF-8), creating any
 * missing parent directories first.
 *
 * @param out destination file
 * @throws IOException if writing fails
 * @throws IllegalStateException if the parent directories cannot be created
 */
private void generateMarkdown(final File out) throws IOException {
    if (!out.exists()) {
        File parent = out.getParentFile();
        // getParentFile() returns null when 'out' has no parent path component
        // (e.g. a bare relative file name); nothing to create in that case
        if (parent != null && !parent.exists() && !parent.mkdirs()) {
            throw new IllegalStateException("Can't create directories " + parent);
        }
    }
    CharSink sink = Files.asCharSink(out, Charsets.UTF_8);
    sink.write(markdown());
}

From source file:org.gradle.internal.operations.trace.BuildOperationTrace.java

/**
 * Serializes the build operation tree to pretty-printed JSON and writes it to
 * the "-tree.json" trace file (UTF-8).
 *
 * @param roots root records of the build operation tree
 * @throws IOException if the trace file cannot be written
 */
private void writeDetailTree(List<BuildOperationRecord> roots) throws IOException {
    // Serializing the whole tree can allocate very large strings; an
    // OutOfMemoryError here is deliberately treated as a non-fatal,
    // best-effort failure rather than aborting the build.
    try {
        String rawJson = JsonOutput.toJson(BuildOperationTree.serialize(roots));
        Files.asCharSink(file(basePath, "-tree.json"), Charsets.UTF_8)
                .write(JsonOutput.prettyPrint(rawJson));
    } catch (OutOfMemoryError e) {
        System.err.println("Failed to write build operation trace JSON due to out of memory.");
    }
}

From source file:com.gradleware.tooling.toolingutils.distribution.PublishedGradleVersions.java

/**
 * Persists the given JSON payload to the version cache file (UTF-8),
 * creating parent directories as needed. Write failures are logged but not
 * propagated.
 *
 * @param json the version information to cache
 */
private static void storeCacheVersionFile(String json) {
    //noinspection ResultOfMethodCallIgnored
    CACHE_FILE.getParentFile().mkdirs();

    try {
        Files.asCharSink(CACHE_FILE, Charsets.UTF_8).write(json);
    } catch (IOException e) {
        // do not throw an exception if cache file cannot be written to be more robust against file system problems
        LOG.error("Cannot write Gradle version information cache file.", e);
    }
}

From source file:org.gradle.internal.operations.trace.BuildOperationTrace.java

/**
 * Writes a human-readable, indented summary of the build operation tree to the
 * "-tree.txt" file (UTF-8), one line per operation, streamed lazily via a
 * depth-first iterator so the whole text is never held in memory at once.
 *
 * @param roots root records of the build operation tree
 * @throws IOException if the summary file cannot be written
 */
private void writeSummaryTree(final List<BuildOperationRecord> roots) throws IOException {
    Files.asCharSink(file(basePath, "-tree.txt"), Charsets.UTF_8).writeLines(new Iterable<String>() {
        @Override
        @Nonnull
        public Iterator<String> iterator() {

            // Explicit DFS stack: each entry is the queue of not-yet-visited
            // children at one tree depth, so stack depth == indentation level + 1.
            final Deque<Queue<BuildOperationRecord>> stack = new ArrayDeque<Queue<BuildOperationRecord>>(
                    Collections.singleton(new ArrayDeque<BuildOperationRecord>(roots)));
            // Reused across next() calls to avoid per-line allocation.
            final StringBuilder stringBuilder = new StringBuilder();

            return new Iterator<String>() {
                @Override
                public boolean hasNext() {
                    if (stack.isEmpty()) {
                        return false;
                    } else if (stack.peek().isEmpty()) {
                        // Current depth exhausted: pop back up and re-check.
                        stack.pop();
                        return hasNext();
                    } else {
                        return true;
                    }
                }

                @Override
                public String next() {
                    Queue<BuildOperationRecord> children = stack.peek();
                    BuildOperationRecord record = children.poll();

                    stringBuilder.setLength(0);

                    // Indent two spaces per tree depth below the roots.
                    int indents = stack.size() - 1;

                    for (int i = 0; i < indents; ++i) {
                        stringBuilder.append("  ");
                    }

                    // Push this record's children so they are visited (deeper-
                    // indented) before the remaining siblings at this depth.
                    if (!record.children.isEmpty()) {
                        stack.addFirst(new ArrayDeque<BuildOperationRecord>(record.children));
                    }

                    stringBuilder.append(record.displayName);

                    if (record.details != null) {
                        stringBuilder.append(" ");
                        stringBuilder.append(JsonOutput.toJson(record.details));
                    }

                    if (record.result != null) {
                        stringBuilder.append(" ");
                        stringBuilder.append(JsonOutput.toJson(record.result));
                    }

                    // Wall-clock duration of the operation.
                    stringBuilder.append(" [");
                    stringBuilder.append(record.endTime - record.startTime);
                    stringBuilder.append("ms]");

                    stringBuilder.append(" (");
                    stringBuilder.append(record.id);
                    stringBuilder.append(")");

                    // Progress events render as extra same-indent lines below the
                    // operation, each with its offset from the operation start.
                    if (!record.progress.isEmpty()) {
                        for (BuildOperationRecord.Progress progress : record.progress) {
                            stringBuilder.append(StandardSystemProperty.LINE_SEPARATOR.value());
                            for (int i = 0; i < indents; ++i) {
                                stringBuilder.append("  ");
                            }
                            stringBuilder.append("- ").append(progress.details).append(" [")
                                    .append(progress.time - record.startTime).append("]");
                        }
                    }

                    return stringBuilder.toString();
                }

                @Override
                public void remove() {
                    throw new UnsupportedOperationException();
                }
            };
        }
    });
}

From source file:com.tasktop.koans.PathToEnlightment.java

/**
 * Appends the latest failure to the recorded failure history and persists the
 * 10 most recent entries (in original order, newline-separated) to the temp
 * file.
 *
 * @throws IllegalStateException if the temp file cannot be written
 */
private void storeFailures() {
    List<String> failures = getLastFailures();
    failures.add(firstFailure.getDescription().getDisplayName());
    // Keep only the 10 most recent failures, preserving their original order.
    List<String> recent = failures.subList(Math.max(0, failures.size() - 10), failures.size());
    try {
        Files.asCharSink(getTmpFile(), Charsets.UTF_8).writeLines(recent, "\n");
    } catch (IOException shouldNotHappen) {
        throw new IllegalStateException(shouldNotHappen);
    }
}