Example usage for com.google.common.io Files.append

Introduction

On this page you can find example usage for com.google.common.io Files.append.

Prototype

public static void append(CharSequence from, File to, Charset charset) throws IOException 
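
As a quick orientation before the project examples below, here is a minimal, self-contained sketch of the call; the class name, file name, and appended text are placeholders chosen for illustration, not taken from any of the projects listed.

import com.google.common.io.Files;

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class FilesAppendSketch {
    public static void main(String[] args) throws IOException {
        // placeholder file name for illustration only
        File target = new File("notes.txt");
        // Each call appends the character sequence to the end of the file,
        // creating the file first if it does not yet exist.
        Files.append("first line" + System.lineSeparator(), target, StandardCharsets.UTF_8);
        Files.append("second line" + System.lineSeparator(), target, StandardCharsets.UTF_8);
    }
}

Note that recent Guava releases deprecate this helper in favour of Files.asCharSink(to, charset, FileWriteMode.APPEND).write(from); the examples below use the older form shown in the prototype.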

Usage

From source file:gtl.spark.java.example.apache.streaming.JavaWordBlacklist.java

private static JavaStreamingContext createContext(String ip, int port, String checkpointDirectory,
        String outputPath) {

    // If you do not see this printed, that means the StreamingContext has been loaded
    // from the new checkpoint
    System.out.println("Creating new context");
    File outputFile = new File(outputPath);
    if (outputFile.exists()) {
        outputFile.delete();
    }
    SparkConf sparkConf = new SparkConf().setAppName("JavaRecoverableNetworkWordCount");
    // Create the context with a 1 second batch size
    JavaStreamingContext ssc = new JavaStreamingContext(sparkConf, Durations.seconds(1));
    ssc.checkpoint(checkpointDirectory);

    // Create a socket stream on target ip:port and count the
    // words in input stream of \n delimited text (eg. generated by 'nc')
    JavaReceiverInputDStream<String> lines = ssc.socketTextStream(ip, port);
    JavaDStream<String> words = lines.flatMap(x -> Arrays.asList(SPACE.split(x)).iterator());
    JavaPairDStream<String, Integer> wordCounts = words.mapToPair(s -> new Tuple2<>(s, 1))
            .reduceByKey((i1, i2) -> i1 + i2);

    wordCounts.foreachRDD((rdd, time) -> {
        // Get or register the blacklist Broadcast
        Broadcast<List<String>> blacklist = JavaWordBlacklist.getInstance(new JavaSparkContext(rdd.context()));
        // Get or register the droppedWordsCounter Accumulator
        LongAccumulator droppedWordsCounter = JavaDroppedWordsCounter
                .getInstance(new JavaSparkContext(rdd.context()));
        // Use blacklist to drop words and use droppedWordsCounter to count them
        String counts = rdd.filter(wordCount -> {
            if (blacklist.value().contains(wordCount._1())) {
                droppedWordsCounter.add(wordCount._2());
                return false;
            } else {
                return true;
            }
        }).collect().toString();
        String output = "Counts at time " + time + " " + counts;
        System.out.println(output);
        System.out.println("Dropped " + droppedWordsCounter.value() + " word(s) totally");
        System.out.println("Appending to " + outputFile.getAbsolutePath());
        Files.append(output + "\n", outputFile, Charset.defaultCharset());
    });

    return ssc;
}

From source file:com.github.rinde.gpem17.eval.ResultWriter.java

void writeFinal(ExperimentResults results) {
    final Multimap<MASConfiguration, SimulationResult> groupedResults = LinkedHashMultimap.create();
    for (final SimulationResult sr : results.sortedResults()) {
        groupedResults.put(sr.getSimArgs().getMasConfig(), sr);
    }

    if (minimizeIO) {
        StringBuilder sb = new StringBuilder("name,").append(createHeader().toString());
        for (final MASConfiguration config : groupedResults.keySet()) {
            final Collection<SimulationResult> group = groupedResults.get(config);
            for (final SimulationResult sr : group) {
                sb.append(config.getName()).append(",");
                appendTo(sr, sb);
            }
        }

        final File combinedResult = new File(experimentDirectory, "combined-final.csv");
        try {
            Files.createParentDirs(combinedResult);
            Files.append(sb, combinedResult, Charsets.UTF_8);
        } catch (final IOException e1) {
            throw new IllegalStateException(e1);
        }
    } else {

        for (final MASConfiguration config : groupedResults.keySet()) {
            final Collection<SimulationResult> group = groupedResults.get(config);

            final File configResult = new File(experimentDirectory, config.getName() + "-final.csv");

            // deletes the file in case it already exists
            configResult.delete();

            StringBuilder sb = createHeader();
            for (final SimulationResult sr : group) {
                appendTo(sr, sb);
            }

            try {
                Files.createParentDirs(configResult);
                Files.append(sb, configResult, Charsets.UTF_8);
            } catch (final IOException e1) {
                throw new IllegalStateException(e1);
            }

        }
    }
}

From source file:org.freeeed.mr.MetadataWriter.java

private void appendMetadata(String string) throws IOException {
    Files.append(string + ParameterProcessing.NL, metadataFile, Charset.defaultCharset());
}

From source file:org.opendaylight.controller.config.persist.storage.file.FileStorageAdapter.java

private void persistLastConfig(ConfigSnapshotHolder holder) throws IOException {
    Files.append(SEPARATOR_SL, storage, ENCODING);
    String snapshotAsString = holder.getConfigSnapshot();
    Files.append(newLine(snapshotAsString), storage, ENCODING);
    Files.append(SEPARATOR_M, storage, ENCODING);
    Files.append(toStringCaps(holder.getCapabilities()), storage, ENCODING);
    Files.append(SEPARATOR_E, storage, ENCODING);
}

From source file:com.github.rinde.gpem17.eval.VanLonHolvoetResultWriter.java

@Override
void appendSimResult(SimulationResult sr, File destFile) {
    try {
        String line = appendTo(sr, new StringBuilder()).toString();
        Files.append(line, destFile, Charsets.UTF_8);
    } catch (final IOException e) {
        throw new IllegalStateException(e);
    }
}

From source file:com.hyperiongray.rcmp.ReportExtractor.java

private void writeKeyFile() throws IOException {
    logger.info("Writing the key file: {}", getOutputKeyFile());
    new File(getOutputKeyFile()).delete();
    Files.append(flatten(key_fileColumns, separator), new File(getOutputKeyFile()), Charset.defaultCharset());
    Map<String, KeyEntry> keyTable = KeyTable.getInstance().getKeyTable();
    Iterator<String> iter = keyTable.keySet().iterator();
    String[] values = new String[5];
    while (iter.hasNext()) {
        KeyEntry entry = keyTable.get(iter.next());
        values[0] = entry.getPersonName();
        values[1] = entry.getTicketNumber();
        values[2] = entry.getOfficerName();
        values[3] = entry.getOffenderName();
        values[4] = entry.getHashKey();
        Files.append(flatten((String[]) values, separator), new File(getOutputKeyFile()),
                Charset.defaultCharset());
    }
}

From source file:com.swissbit.accesscontrol.AccessControl.java

/** {@inheritDoc} */
@Override
protected void doPost(final CloudletTopic reqTopic, final KuraRequestPayload reqPayload,
        final KuraResponsePayload respPayload) throws KuraException {

    final String secureElementId = (String) reqPayload.getMetric("secure_element");
    final String encryptedString = String.valueOf(reqPayload.getMetric("encVal"));
    final List<String> list = this.m_assdCommunication.decrypt(encryptedString);

    String decryptedString = null;

    if (list != null) {
        decryptedString = list.get(1);
    }

    if (decryptedString != null) {

        this.m_activityLogService.saveLog("Saving New Permissions..");
        try {
            Files.append(secureElementId + System.lineSeparator(), new File(ALL_CLIENTS_FILE_LOCATION),
                    Charsets.UTF_8);
        } catch (final IOException e) {
            LOGGER.error(Throwables.getStackTraceAsString(e));
        }
    }
}

From source file:org.primefaces.extensions.optimizerplugin.ClosureCompilerOptimizer.java

@Override
public void optimize(final ResourcesSetAdapter rsa, final Log log) throws MojoExecutionException {
    CompilationLevel compLevel = rsa.getCompilationLevel();
    CompilerOptions options = new CompilerOptions();
    compLevel.setOptionsForCompilationLevel(options);

    WarningLevel warnLevel = rsa.getWarningLevel();
    warnLevel.setOptionsForWarningLevel(options);
    com.google.javascript.jscomp.Compiler.setLoggingLevel(Level.WARNING);

    try {
        Charset cset = Charset.forName(rsa.getEncoding());

        if (rsa.getAggregation() == null) {
            // no aggregation
            for (File file : rsa.getFiles()) {
                log.info("Optimize JS file " + file.getName() + " ...");
                addToOriginalSize(file);

                JSSourceFile jsSourceFile = JSSourceFile.fromFile(file, cset);
                List<JSSourceFile> interns = new ArrayList<JSSourceFile>();
                interns.add(jsSourceFile);

                // compile
                Compiler compiler = compile(log, interns, options, rsa.isFailOnWarning());

                // generate output
                String path = file.getCanonicalPath();
                if (StringUtils.isNotBlank(rsa.getSuffix())) {
                    // write compiled content into the new file
                    File outputFile = getFileWithSuffix(path, rsa.getSuffix());
                    Files.write(compiler.toSource(), outputFile, cset);

                    // statistic
                    addToOptimizedSize(outputFile);
                } else {
                    // path of temp. file
                    String pathOptimized = FileUtils.removeExtension(path) + OPTIMIZED_FILE_EXTENSION;

                    // create a new temp. file
                    File outputFile = new File(pathOptimized);
                    Files.touch(outputFile);

                    // write compiled content into the new file and rename it (overwrite the original file)
                    Files.write(compiler.toSource(), outputFile, cset);
                    FileUtils.rename(outputFile, file);

                    // statistic
                    addToOptimizedSize(file);
                }
            }
        } else if (rsa.getAggregation().getOutputFile() != null) {
            // aggregation to one output file
            File outputFile;

            if (!rsa.getAggregation().isWithoutCompress()) {
                // with compressing before aggregation
                List<JSSourceFile> interns = new ArrayList<JSSourceFile>();
                for (File file : rsa.getFiles()) {
                    log.info("Optimize JS file " + file.getName() + " ...");
                    addToOriginalSize(file);

                    interns.add(JSSourceFile.fromFile(file, cset));
                }

                // compile
                Compiler compiler = compile(log, interns, options, rsa.isFailOnWarning());

                int filesCount = rsa.getFiles().size();
                if (rsa.getAggregation().getPrependedFile() != null) {
                    filesCount++;
                }

                if (filesCount > 1) {
                    log.info("Aggregation is running ...");
                }

                // get right output file
                outputFile = getOutputFile(rsa);

                long sizeBefore = outputFile.length();

                if (rsa.getAggregation().getPrependedFile() != null) {
                    // write / append to be prepended file into / to the output file
                    prependFile(rsa.getAggregation().getPrependedFile(), outputFile, cset, rsa.getEncoding());
                }

                // write / append compiled content into / to the output file
                Files.append(compiler.toSource(), outputFile, cset);

                // statistic
                addToOptimizedSize(outputFile.length() - sizeBefore);

                if (filesCount > 1) {
                    log.info(filesCount + " files were successfully aggregated.");
                }
            } else {
                // only aggregation without compressing
                outputFile = aggregateFiles(rsa, cset, log);
            }

            // delete single files if necessary
            deleteFilesIfNecessary(rsa, log);

            // rename aggregated file if necessary
            renameOutputFileIfNecessary(rsa, outputFile);
        } else {
            // should not happen
            log.error("Wrong plugin's internal state.");
        }
    } catch (Exception e) {
        throw new MojoExecutionException("Resources optimization failure: " + e.getLocalizedMessage(), e);
    }
}

From source file:org.primefaces.extensions.optimizerplugin.YuiCompressorOptimizer.java

@Override
public void optimize(final ResourcesSetAdapter rsa, final Log log) throws MojoExecutionException {
    InputStreamReader in = null;
    OutputStreamWriter out = null;

    try {
        if (rsa.getAggregation() == null) {
            // no aggregation
            for (File file : rsa.getFiles()) {
                log.info("Optimize CSS file " + file.getName() + " ...");
                addToOriginalSize(file);

                in = new InputStreamReader(new FileInputStream(file), rsa.getEncoding());

                // generate output
                String path = file.getCanonicalPath();
                if (StringUtils.isNotBlank(rsa.getSuffix())) {
                    // create a new output stream
                    File outputFile = getFileWithSuffix(path, rsa.getSuffix());
                    out = new OutputStreamWriter(new FileOutputStream(outputFile), rsa.getEncoding());

                    // compress and write compressed content into the new file
                    CssCompressor compressor = new CssCompressor(in);
                    compressor.compress(out, 500);
                    closeStreams(in, out);

                    // statistic
                    addToOptimizedSize(outputFile);
                } else {
                    // path of temp. file
                    String pathOptimized = FileUtils.removeExtension(path) + OPTIMIZED_FILE_EXTENSION;

                    // create a new temp. file and output stream
                    File outputFile = new File(pathOptimized);
                    Files.touch(outputFile);
                    out = new OutputStreamWriter(new FileOutputStream(outputFile), rsa.getEncoding());

                    // compress and write compressed content into the new file
                    CssCompressor compressor = new CssCompressor(in);
                    compressor.compress(out, 500);
                    closeStreams(in, out);

                    // rename the new file (overwrite the original file)
                    FileUtils.rename(outputFile, file);

                    // statistic
                    addToOptimizedSize(file);
                }
            }
        } else if (rsa.getAggregation().getOutputFile() != null) {
            // aggregation to one output file
            File outputFile;
            Charset cset = Charset.forName(rsa.getEncoding());

            if (!rsa.getAggregation().isWithoutCompress()) {
                ByteArrayOutputStream baos = new ByteArrayOutputStream();
                OutputStreamWriter osw = new OutputStreamWriter(baos, rsa.getEncoding());

                // with compressing before aggregation
                for (File file : rsa.getFiles()) {
                    log.info("Optimize CSS file " + file.getName() + " ...");
                    addToOriginalSize(file);

                    // create input stream for the current file
                    in = new InputStreamReader(new FileInputStream(file), rsa.getEncoding());

                    // compress and write compressed content into the output stream
                    CssCompressor compressor = new CssCompressor(in);
                    compressor.compress(osw, 500);

                    // close stream
                    IOUtil.close(in);
                }

                // close stream
                IOUtil.close(osw);

                int filesCount = rsa.getFiles().size();
                if (rsa.getAggregation().getPrependedFile() != null) {
                    filesCount++;
                }

                if (filesCount > 1) {
                    log.info("Aggregation is running ...");
                }

                // get right output file
                outputFile = getOutputFile(rsa);

                long sizeBefore = outputFile.length();

                if (rsa.getAggregation().getPrependedFile() != null) {
                    // write / append to be prepended file into / to the output file
                    prependFile(rsa.getAggregation().getPrependedFile(), outputFile, cset, rsa.getEncoding());
                }

                // write / append compressed content into / to the output file
                Files.append(baos.toString(rsa.getEncoding()), outputFile, cset);

                // statistic
                addToOptimizedSize(outputFile.length() - sizeBefore);

                if (filesCount > 1) {
                    log.info(filesCount + " files were successfully aggregated.");
                }
            } else {
                // only aggregation without compressing
                outputFile = aggregateFiles(rsa, cset, log);
            }

            // delete single files if necessary
            deleteFilesIfNecessary(rsa, log);

            // rename aggregated file if necessary
            renameOutputFileIfNecessary(rsa, outputFile);
        } else {
            // should not happen
            log.error("Wrong plugin's internal state.");
        }
    } catch (Exception e) {
        throw new MojoExecutionException("Resources optimization failure: " + e.getLocalizedMessage(), e);
    } finally {
        closeStreams(in, out);
    }
}

From source file:org.shaf.core.util.FileSystemRepository.java

/**
 * Appends a message to the repository log-file.
 *
 * @param message
 *            the message to append.
 * @param withTimestamp
 *            {@code true} to put a timestamp before the message,
 *            {@code false} otherwise.
 */
private final void record(final String message, final boolean withTimestamp) {
    if (this.isAllowRecord) {
        File dist = new File(this.path.toFile(), this.path.getFileName() + ".reg");

        if (dist.exists()) {
            String line = ((withTimestamp) ? TimeUtils.formatTime(System.currentTimeMillis()) + " " : "")
                    + message + System.lineSeparator();

            try {
                Files.append(line, dist, Charset.defaultCharset());
            } catch (IOException exc) {
                LOG.error("Failed to record the repository action: \"" + line + "\" to the file: " + dist, exc);
            }
        }
    }
}