Example usage for com.google.common.io Closeables close

Introduction

On this page you can find example usages of com.google.common.io Closeables.close.

Prototype

public static void close(@Nullable Closeable closeable, boolean swallowIOException) throws IOException 

Document

Closes a Closeable, with control over whether an IOException may be thrown.
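
The swallowIOException flag is normally tied to whether the body of the try block already failed, so that an exception thrown by close() cannot mask the original one. Below is a minimal sketch of that idiom; the class and method names are illustrative and not taken from the examples that follow.

import com.google.common.io.ByteStreams;
import com.google.common.io.Closeables;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

public class CloseablesIdiom {

    // Copies a stream to a file, closing the output with the recommended pattern.
    public static void copyToFile(InputStream in, File file) throws IOException {
        OutputStream out = new FileOutputStream(file);
        boolean threw = true;
        try {
            ByteStreams.copy(in, out);
            threw = false;
        } finally {
            // Swallow an IOException from close() only if the copy already threw;
            // on the success path a close failure is still reported to the caller.
            Closeables.close(out, threw);
        }
    }
}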

Usage

From source file:com.google.jenkins.plugins.storage.StdoutUpload.java

/**
 * {@inheritDoc}
 */
@Override
@Nullable
protected UploadSpec getInclusions(AbstractBuild<?, ?> build, FilePath workspace, TaskListener listener)
        throws UploadException {
    try {
        OutputStream outputStream = null;
        try {
            FilePath logDir = new FilePath(build.getLogFile()).getParent();
            FilePath logFile = new FilePath(logDir, getLogName());

            outputStream = new PlainTextConsoleOutputStream(logFile.write());
            copy(build.getLogInputStream(), outputStream);

            return new UploadSpec(logDir, ImmutableList.of(logFile));
        } finally {
            Closeables.close(outputStream, true /* swallowIOException */);
        }
    } catch (InterruptedException e) {
        throw new UploadException(Messages.AbstractUpload_IncludeException(), e);
    } catch (IOException e) {
        throw new UploadException(Messages.AbstractUpload_IncludeException(), e);
    }
}
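
In this example swallowIOException is true, so an IOException raised while closing the console output stream in the finally block is logged and discarded rather than masking an exception already thrown by copy().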

From source file:org.jclouds.examples.rackspace.cloudblockstorage.ListVolumeAttachments.java

/**
 * Always close your service when you're done with it.
 */
public void close() throws IOException {
    Closeables.close(nova, true);
}

From source file:net.oneandone.troilus.EmbeddedCassandra.java

@SuppressWarnings("unchecked")
protected static int prepare() throws IOException {

    String cassandraDirName = "target" + File.separator + "cassandra-junit-" + new Random().nextInt(1000000);

    File cassandraDir = new File(cassandraDirName);
    cassandraDir.mkdirs();

    InputStream cassandraConfigurationInput = null;
    Writer cassandraConfigurationOutput = null;

    try {
        ClassLoader loader = Thread.currentThread().getContextClassLoader();
        cassandraConfigurationInput = loader.getResourceAsStream(CASSANDRA_YAML_FILE);

        Yaml yaml = new Yaml();
        Map<String, Object> cassandraConfiguration = (Map<String, Object>) yaml
                .load(cassandraConfigurationInput);

        int rpcPort = findUnusedLocalPort();
        if (rpcPort == -1) {
            throw new RuntimeException("Can not start embedded cassandra: no unused local port found!");
        }
        cassandraConfiguration.put("rpc_port", rpcPort);

        int storagePort = findUnusedLocalPort();
        if (storagePort == -1) {
            throw new RuntimeException("Can not start embedded cassandra: no unused local port found!");
        }
        cassandraConfiguration.put("storage_port", storagePort);

        int nativeTransportPort = findUnusedLocalPort();
        if (nativeTransportPort == -1) {
            throw new RuntimeException("Can not start embedded cassandra: no unused local port found!");
        }
        cassandraConfiguration.put("native_transport_port", nativeTransportPort);

        cassandraConfiguration.put("start_native_transport", "true");

        cassandraConfigurationOutput = new OutputStreamWriter(
                new FileOutputStream(cassandraDirName + File.separator + CASSANDRA_YAML_FILE), Charsets.UTF_8);

        yaml.dump(cassandraConfiguration, cassandraConfigurationOutput);

        System.setProperty("cassandra.config",
                new File(cassandraDirName, CASSANDRA_YAML_FILE).toURI().toString());
        System.setProperty("cassandra-foreground", "true");

        DatabaseDescriptor.createAllDirectories();

        return nativeTransportPort;

    } finally {
        Closeables.closeQuietly(cassandraConfigurationInput);
        Closeables.close(cassandraConfigurationOutput, true);
    }
}
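
Closeables.closeQuietly on the input stream has effectively the same behaviour as close(..., true): an IOException raised while closing is logged and swallowed rather than propagated.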

From source file:org.jclouds.examples.rackspace.autoscale.ExecuteWebhook.java

/**
 * Always close your service when you're done with it.
 *
 * Note that closing quietly like this is not necessary in Java 7.
 * You would use try-with-resources in the main method instead.
 */
public void close() throws IOException {
    Closeables.close(autoscaleApi, true);
}
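
As the comment above notes, on Java 7 and later the same cleanup can be expressed with try-with-resources instead of Closeables.close. A minimal, self-contained sketch using a plain FileInputStream (illustrative only; it is not part of the jclouds example):

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

public class TryWithResourcesSketch {

    // Reads the first byte of a file; the stream is closed automatically when
    // the block exits. If both the body and close() throw, the close() exception
    // is attached as a suppressed exception instead of masking the original one.
    public static int readFirstByte(String path) throws IOException {
        try (InputStream in = new FileInputStream(path)) {
            return in.read();
        }
    }
}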

From source file:ch.ledcom.maven.sitespeed.report.ResourceFiles.java

private void export(File resource, File targetDir) throws IOException {
    File target = new File(targetDir, resource.getPath());
    if (!target.getParentFile().exists()) {
        target.getParentFile().mkdirs();
    }
    InputStream in = null;
    OutputStream out = null;
    boolean threw = true;
    try {
        in = this.getClass().getClassLoader().getResourceAsStream(BASE_DIR + "/" + resource.getPath());
        out = new FileOutputStream(target);
        IOUtil.copy(in, out);
        threw = false;
    } finally {
        Closeables.close(in, true);
        Closeables.close(out, threw);
    }
}
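
The threw flag follows the idiom recommended for Closeables.close: a failure while closing out is swallowed only when the copy itself already threw, so on the success path a close error still reaches the caller, while the input stream is always closed quietly.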

From source file:com.ml.ira.algos.LogisticModelParameters.java

/**
 * Saves the model to the given path.
 */
public void saveTo(Path path) throws IOException {
    Closeables.close(lr, false);
    targetCategories = getCsvRecordFactory().getTargetCategories();
    FileSystem ofs = path.getFileSystem(new Configuration());
    FSDataOutputStream out = ofs.create(path, true);
    write(out);
    out.flush();
    ofs.close();
}
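
Here swallowIOException is false, so an IOException thrown while closing lr propagates to the caller instead of being logged and discarded.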

From source file:com.netflix.exhibitor.core.config.ConfigManager.java

@Override
public void close() throws IOException {
    repeatingActivity.close();
    Closeables.close(provider, true);
}

From source file:com.b2international.snowowl.snomed.datastore.index.interest.SearchProfilePreferences.java

public void exportToXml(@Nonnull final String xmlPath) throws IOException {

    FileOutputStream outputStream = null;

    try {
        outputStream = new FileOutputStream(xmlPath);
        final XStream xStream = new XStream(new Xpp3Driver());
        xStream.processAnnotations(SearchProfilePreferences.class);
        xStream.toXML(this, outputStream);
    } finally {
        Closeables.close(outputStream, true);
    }
}

From source file:org.apache.mahout.math.hadoop.MatrixColumnMeansJob.java

/**
 * Job for calculating column-wise mean of a DistributedRowMatrix
 *
 * @param initialConf
 *          base Hadoop configuration for the job
 * @param inputPath
 *          path to DistributedRowMatrix input
 * @param outputVectorTmpPath
 *          path for temporary files created during job
 * @param vectorClass
 *          String of desired class for returned vector e.g. DenseVector,
 *          RandomAccessSparseVector (may be null for {@link DenseVector} )
 * @return Vector containing column-wise mean of DistributedRowMatrix
 */
public static Vector run(Configuration initialConf, Path inputPath, Path outputVectorTmpPath,
        String vectorClass) throws IOException {

    try {
        initialConf.set(VECTOR_CLASS, vectorClass == null ? DenseVector.class.getName() : vectorClass);

        Job job = new Job(initialConf, "MatrixColumnMeansJob");
        job.setJarByClass(MatrixColumnMeansJob.class);

        FileOutputFormat.setOutputPath(job, outputVectorTmpPath);

        outputVectorTmpPath.getFileSystem(job.getConfiguration()).delete(outputVectorTmpPath, true);
        job.setNumReduceTasks(1);
        FileInputFormat.addInputPath(job, inputPath);
        job.setInputFormatClass(SequenceFileInputFormat.class);
        job.setOutputFormatClass(SequenceFileOutputFormat.class);

        job.setMapperClass(MatrixColumnMeansMapper.class);
        job.setReducerClass(MatrixColumnMeansReducer.class);
        job.setMapOutputKeyClass(NullWritable.class);
        job.setMapOutputValueClass(VectorWritable.class);
        job.setOutputKeyClass(IntWritable.class);
        job.setOutputValueClass(VectorWritable.class);
        job.submit();
        job.waitForCompletion(true);

        Path tmpFile = new Path(outputVectorTmpPath, "part-r-00000");
        SequenceFileValueIterator<VectorWritable> iterator = new SequenceFileValueIterator<VectorWritable>(
                tmpFile, true, initialConf);
        try {
            if (iterator.hasNext()) {
                return iterator.next().get();
            } else {
                return (Vector) Class.forName(vectorClass).getConstructor(int.class).newInstance(0);
            }
        } finally {
            Closeables.close(iterator, true);
        }
    } catch (IOException ioe) {
        throw ioe;
    } catch (Throwable thr) {
        throw new IOException(thr);
    }
}

From source file:org.apache.mahout.text.SequenceFilesFromLuceneStorage.java

/**
 * Generates a sequence files from a Lucene index via the given {@link LuceneStorageConfiguration}
 *
 * @param lucene2seqConf configuration bean
 * @throws java.io.IOException if index cannot be opened or sequence file could not be written
 */
public void run(final LuceneStorageConfiguration lucene2seqConf) throws IOException {
    List<Path> indexPaths = lucene2seqConf.getIndexPaths();
    int processedDocs = 0;

    for (Path indexPath : indexPaths) {
        Directory directory = FSDirectory.open(new File(indexPath.toUri().getPath()));
        IndexReader reader = DirectoryReader.open(directory);
        IndexSearcher searcher = new IndexSearcher(reader);

        LuceneIndexHelper.fieldShouldExistInIndex(reader, lucene2seqConf.getIdField());
        for (String field : lucene2seqConf.getFields()) {
            LuceneIndexHelper.fieldShouldExistInIndex(reader, field);
        }

        Configuration configuration = lucene2seqConf.getConfiguration();
        FileSystem fileSystem = FileSystem.get(configuration);
        Path sequenceFilePath = new Path(lucene2seqConf.getSequenceFilesOutputPath(), indexPath.getName());
        final SequenceFile.Writer sequenceFileWriter = new SequenceFile.Writer(fileSystem, configuration,
                sequenceFilePath, Text.class, Text.class);

        SeqFileWriterCollector writerCollector = new SeqFileWriterCollector(lucene2seqConf, sequenceFileWriter,
                processedDocs);
        searcher.search(lucene2seqConf.getQuery(), writerCollector);
        log.info("Wrote " + writerCollector.processedDocs + " documents in " + sequenceFilePath.toUri());
        processedDocs = writerCollector.processedDocs;
        Closeables.close(sequenceFileWriter, false);
        directory.close();
        //searcher.close();
        reader.close();
    }
}
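
The sequence file writer is closed with swallowIOException set to false, so a failure while flushing and closing the file propagates instead of being silently logged.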