Example usage for com.google.common.io Closeables close

Introduction

On this page you can find example usages of com.google.common.io Closeables.close.

Prototype

public static void close(@Nullable Closeable closeable, boolean swallowIOException) throws IOException 

Document

Closes a Closeable, with control over whether an IOException may be thrown.
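
The second argument controls what happens if closing itself fails: with swallowIOException set to true the IOException is logged and suppressed, with false it propagates to the caller. Below is a minimal sketch of the recommended pattern, using a hypothetical copy helper (the method name and buffer size are illustrative, not part of the Guava API):

import com.google.common.io.Closeables;

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

public class CloseablesExample {

    // Hypothetical helper: copies one file to another, closing both
    // streams in finally blocks. The `threw` flag is cleared only when
    // the body completes normally, so a close-time IOException is
    // swallowed only while another exception is already propagating
    // and can never mask the primary failure.
    public static void copy(String from, String to) throws IOException {
        boolean threw = true;
        InputStream in = new FileInputStream(from);
        try {
            OutputStream out = new FileOutputStream(to);
            try {
                byte[] buf = new byte[8192];
                int n;
                while ((n = in.read(buf)) != -1) {
                    out.write(buf, 0, n);
                }
                threw = false; // body succeeded
            } finally {
                Closeables.close(out, threw); // propagate a close failure only on success
            }
        } finally {
            Closeables.close(in, threw);
        }
    }
}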

Usage

From source file:org.apache.mahout.math.hadoop.TimesSquaredJob.java

public static Job createTimesSquaredJob(Configuration initialConf, Vector v, int outputVectorDim,
        Path matrixInputPath, Path outputVectorPathBase, Class<? extends TimesSquaredMapper> mapClass,
        Class<? extends VectorSummingReducer> redClass) throws IOException {

    FileSystem fs = FileSystem.get(matrixInputPath.toUri(), initialConf);
    matrixInputPath = fs.makeQualified(matrixInputPath);
    outputVectorPathBase = fs.makeQualified(outputVectorPathBase);

    long now = System.nanoTime();
    Path inputVectorPath = new Path(outputVectorPathBase, INPUT_VECTOR + '/' + now);

    SequenceFile.Writer inputVectorPathWriter = null;

    try {
        inputVectorPathWriter = new SequenceFile.Writer(fs, initialConf, inputVectorPath, NullWritable.class,
                VectorWritable.class);
        inputVectorPathWriter.append(NullWritable.get(), new VectorWritable(v));
    } finally {
        Closeables.close(inputVectorPathWriter, false);
    }

    URI ivpURI = inputVectorPath.toUri();
    DistributedCache.setCacheFiles(new URI[] { ivpURI }, initialConf);

    Job job = HadoopUtil.prepareJob(matrixInputPath, new Path(outputVectorPathBase, OUTPUT_VECTOR_FILENAME),
            SequenceFileInputFormat.class, mapClass, NullWritable.class, VectorWritable.class, redClass,
            NullWritable.class, VectorWritable.class, SequenceFileOutputFormat.class, initialConf);
    job.setCombinerClass(redClass);
    job.setJobName("TimesSquaredJob: " + matrixInputPath);

    Configuration conf = job.getConfiguration();
    conf.set(INPUT_VECTOR, ivpURI.toString());
    conf.setBoolean(IS_SPARSE_OUTPUT, !v.isDense());
    conf.setInt(OUTPUT_VECTOR_DIMENSION, outputVectorDim);

    return job;
}

From source file:net.derquinse.common.io.DurableFiles.java

/**
 * Private helper method. Writes a character sequence to a file, optionally appending. The file is
 * sync'd before being closed.
 * @param from the character sequence to append
 * @param to the destination file
 * @param charset the character set used when writing the file
 * @param append true to append, false to overwrite
 * @throws IOException if an I/O error occurs
 */
private static void write(CharSequence from, File to, Charset charset, boolean append) throws IOException {
    boolean threw = true;
    FileOutputStream os = new FileOutputStream(to, append);
    try {
        OutputStreamWriter w = new OutputStreamWriter(os, charset);
        try {
            w.append(from);
            w.flush();
            os.flush();
            os.getFD().sync();
            threw = false;
        } finally {
            Closeables.close(w, threw);
        }
    } finally {
        Closeables.close(os, threw);
    }
}
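
Note the threw idiom above: the flag starts as true and is cleared only after every write and the fsync have succeeded, so Closeables.close swallows a close-time IOException only while the body is already propagating an exception and never masks the original failure. Closing the writer first also flushes it before the underlying stream is closed.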

From source file:javaapplication3.RunRandomForestSeq.java

private static void testFile(Path inPath, Path outPath, DataConverter converter, DecisionForest forest,
        Dataset dataset, Collection<double[]> results, Random rng, FileSystem outFS, FileSystem dataFS)
        throws IOException {
    // create the predictions file
    FSDataOutputStream ofile = null;

    if (outPath != null) {
        ofile = outFS.create(outPath);
    }

    FSDataInputStream input = dataFS.open(inPath);
    try {
        Scanner scanner = new Scanner(input, "UTF-8");

        while (scanner.hasNextLine()) {
            String line = scanner.nextLine();
            if (line.isEmpty()) {
                continue; // skip empty lines
            }

            Instance instance = converter.convert(line);
            double prediction = forest.classify(dataset, rng, instance);

            if (ofile != null) {
                ofile.writeChars(Double.toString(prediction)); // write the prediction
                ofile.writeChar('\n');
            }

            results.add(new double[] { dataset.getLabel(instance), prediction });
        }

        scanner.close();
    } finally {
        Closeables.close(input, true);
    }
}
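
Here the input stream is closed with swallowIOException set to true: once the file has been read completely, a failure while closing carries no information worth propagating, so it is logged and discarded rather than replacing the method's result.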

From source file:org.gpfvic.mahout.cf.taste.impl.similarity.precompute.MultithreadedBatchItemSimilarities.java

@Override
public int computeItemSimilarities(int degreeOfParallelism, int maxDurationInHours, SimilarItemsWriter writer)
        throws IOException {

    ExecutorService executorService = Executors.newFixedThreadPool(degreeOfParallelism + 1);

    Output output = null;
    try {
        writer.open();

        DataModel dataModel = getRecommender().getDataModel();

        BlockingQueue<long[]> itemsIDsInBatches = queueItemIDsInBatches(dataModel, batchSize,
                degreeOfParallelism);
        BlockingQueue<List<SimilarItems>> results = new LinkedBlockingQueue<>();

        AtomicInteger numActiveWorkers = new AtomicInteger(degreeOfParallelism);
        for (int n = 0; n < degreeOfParallelism; n++) {
            executorService.execute(new SimilarItemsWorker(n, itemsIDsInBatches, results, numActiveWorkers));
        }

        output = new Output(results, writer, numActiveWorkers);
        executorService.execute(output);

    } catch (Exception e) {
        throw new IOException(e);
    } finally {
        executorService.shutdown();
        try {
            boolean succeeded = executorService.awaitTermination(maxDurationInHours, TimeUnit.HOURS);
            if (!succeeded) {
                throw new RuntimeException(
                        "Unable to complete the computation in " + maxDurationInHours + " hours!");
            }
        } catch (InterruptedException e) {
            throw new RuntimeException(e);
        }
        Closeables.close(writer, false);
    }

    return output.getNumSimilaritiesProcessed();
}

From source file:com.metamx.druid.index.v1.CompressedLongsIndexedSupplier.java

@Override
public IndexedLongs get() {
    return new IndexedLongs() {
        int currIndex = -1;
        ResourceHolder<LongBuffer> holder;
        LongBuffer buffer;

        @Override
        public int size() {
            return totalSize;
        }

        @Override
        public long get(int index) {
            int bufferNum = index / sizePer;
            int bufferIndex = index % sizePer;

            if (bufferNum != currIndex) {
                loadBuffer(bufferNum);
            }

            return buffer.get(buffer.position() + bufferIndex);
        }

        @Override
        public void fill(int index, long[] toFill) {
            if (totalSize - index < toFill.length) {
                throw new IndexOutOfBoundsException(
                        String.format("Cannot fill array of size[%,d] at index[%,d].  Max size[%,d]",
                                toFill.length, index, totalSize));
            }

            int bufferNum = index / sizePer;
            int bufferIndex = index % sizePer;

            int leftToFill = toFill.length;
            while (leftToFill > 0) {
                if (bufferNum != currIndex) {
                    loadBuffer(bufferNum);
                }

                buffer.mark();
                buffer.position(buffer.position() + bufferIndex);
                final int numToGet = Math.min(buffer.remaining(), leftToFill);
                buffer.get(toFill, toFill.length - leftToFill, numToGet);
                buffer.reset();
                leftToFill -= numToGet;
                ++bufferNum;
                bufferIndex = 0;
            }
        }

        private void loadBuffer(int bufferNum) {
            Closeables.closeQuietly(holder);
            holder = baseLongBuffers.get(bufferNum);
            buffer = holder.get();
            currIndex = bufferNum;
        }

        @Override
        public int binarySearch(long key) {
            throw new UnsupportedOperationException();
        }

        @Override
        public int binarySearch(long key, int from, int to) {
            throw new UnsupportedOperationException();
        }

        @Override
        public String toString() {
            return "CompressedLongsIndexedSupplier_Anonymous{" + "currIndex=" + currIndex + ", sizePer="
                    + sizePer + ", numChunks=" + baseLongBuffers.size() + ", totalSize=" + totalSize + '}';
        }

        @Override
        public void close() throws IOException {
            Closeables.close(holder, false);
        }
    };
}
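
Note that loadBuffer above calls Closeables.closeQuietly(holder), which logs and swallows any IOException; that Closeable overload was deprecated and later removed from Guava, with close(closeable, true) as the suggested replacement. The close() override at the end of the example uses close(holder, false) instead, so a close failure there propagates.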

From source file:org.eclipse.jdt.ls.core.internal.ParentProcessWatcher.java

/**
 * Checks whether the parent process is still running.
 * If not, then we assume it has crashed, and we have to terminate the Java Language Server.
 *
 * @return true if the parent process is still running
 */
private boolean parentProcessStillRunning() {
    // Wait until parent process id is available
    final long pid = server.getParentProcessId();
    if (pid == 0 || lastActivityTime > (System.currentTimeMillis() - INACTIVITY_DELAY_SECS)) {
        return true;
    }
    String command;
    if (Platform.OS_WIN32.equals(Platform.getOS())) {
        command = "cmd /c \"tasklist /FI \"PID eq " + pid + "\" | findstr " + pid + "\"";
    } else {
        command = "ps -p " + pid;
    }
    Process process = null;
    boolean finished = false;
    try {
        process = Runtime.getRuntime().exec(command);
        finished = process.waitFor(POLL_DELAY_SECS, TimeUnit.SECONDS);
        if (!finished) {
            process.destroy();
            finished = process.waitFor(POLL_DELAY_SECS, TimeUnit.SECONDS); // wait for the process to stop
        }
        if (Platform.OS_WIN32.equals(Platform.getOS()) && finished && process.exitValue() > 1) {
            // the tasklist command should return 0 (parent process exists) or 1 (parent process doesn't exist)
            JavaLanguageServerPlugin
                    .logInfo("The tasklist command: '" + command + "' returns " + process.exitValue());
            return true;
        }
        return !finished || process.exitValue() == 0;
    } catch (IOException | InterruptedException e) {
        JavaLanguageServerPlugin.logException(e.getMessage(), e);
        return true;
    } finally {
        if (process != null) {
            if (!finished) {
                process.destroyForcibly();
            }
            // Terminating or destroying the Process doesn't close the process handle on Windows.
            // It is only closed when the Process object is garbage collected (in its finalize() method).
            // On Windows, when the Java LS is idle, we need to explicitly request a GC,
            // to prevent an accumulation of zombie processes, as finalize() will be called.
            if (Platform.OS_WIN32.equals(Platform.getOS())) {
                // Java >= 9 doesn't close the handle when the process is garbage collected
                // We need to close the opened streams
                if (!isJava1x) {
                    Closeables.closeQuietly(process.getInputStream());
                    Closeables.closeQuietly(process.getErrorStream());
                    try {
                        Closeables.close(process.getOutputStream(), false);
                    } catch (IOException e) {
                    }
                }
                System.gc();
            }
        }
    }
}

From source file:org.apache.mahout.cf.taste.impl.similarity.precompute.MultithreadedBatchItemSimilarities.java

@Override
public int computeItemSimilarities(int degreeOfParallelism, int maxDurationInHours, SimilarItemsWriter writer)
        throws IOException {

    ExecutorService executorService = Executors.newFixedThreadPool(degreeOfParallelism + 1);

    Output output = null;
    try {
        writer.open();

        DataModel dataModel = getRecommender().getDataModel();

        BlockingQueue<long[]> itemsIDsInBatches = queueItemIDsInBatches(dataModel, batchSize,
                degreeOfParallelism);
        BlockingQueue<List<SimilarItems>> results = new LinkedBlockingQueue<List<SimilarItems>>();

        AtomicInteger numActiveWorkers = new AtomicInteger(degreeOfParallelism);
        for (int n = 0; n < degreeOfParallelism; n++) {
            executorService.execute(new SimilarItemsWorker(n, itemsIDsInBatches, results, numActiveWorkers));
        }

        output = new Output(results, writer, numActiveWorkers);
        executorService.execute(output);

    } catch (Exception e) {
        throw new IOException(e);
    } finally {
        executorService.shutdown();
        try {
            boolean succeeded = executorService.awaitTermination(maxDurationInHours, TimeUnit.HOURS);
            if (!succeeded) {
                throw new RuntimeException(
                        "Unable to complete the computation in " + maxDurationInHours + " hours!");
            }
        } catch (InterruptedException e) {
            throw new RuntimeException(e);
        }
        Closeables.close(writer, false);
    }

    return output.getNumSimilaritiesProcessed();
}

From source file:org.apache.mahout.utils.vectors.lucene.Driver.java

public void dumpVectors() throws IOException {

    File file = new File(luceneDir);
    Preconditions.checkArgument(file.isDirectory(),
            "Lucene directory: " + file.getAbsolutePath() + " does not exist or is not a directory");
    Preconditions.checkArgument(maxDocs >= 0, "maxDocs must be >= 0");
    Preconditions.checkArgument(minDf >= 1, "minDf must be >= 1");
    Preconditions.checkArgument(maxDFPercent <= 99, "maxDFPercent must be <= 99");

    Directory dir = FSDirectory.open(file);
    IndexReader reader = DirectoryReader.open(dir);

    Weight weight;
    if ("tf".equalsIgnoreCase(weightType)) {
        weight = new TF();
    } else if ("tfidf".equalsIgnoreCase(weightType)) {
        weight = new TFIDF();
    } else {
        throw new IllegalArgumentException("Weight type " + weightType + " is not supported");
    }

    TermInfo termInfo = new CachedTermInfo(reader, field, minDf, maxDFPercent);

    LuceneIterable iterable;
    if (norm == LuceneIterable.NO_NORMALIZING) {
        iterable = new LuceneIterable(reader, idField, field, termInfo, weight, LuceneIterable.NO_NORMALIZING,
                maxPercentErrorDocs);
    } else {
        iterable = new LuceneIterable(reader, idField, field, termInfo, weight, norm, maxPercentErrorDocs);
    }

    log.info("Output File: {}", outFile);

    VectorWriter vectorWriter = getSeqFileWriter(outFile);
    try {
        long numDocs = vectorWriter.write(iterable, maxDocs);
        log.info("Wrote: {} vectors", numDocs);
    } finally {
        Closeables.close(vectorWriter, false);
    }

    File dictOutFile = new File(dictOut);
    log.info("Dictionary Output file: {}", dictOutFile);
    Writer writer = Files.newWriter(dictOutFile, Charsets.UTF_8);
    DelimitedTermInfoWriter tiWriter = new DelimitedTermInfoWriter(writer, delimiter, field);
    try {
        tiWriter.write(termInfo);
    } finally {
        Closeables.close(tiWriter, false);
    }

    if (!"".equals(seqDictOut)) {
        log.info("SequenceFile Dictionary Output file: {}", seqDictOut);

        Path path = new Path(seqDictOut);
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        SequenceFile.Writer seqWriter = null;
        try {
            seqWriter = SequenceFile.createWriter(fs, conf, path, Text.class, IntWritable.class);
            Text term = new Text();
            IntWritable termIndex = new IntWritable();

            Iterator<TermEntry> termEntries = termInfo.getAllEntries();
            while (termEntries.hasNext()) {
                TermEntry termEntry = termEntries.next();
                term.set(termEntry.getTerm());
                termIndex.set(termEntry.getTermIdx());
                seqWriter.append(term, termIndex);
            }
        } finally {
            Closeables.close(seqWriter, false);
        }

    }
}

From source file:org.excalibur.service.deployment.server.ApplicationServer.java

@Override
public void close() throws IOException {
    if (started.get()) {
        try {
            this.unregisterThisInstanceServices();
            Closeables.close(serviceDiscovery, true);
            server.stop();
        } catch (Exception exception) {
            LOG.warn("Error in stopping the server. Cause {}", exception.getMessage());
        }
        started.set(false);
    }
}

From source file:org.apache.mahout.math.hadoop.similarity.cooccurrence.Vectors.java

public static OpenIntIntHashMap readAsIntMap(Path path, Configuration conf) throws IOException {
    FileSystem fs = FileSystem.get(path.toUri(), conf);
    FSDataInputStream in = fs.open(path);
    try {
        return readAsIntMap(in);
    } finally {
        Closeables.close(in, true);
    }
}
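
On Java 7 and later, try-with-resources expresses the same pattern more compactly; the compiler generates the close call and attaches a close-time IOException to the primary exception as suppressed instead of swallowing it. A sketch of this last method rewritten that way (same class and imports as above; note that, unlike close(in, true), a close failure after a successful read now propagates):

public static OpenIntIntHashMap readAsIntMap(Path path, Configuration conf) throws IOException {
    FileSystem fs = FileSystem.get(path.toUri(), conf);
    // The stream is closed automatically when the block exits,
    // whether normally or exceptionally.
    try (FSDataInputStream in = fs.open(path)) {
        return readAsIntMap(in);
    }
}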