Example usage for com.google.common.io Closeables close

List of usage examples for com.google.common.io Closeables close

Introduction

On this page you can find example usages of com.google.common.io Closeables.close.

Prototype

public static void close(@Nullable Closeable closeable, boolean swallowIOException) throws IOException 

Document

Closes a Closeable, with control over whether an IOException may be thrown.
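
For orientation, a minimal sketch of the typical idiom follows (the class name and file path are illustrative, not taken from any of the projects below). Passing true for swallowIOException in a finally block logs a failure to close instead of throwing it, so it cannot mask an exception already raised in the try body; passing false rethrows the close failure, which is the usual choice after writing data that still needs to be flushed.

import com.google.common.io.Closeables;

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

public class CloseablesCloseExample {

    // Reads the first byte of a file. The stream is closed in the finally block;
    // swallowIOException = true means a failure to close is logged rather than
    // thrown, so it cannot hide an exception raised by in.read().
    public static int readFirstByte(String path) throws IOException {
        InputStream in = new FileInputStream(path);
        try {
            return in.read();
        } finally {
            Closeables.close(in, true /* swallowIOException */);
        }
    }
}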

Usage

From source file:org.apache.mahout.vectorizer.DictionaryVectorizer.java

/**
 * Reads the feature frequency List built at the end of the Word Count job and assigns an id to each feature.
 * This uses constant memory and runs at the speed of your disk read.
 */
private static List<Path> createDictionaryChunks(Path wordCountPath, Path dictionaryPathBase,
        Configuration baseConf, int chunkSizeInMegabytes, int[] maxTermDimension) throws IOException {
    List<Path> chunkPaths = Lists.newArrayList();

    Configuration conf = new Configuration(baseConf);

    FileSystem fs = FileSystem.get(wordCountPath.toUri(), conf);

    long chunkSizeLimit = chunkSizeInMegabytes * 1024L * 1024L;
    int chunkIndex = 0;
    Path chunkPath = new Path(dictionaryPathBase, DICTIONARY_FILE + chunkIndex);
    chunkPaths.add(chunkPath);

    SequenceFile.Writer dictWriter = new SequenceFile.Writer(fs, conf, chunkPath, Text.class,
            IntWritable.class);

    try {
        long currentChunkSize = 0;
        Path filesPattern = new Path(wordCountPath, OUTPUT_FILES_PATTERN);
        int i = 0;
        for (Pair<Writable, Writable> record : new SequenceFileDirIterable<Writable, Writable>(filesPattern,
                PathType.GLOB, null, null, true, conf)) {
            if (currentChunkSize > chunkSizeLimit) {
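                // swallowIOException = false: a failure to close (and flush) the finished chunk should propagate rather than be swallowed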
                Closeables.close(dictWriter, false);
                chunkIndex++;

                chunkPath = new Path(dictionaryPathBase, DICTIONARY_FILE + chunkIndex);
                chunkPaths.add(chunkPath);

                dictWriter = new SequenceFile.Writer(fs, conf, chunkPath, Text.class, IntWritable.class);
                currentChunkSize = 0;
            }

            Writable key = record.getFirst();
            int fieldSize = DICTIONARY_BYTE_OVERHEAD + key.toString().length() * 2 + Integer.SIZE / 8;
            currentChunkSize += fieldSize;
            dictWriter.append(key, new IntWritable(i++));
        }
        maxTermDimension[0] = i;
    } finally {
        Closeables.close(dictWriter, false);
    }

    return chunkPaths;
}

From source file:com.android.tools.idea.jps.builder.AndroidGradleTargetBuilder.java

private static void doBuild(@NotNull CompileContext context, @NotNull List<String> buildTasks,
        @NotNull BuilderExecutionSettings executionSettings, @Nullable String androidHome)
        throws ProjectBuildException {
    GradleConnector connector = getGradleConnector(executionSettings);

    ProjectConnection connection = connector.connect();
    ByteArrayOutputStream stdout = new ByteArrayOutputStream(BUFFER_SIZE);
    ByteArrayOutputStream stderr = new ByteArrayOutputStream(BUFFER_SIZE);

    try {
        BuildLauncher launcher = connection.newBuild();
        launcher.forTasks(toStringArray(buildTasks));

        List<String> jvmArgs = Lists.newArrayList();
        BuildMode buildMode = executionSettings.getBuildMode();
        if (BuildMode.ASSEMBLE_TRANSLATE == buildMode) {
            String arg = AndroidGradleSettings.createJvmArg(GradleBuilds.ENABLE_TRANSLATION_JVM_ARG, true);
            jvmArgs.add(arg);
        }

        if (androidHome != null && !androidHome.isEmpty()) {
            String androidSdkArg = AndroidGradleSettings.createAndroidHomeJvmArg(androidHome);
            jvmArgs.add(androidSdkArg);
        }

        jvmArgs.addAll(executionSettings.getJvmOptions());

        LOG.info("Build JVM args: " + jvmArgs);
        if (!jvmArgs.isEmpty()) {
            launcher.setJvmArguments(toStringArray(jvmArgs));
        }

        List<String> commandLineArgs = Lists.newArrayList();
        commandLineArgs.addAll(executionSettings.getCommandLineOptions());
        commandLineArgs.add(
                AndroidGradleSettings.createProjectProperty(AndroidProject.PROPERTY_INVOKED_FROM_IDE, true));

        if (executionSettings.isParallelBuild() && !commandLineArgs.contains(PARALLEL_BUILD_OPTION)) {
            commandLineArgs.add(PARALLEL_BUILD_OPTION);
        }

        if (executionSettings.isOfflineBuild() && !commandLineArgs.contains(OFFLINE_MODE_OPTION)) {
            commandLineArgs.add(OFFLINE_MODE_OPTION);
        }

        if (executionSettings.isConfigureOnDemand() && !commandLineArgs.contains(CONFIGURE_ON_DEMAND_OPTION)) {
            commandLineArgs.add(CONFIGURE_ON_DEMAND_OPTION);
        }

        LOG.info("Build command line args: " + commandLineArgs);
        if (!commandLineArgs.isEmpty()) {
            launcher.withArguments(toStringArray(commandLineArgs));
        }

        File javaHomeDir = executionSettings.getJavaHomeDir();
        if (javaHomeDir != null) {
            launcher.setJavaHome(javaHomeDir);
        }

        launcher.setStandardOutput(stdout);
        launcher.setStandardError(stderr);
        launcher.run();
    } catch (BuildException e) {
        handleBuildException(e, context, stderr.toString());
    } finally {
        String outText = stdout.toString();
        context.processMessage(new ProgressMessage(outText, 1.0f));
        try {
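            // swallowIOException = true: these are in-memory streams, so a close failure loses no data and is merely logged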
            Closeables.close(stdout, true);
            Closeables.close(stderr, true);
        } catch (IOException e) {
            LOG.debug(e);
        }
        connection.close();
    }
}

From source file:com.google.caliper.runner.StreamService.java

/**
 * Write a line of data to the worker process over the socket.
 *
 * <p>N.B. Writing data via {@link #sendMessage(Serializable)} is only valid once the underlying
 * socket has been opened.  This should be fine assuming that socket writes are only in response
 * to socket reads (which is currently the case), so there is no way that a write could happen
 * prior to the socket being opened.
*/
void sendMessage(Serializable message) throws IOException {
    checkState(isRunning(), "Cannot read items from a %s StreamService", state());
    checkState(socketWriter != null, "Attempted to write to the socket before it was opened.");
    try {
        socketWriter.write(message);
        // We need to flush since this is a back and forth lockstep protocol, buffering can cause 
        // deadlock! 
        socketWriter.flush();
    } catch (IOException e) {
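        // swallowIOException = true: the original write failure (e) is rethrown below; a secondary failure while closing is only logged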
        Closeables.close(socketWriter, true);
        notifyFailed(e);
        throw e;
    }
}

From source file:org.apache.mahout.utils.vectors.lucene.ClusterLabels.java

/**
 * Get the list of labels, sorted by best score.
 */
protected List<TermInfoClusterInOut> getClusterLabels(Integer integer,
        Collection<WeightedPropertyVectorWritable> wpvws) throws IOException {

    if (wpvws.size() < minNumIds) {
        log.info("Skipping small cluster {} with size: {}", integer, wpvws.size());
        return null;
    }

    log.info("Processing Cluster {} with {} documents", integer, wpvws.size());
    Directory dir = FSDirectory.open(new File(this.indexDir));
    IndexReader reader = DirectoryReader.open(dir);

    log.info("# of documents in the index {}", reader.numDocs());

    Collection<String> idSet = Sets.newHashSet();
    for (WeightedPropertyVectorWritable wpvw : wpvws) {
        Vector vector = wpvw.getVector();
        if (vector instanceof NamedVector) {
            idSet.add(((NamedVector) vector).getName());
        }
    }

    int numDocs = reader.numDocs();

    OpenBitSet clusterDocBitset = getClusterDocBitset(reader, idSet, this.idField);

    log.info("Populating term infos from the index");

    /**
     * This code is the same as that of CachedTermInfo, with one major change: how the document frequency is obtained.
     * 
     * Since we have deleted the documents out of the cluster, the document frequency for a term should only
     * include the in-cluster documents. The document frequency obtained from TermEnum reflects the frequency
     * in the entire index. To get the in-cluster frequency, we need to query the index to get the term
     * frequencies in each document. The number of results of this call will be the in-cluster document
     * frequency.
     */
    Terms t = MultiFields.getTerms(reader, contentField);
    TermsEnum te = t.iterator(null);
    Map<String, TermEntry> termEntryMap = new LinkedHashMap<String, TermEntry>();
    Bits liveDocs = MultiFields.getLiveDocs(reader); //WARNING: returns null if there are no deletions

    int count = 0;
    BytesRef term;
    while ((term = te.next()) != null) {
        OpenBitSet termBitset = new OpenBitSet(reader.maxDoc());
        DocsEnum docsEnum = MultiFields.getTermDocsEnum(reader, null, contentField, term);
        int docID;
        while ((docID = docsEnum.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
            //check to see if we don't have any deletions (null) or if the document is live
            if (liveDocs != null && !liveDocs.get(docID)) {
                // document is deleted...
                termBitset.set(docsEnum.docID());
            }
        }
        // AND the term's bitset with cluster doc bitset to get the term's in-cluster frequency.
        // This modifies the termBitset, but that's fine as we are not using it anywhere else.
        termBitset.and(clusterDocBitset);
        int inclusterDF = (int) termBitset.cardinality();

        TermEntry entry = new TermEntry(term.utf8ToString(), count++, inclusterDF);
        termEntryMap.put(entry.getTerm(), entry);

    }

    List<TermInfoClusterInOut> clusteredTermInfo = Lists.newLinkedList();

    int clusterSize = wpvws.size();

    for (TermEntry termEntry : termEntryMap.values()) {

        int corpusDF = reader.docFreq(new Term(this.contentField, termEntry.getTerm()));
        int outDF = corpusDF - termEntry.getDocFreq();
        int inDF = termEntry.getDocFreq();
        double logLikelihoodRatio = scoreDocumentFrequencies(inDF, outDF, clusterSize, numDocs);
        TermInfoClusterInOut termInfoCluster = new TermInfoClusterInOut(termEntry.getTerm(), inDF, outDF,
                logLikelihoodRatio);
        clusteredTermInfo.add(termInfoCluster);
    }

    Collections.sort(clusteredTermInfo);
    // Cleanup
    Closeables.close(reader, true);
    termEntryMap.clear();

    return clusteredTermInfo.subList(0, Math.min(clusteredTermInfo.size(), maxLabels));
}

From source file:com.android.assetstudiolib.GraphicGenerator.java

/**
 * Returns the full size clip art image for a given image name.
 *
 * @param name the name of the image to be loaded (which can be looked up via
 *            {@link #getClipartNames()})
 * @return the clip art image
 * @throws IOException if the image cannot be loaded
 */
public static BufferedImage getClipartImage(String name) throws IOException {
    InputStream is = GraphicGenerator.class.getResourceAsStream("/images/clipart/big/" + name);
    try {
        return ImageIO.read(is);
    } finally {
        Closeables.close(is, true /* swallowIOException */);
    }
}

From source file:org.apache.mahout.utils.clustering.ClusterDumper.java

public void printClusters(String[] dictionary) throws Exception {
    Configuration conf = new Configuration();

    if (this.termDictionary != null) {
        if ("text".equals(dictionaryFormat)) {
            dictionary = VectorHelper.loadTermDictionary(new File(this.termDictionary));
        } else if ("sequencefile".equals(dictionaryFormat)) {
            dictionary = VectorHelper.loadTermDictionary(conf, this.termDictionary);
        } else {
            throw new IllegalArgumentException("Invalid dictionary format");
        }
    }

    Writer writer;
    boolean shouldClose;
    if (this.outputFile == null) {
        shouldClose = false;
        writer = new OutputStreamWriter(System.out, Charsets.UTF_8);
    } else {
        shouldClose = true;
        if (outputFile.getName().startsWith("s3n://")) {
            Path p = outputPath;
            FileSystem fs = FileSystem.get(p.toUri(), conf);
            writer = new OutputStreamWriter(fs.create(p), Charsets.UTF_8);
        } else {
            Files.createParentDirs(outputFile);
            writer = Files.newWriter(this.outputFile, Charsets.UTF_8);
        }
    }
    ClusterWriter clusterWriter = createClusterWriter(writer, dictionary);
    try {
        long numWritten = clusterWriter.write(new SequenceFileDirValueIterable<ClusterWritable>(
                new Path(seqFileDir, "part-*"), PathType.GLOB, conf));

        writer.flush();
        if (runEvaluation) {
            HadoopUtil.delete(conf, new Path("tmp/representative"));
            int numIters = 5;
            RepresentativePointsDriver.main(new String[] { "--input", seqFileDir.toString(), "--output",
                    "tmp/representative", "--clusteredPoints", pointsDir.toString(), "--distanceMeasure",
                    measure.getClass().getName(), "--maxIter", String.valueOf(numIters) });
            conf.set(RepresentativePointsDriver.DISTANCE_MEASURE_KEY, measure.getClass().getName());
            conf.set(RepresentativePointsDriver.STATE_IN_KEY,
                    "tmp/representative/representativePoints-" + numIters);
            ClusterEvaluator ce = new ClusterEvaluator(conf, seqFileDir);
            writer.append("\n");
            writer.append("Inter-Cluster Density: ").append(String.valueOf(ce.interClusterDensity()))
                    .append("\n");
            writer.append("Intra-Cluster Density: ").append(String.valueOf(ce.intraClusterDensity()))
                    .append("\n");
            CDbwEvaluator cdbw = new CDbwEvaluator(conf, seqFileDir);
            writer.append("CDbw Inter-Cluster Density: ").append(String.valueOf(cdbw.interClusterDensity()))
                    .append("\n");
            writer.append("CDbw Intra-Cluster Density: ").append(String.valueOf(cdbw.intraClusterDensity()))
                    .append("\n");
            writer.append("CDbw Separation: ").append(String.valueOf(cdbw.separation())).append("\n");
            writer.flush();
        }
        log.info("Wrote {} clusters", numWritten);
    } finally {
        if (shouldClose) {
            Closeables.close(clusterWriter, false);
        } else {
            if (clusterWriter instanceof GraphMLClusterWriter) {
                clusterWriter.close();
            }
        }
    }
}

From source file:com.nesscomputing.db.postgres.embedded.EmbeddedPostgreSQL.java

@Override
public void close() throws IOException {
    if (closed.getAndSet(true)) {
        return;
    }
    final StopWatch watch = new StopWatch();
    watch.start();
    try {
        pgCtl(dataDirectory, "stop");
        LOG.info("{} shut down postmaster in {}", instanceId, watch);
    } catch (final Exception e) {
        LOG.error("Could not stop postmaster " + instanceId, e);
    }
    if (lock != null) {
        lock.release();
    }
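    // swallowIOException = true: during shutdown a failure to close the lock stream is logged rather than thrown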
    Closeables.close(lockStream, true);

    if (cleanDataDirectory && System.getProperty("ness.epg.no-cleanup") == null) {
        FileUtils.deleteDirectory(dataDirectory);
    } else {
        LOG.info("Did not clean up directory {}", dataDirectory.getAbsolutePath());
    }
}

From source file:com.microsoft.thrifty.schema.Loader.java

private ThriftFileElement loadSingleFile(Path base, Path fileName) throws IOException {
    Path file = base.resolve(fileName);
    if (!Files.exists(file)) {
        return null;
    }

    Source source = Okio.source(file);
    try {
        Location location = Location.get(base.toString(), fileName.toString());
        String data = Okio.buffer(source).readUtf8();
        return ThriftParser.parse(location, data, errorReporter);
    } catch (IOException e) {
        throw new IOException("Failed to load " + fileName + " from " + base, e);
    } finally {
        Closeables.close(source, true);
    }
}

From source file:com.orange.clara.cloud.servicedbdumper.controllers.ManagerController.java

private void getErrorResponseEntityBasicAuth(HttpServletResponse resp) throws IOException {
    String errorMessage = "401 Unauthorized";

    resp.setHeader("WWW-Authenticate", "Basic realm=\"Download Realm\"");
    resp.setStatus(HttpStatus.UNAUTHORIZED.value());
    OutputStream outputStream = resp.getOutputStream();
    try {
        outputStream.write(errorMessage.getBytes());
    } finally {
        Closeables.close(outputStream, true);
    }

}

From source file:zhwb.study.compiler.CompilerUtils.java

private static void writeCode(File sourceRoot, String fullName, String source) throws IOException {
    int index = fullName.lastIndexOf(".") + 1;
    String className = fullName.substring(index);
    String classPackage = fullName.substring(0, index).replace(".", File.separator);
    File classPackageFile = new File(sourceRoot, classPackage);
    if (!classPackageFile.exists()) {
        classPackageFile.mkdirs();
    }
    File sourceFile = new File(classPackageFile.getCanonicalPath(), className + ".java");
    if (!sourceFile.exists()) {
        BufferedWriter bufferedWriter = new BufferedWriter(
                new OutputStreamWriter(new FileOutputStream(sourceFile), "UTF-8"));
        try {
            bufferedWriter.write(source);
        } finally {
            Closeables.close(bufferedWriter, true);
        }
    }
}