Example usage for com.google.common.io Closeables close

Introduction

On this page you can find example usages of com.google.common.io Closeables close.

Prototype

public static void close(@Nullable Closeable closeable, boolean swallowIOException) throws IOException 

Document

Closes a Closeable, with control over whether an IOException may be thrown.
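
A minimal, self-contained sketch of the call (the class name and file path are illustrative, not taken from the examples below). Passing true swallows and logs an IOException thrown by close(); passing false rethrows it. A common idiom, visible in several examples below, is to track whether the try block threw and swallow a close() failure only in that case, so the original exception is not masked:

import com.google.common.io.Closeables;

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

public class CloseablesCloseDemo {

    public static void main(String[] args) throws IOException {
        InputStream in = null;
        boolean threw = true;
        try {
            in = new FileInputStream("example.txt"); // illustrative path
            // ... read from the stream ...
            threw = false;
        } finally {
            // Swallow a close() failure only if the body already threw,
            // so the body's exception is not masked by the one from close().
            Closeables.close(in, threw);
        }
    }
}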

Usage

From source file:nextmethod.web.razor.generator.internal.CodeWriter.java

protected void dispose(final boolean disposing) {
    if (disposing && writer != null) {
        try {
            Closeables.close(writer, true);
        } catch (IOException ignored) {
        }
    }
}

From source file:de.tuberlin.dima.cuttlefish.preprocessing.vectorization.Vectorizer.java

public void vectorize(File luceneIndexDir, File outputDir) throws Exception {

    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf);
    SequenceFile.Writer writer = null;

    FeatureDictionary dict = new FeatureDictionary();

    DirectoryReader reader = null;
    try {
        reader = DirectoryReader.open(new SimpleFSDirectory(luceneIndexDir));

        writer = SequenceFile.createWriter(fs, conf, new Path(outputDir.toString(), "documentVectors.seq"),
                IDAndCodes.class, VectorWritable.class);
        IDAndCodes idAndCodes = new IDAndCodes();
        VectorWritable vectorWritable = new VectorWritable();

        Fields fields = MultiFields.getFields(reader);
        if (fields != null) {
            Iterator<String> fieldNames = fields.iterator();
            while (fieldNames.hasNext()) {
                String field = fieldNames.next();
                if (!field.startsWith("bip:") && !"itemID".equals(field)) {

                    Terms terms = fields.terms(field);
                    TermsEnum termsEnum = terms.iterator(null);
                    BytesRef text;
                    while ((text = termsEnum.next()) != null) {
                        dict.addTextFeature(field, text.utf8ToString());
                    }
                }
            }
        }

        int numDocsVectorized = 0;

        for (int docID = 0; docID < reader.maxDoc(); docID++) {
            Document doc = reader.document(docID);

            int itemID = doc.getField("itemID").numericValue().intValue();

            RandomAccessSparseVector documentVector = new RandomAccessSparseVector(dict.numFeatures());
            Multimap<String, String> codes = HashMultimap.create();

            for (IndexableField field : doc.getFields()) {

                String fieldName = field.name();

                if (!fieldName.startsWith("bip:") && !"itemID".equals(fieldName)) {

                    Terms termFreqVector = reader.getTermVector(docID, fieldName);

                    if (termFreqVector != null) {

                        int maxTermFrequency = maxTermFrequency(termFreqVector);

                        TermsEnum te = termFreqVector.iterator(null);
                        BytesRef term;

                        while ((term = te.next()) != null) {

                            String termStr = term.utf8ToString();
                            int termFrequency = (int) te.totalTermFreq();

                            int documentFrequency = reader.docFreq(new Term(fieldName, term));
                            int numDocs = reader.numDocs();

                            double weight = weighting.weight(fieldName, termStr, termFrequency,
                                    documentFrequency, maxTermFrequency, numDocs);

                            int featureIndex = dict.index(fieldName, term.utf8ToString());
                            documentVector.setQuick(featureIndex, weight);
                        }
                    }

                } else if (fieldName.startsWith("bip:")) {
                    for (String value : doc.getValues(fieldName)) {
                        codes.put(fieldName, value);
                    }
                }
            }

            Vector featureVector = new SequentialAccessSparseVector(documentVector);

            weighting.normalize(featureVector);

            idAndCodes.set(itemID, codes);
            vectorWritable.set(featureVector);
            writer.append(idAndCodes, vectorWritable);

            numDocsVectorized++;
            if (numDocsVectorized % 100 == 0) {
                log.info("Vectorized {} documents", numDocsVectorized);
            }
        }

        log.info("Vectorized {} documents", numDocsVectorized);

        dict.writeToFile(new File(outputDir, "features.txt"));

        log.info("Wrote feature dictionary");

    } finally {
        Closeables.close(reader, true);
        Closeables.close(writer, true);
    }

}

From source file:org.jmxtrans.embedded.samples.graphite.GraphiteDataInjector.java

public void exportMetrics(TimeSeries timeSeries) throws IOException {
    System.out.println("Export '" + timeSeries.getKey() + "' to " + graphiteHost + " with prefix '"
            + graphiteMetricPrefix + "'");
    Socket socket = new Socket(graphiteHost, graphitePort);
    OutputStream outputStream = socket.getOutputStream();

    if (generateDataPointsFile) {
        JFreeChart chart = ChartFactory.createXYLineChart("Purchase", "date", "Amount",
                new TimeSeriesCollection(timeSeries), PlotOrientation.VERTICAL, true, true, false);
        // chart.getXYPlot().setRenderer(new XYSplineRenderer(60));

        File file = new File("/tmp/" + timeSeries.getKey() + ".png");
        ChartUtilities.saveChartAsPNG(file, chart, 1200, 800);
        System.out.println("Exported " + file.getAbsolutePath());

        String pickleFileName = "/tmp/" + timeSeries.getKey().toString() + ".pickle";
        System.out.println("Generate " + pickleFileName);
        outputStream = new TeeOutputStream(outputStream, new FileOutputStream(pickleFileName));
    }

    PyList list = new PyList();

    for (int i = 0; i < timeSeries.getItemCount(); i++) {
        if (debug)
            System.out.println(new DateTime(timeSeries.getDataItem(i).getPeriod().getStart()) + "\t"
                    + timeSeries.getDataItem(i).getValue().intValue());
        String metricName = graphiteMetricPrefix + timeSeries.getKey().toString();
        int time = (int) TimeUnit.SECONDS.convert(timeSeries.getDataItem(i).getPeriod().getStart().getTime(),
                TimeUnit.MILLISECONDS);
        int value = timeSeries.getDataItem(i).getValue().intValue();

        list.add(new PyTuple(new PyString(metricName), new PyTuple(new PyInteger(time), new PyInteger(value))));

        if (list.size() >= batchSize) {
            System.out.print("-");
            rateLimiter.acquire(list.size());
            sendDataPoints(outputStream, list);
        }
    }

    // send last data points
    if (!list.isEmpty()) {
        rateLimiter.acquire(list.size());
        sendDataPoints(outputStream, list);
    }

    Flushables.flushQuietly(outputStream);
    Closeables.close(outputStream, true);
    try {
        socket.close();
    } catch (Exception e) {
        // swallow exception
        e.printStackTrace();
    }

    System.out.println();
    System.out.println("Exported " + timeSeries.getKey() + ": " + timeSeries.getItemCount() + " items");
}

From source file:com.android.tools.klint.client.api.ClassEntry.java

/**
 * Given a classpath, add all the class files found within the directories and inside jar files
 */
private static void addEntries(@NonNull LintClient client, @NonNull List<ClassEntry> entries,
        @NonNull List<File> classPath) {
    for (File classPathEntry : classPath) {
        if (classPathEntry.getName().endsWith(DOT_JAR)) {
            //noinspection UnnecessaryLocalVariable
            File jarFile = classPathEntry;
            if (!jarFile.exists()) {
                continue;
            }
            ZipInputStream zis = null;
            try {
                FileInputStream fis = new FileInputStream(jarFile);
                try {
                    zis = new ZipInputStream(fis);
                    ZipEntry entry = zis.getNextEntry();
                    while (entry != null) {
                        String name = entry.getName();
                        if (name.endsWith(DOT_CLASS)) {
                            try {
                                byte[] bytes = ByteStreams.toByteArray(zis);
                                if (bytes != null) {
                                    File file = new File(entry.getName());
                                    entries.add(new ClassEntry(file, jarFile, jarFile, bytes));
                                }
                            } catch (Exception e) {
                                client.log(e, null);
                                continue;
                            }
                        }

                        entry = zis.getNextEntry();
                    }
                } finally {
                    Closeables.close(fis, true);
                }
            } catch (IOException e) {
                client.log(e, "Could not read jar file contents from %1$s", jarFile);
            } finally {
                try {
                    Closeables.close(zis, true);
                } catch (IOException e) {
                    // cannot happen
                }
            }
        } else if (classPathEntry.isDirectory()) {
            //noinspection UnnecessaryLocalVariable
            File binDir = classPathEntry;
            List<File> classFiles = new ArrayList<File>();
            addClassFiles(binDir, classFiles);

            for (File file : classFiles) {
                try {
                    byte[] bytes = client.readBytes(file);
                    entries.add(new ClassEntry(file, null /* jarFile*/, binDir, bytes));
                } catch (IOException e) {
                    client.log(e, null);
                }
            }
        } else {
            client.log(null, "Ignoring class path entry %1$s", classPathEntry);
        }
    }
}

From source file:org.artifactory.maven.index.MavenIndexManager.java

boolean fetchRemoteIndex(boolean forceRemoteDownload) {
    if (indexedRepo.isLocal()) {
        return false;
    } else {
        //For remote repositories, try to download the remote cache. If fails - index locally
        RemoteRepo remoteRepo = (RemoteRepo) indexedRepo;
        if (remoteRepo.isStoreArtifactsLocally()) {
            indexStorageRepo = remoteRepo.getLocalCacheRepo();
        }
        if (remoteRepo.isOffline()) {
            log.debug("Not retrieving index for remote repository '{}'.", indexedRepo.getKey());
            if (!isIndexFilesDontExistInCache(remoteRepo)) {
                log.debug("Skipping indexing for remote offline repository '{}', Index exists in cache.",
                        indexedRepo.getKey());
                indexStatus = IndexStatus.SKIP;
            }
            unExpireIndexIfExists(remoteRepo);
            return false;
        }

        File tempIndex = null;
        File tempProperties = null;
        ResourceStreamHandle remoteIndexHandle = null;
        ResourceStreamHandle remotePropertiesHandle = null;
        try {
            //Never auto-fetch the index from central if it cannot be stored locally unless force flag is enabled
            if (!forceRemoteDownload && !shouldFetchRemoteIndex(remoteRepo)) {
                //Return true so that we don't attempt to index locally as a fallback
                return true;
            }

            //If we receive a non-modified response (with a null handle) - don't re-download the index
            log.debug("Fetching remote index files for {}", indexedRepo);
            FileOutputStream fos = null;
            try {
                remoteIndexHandle = remoteRepo.conditionalRetrieveResource(MavenNaming.NEXUS_INDEX_GZ_PATH,
                        forceRemoteDownload);
                if (remoteIndexHandle instanceof NullResourceStreamHandle) {
                    log.debug("No need to fetch unmodified index for remote repository '{}'.",
                            indexedRepo.getKey());
                    indexStatus = IndexStatus.SKIP;
                    return true;
                }
                //Save into temp files
                tempIndex = File.createTempFile(MavenNaming.NEXUS_INDEX_GZ, null);
                fos = new FileOutputStream(tempIndex);
                TaskUtils.copyLarge(remoteIndexHandle.getInputStream(), fos);
            } finally {
                IOUtils.closeQuietly(fos);
                /**
                 * Close the handle directly after reading stream and before we start to download the properties
                 * in case the target repo does not allow multiple simultaneous connections
                 */
                Closeables.close(remoteIndexHandle, false);
            }

            fos = null;
            try {
                remotePropertiesHandle = remoteRepo.downloadResource(MavenNaming.NEXUS_INDEX_PROPERTIES_PATH);
                tempProperties = File.createTempFile(MavenNaming.NEXUS_INDEX_PROPERTIES, null);
                fos = new FileOutputStream(tempProperties);
                TaskUtils.copyLarge(remotePropertiesHandle.getInputStream(), fos);
            } finally {
                IOUtils.closeQuietly(fos);
                Closeables.close(remotePropertiesHandle, false);
            }

            //Return the handle to the zip file (will be removed when the handle is closed)
            indexHandle = new TempFileStreamHandle(tempIndex);
            propertiesHandle = new TempFileStreamHandle(tempProperties);
            indexStatus = IndexStatus.NEEDS_SAVING;
            log.debug("Fetched remote index files for {}", indexedRepo);
            return true;
        } catch (IOException e) {
            closeHandles();
            FileUtils.deleteQuietly(tempIndex);
            FileUtils.deleteQuietly(tempProperties);
            log.warn("Could not retrieve remote maven index '" + MavenNaming.NEXUS_INDEX_GZ + "' for repo '"
                    + indexedRepo + "': " + e.getMessage());
            abort();
            if (isNotFoundInRemoteRepo(e) || isIndexFilesDontExistInCache(remoteRepo)) {
                indexStatus = IndexStatus.NOT_CREATED;
            }
            unExpireIndexIfExists(remoteRepo);
            return false;
        }
    }
}

From source file:net.conquiris.index.DefaultWriter.java

/**
 * Default writer.
 * @param log Log context.
 * @param writer Lucene index writer to use.
 * @param overrideCheckpoint Whether to override the checkpoint.
 * @param checkpoint Overridden checkpoint value.
 * @param created Whether the index has been requested to be created.
 */
DefaultWriter(ContextLog log, IndexWriter writer, boolean overrideCheckpoint, @Nullable String checkpoint,
        boolean created) throws IndexException {
    this.log = checkNotNull(log, "The log context must be provided");
    this.writer = checkNotNull(writer, "The index writer must be provided");
    this.properties = new MapMaker().makeMap();
    this.keys = Collections.unmodifiableSet(this.properties.keySet());
    // Read properties
    try {
        final Map<String, String> commitData;
        final int documents;
        if (created) {
            commitData = ImmutableMap.of();
            documents = 0;
        } else {
            final IndexReader reader = IndexReader.open(writer, false);
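            // Track whether the body threw: close(reader, threw) below swallows
            // its own IOException only in that case, so the original exception wins.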
            boolean threw = true;
            try {
                Map<String, String> data = reader.getIndexCommit().getUserData();
                if (overrideCheckpoint) {
                    final Map<String, String> modified = Maps.newHashMap();
                    if (data != null) {
                        modified.putAll(data);
                    }
                    modified.put(IndexInfo.CHECKPOINT, checkpoint);
                    commitData = modified;
                } else {
                    commitData = data;
                }
                documents = reader.numDocs();
                threw = false;
            } finally {
                Closeables.close(reader, threw);
            }
        }
        this.indexInfo = IndexInfo.fromMap(documents, commitData);
        this.checkpoint = this.indexInfo.getCheckpoint();
        this.targetCheckpoint = this.indexInfo.getTargetCheckpoint();
        this.properties.putAll(this.indexInfo.getProperties());
    } catch (LockObtainFailedException e) {
        indexStatus.compareAndSet(IndexStatus.OK, IndexStatus.LOCKED);
        throw new IndexException(e);
    } catch (CorruptIndexException e) {
        indexStatus.compareAndSet(IndexStatus.OK, IndexStatus.CORRUPT);
        throw new IndexException(e);
    } catch (IOException e) {
        indexStatus.compareAndSet(IndexStatus.OK, IndexStatus.IOERROR);
        throw new IndexException(e);
    } catch (RuntimeException e) {
        indexStatus.compareAndSet(IndexStatus.OK, IndexStatus.ERROR);
        throw e;
    }
}

From source file:net.shibboleth.idp.log.LogbackLoggingService.java

/**
 * Reads and loads in a new logging configuration.
 *
 * @throws ServiceException thrown if there is a problem loading the logging configuration
 */
protected void loadLoggingConfiguration() {
    InputStream ins = null;
    try {
        statusManager.add(new InfoStatus(
                "Loading new logging configuration resource: " + configurationResource.getDescription(), this));
        ins = configurationResource.getInputStream();
        loadLoggingConfiguration(ins);
    } catch (final Exception e) {
        try {
            Closeables.close(ins, true);
        } catch (final IOException e1) {
            // swallowed && logged by Closeables but...
            throw new ServiceException(e1);
        }
        statusManager.add(new ErrorStatus(
                "Error loading logging configuration file: " + configurationResource.getDescription(), this,
                e));
        try {
            statusManager.add(new InfoStatus("Loading fallback logging configuration", this));
            ins = fallbackConfiguration.getInputStream();
            loadLoggingConfiguration(ins);
        } catch (final IOException ioe) {
            try {
                Closeables.close(ins, true);
            } catch (final IOException e1) {
                // swallowed && logged by Closeables
                throw new ServiceException(e1);
            }
            statusManager.add(new ErrorStatus("Error loading fallback logging configuration", this, e));
            throw new ServiceException("Unable to load fallback logging configuration");
        }
    } finally {
        try {
            Closeables.close(ins, true);
        } catch (final IOException e) {
            // swallowed && logged by Closeables
            throw new ServiceException(e);
        }
    }
}

From source file:com.cloudera.cdk.data.filesystem.FileSystemMetadataProvider.java

@Override
public DatasetDescriptor load(String name) {
    Preconditions.checkArgument(name != null, "Name cannot be null");

    logger.debug("Loading dataset metadata name:{}", name);

    final Path metadataPath = pathForMetadata(name);
    checkExists(rootFileSystem, metadataPath);

    InputStream inputStream = null;
    Properties properties = new Properties();
    DatasetDescriptor.Builder builder = new DatasetDescriptor.Builder();
    Path descriptorPath = new Path(metadataPath, DESCRIPTOR_FILE_NAME);

    boolean threw = true;
    try {
        inputStream = rootFileSystem.open(descriptorPath);
        properties.load(inputStream);
        threw = false;
    } catch (IOException e) {
        throw new MetadataProviderException(
                "Unable to load descriptor file:" + descriptorPath + " for dataset:" + name, e);
    } finally {
        try {
            Closeables.close(inputStream, threw);
        } catch (IOException e) {
            throw new MetadataProviderException(e);
        }
    }

    if (properties.containsKey(FORMAT_FIELD_NAME)) {
        builder.format(Accessor.getDefault().newFormat(properties.getProperty(FORMAT_FIELD_NAME)));
    }
    if (properties.containsKey(PARTITION_EXPRESSION_FIELD_NAME)) {
        builder.partitionStrategy(
                Accessor.getDefault().fromExpression(properties.getProperty(PARTITION_EXPRESSION_FIELD_NAME)));
    }
    Path schemaPath = new Path(metadataPath, SCHEMA_FILE_NAME);
    try {
        builder.schemaUri(rootFileSystem.makeQualified(schemaPath).toUri());
    } catch (IOException e) {
        throw new MetadataProviderException("Unable to load schema file:" + schemaPath + " for dataset:" + name,
                e);
    }

    final Path location;
    if (properties.containsKey(LOCATION_FIELD_NAME)) {
        // the location should always be written by this library and validated
        // when the descriptor is first created.
        location = new Path(properties.getProperty(LOCATION_FIELD_NAME));
    } else {
        // backwards-compatibility: older versions didn't write this property
        location = pathForDataset(name);
    }
    builder.location(location);

    // custom properties
    for (String property : properties.stringPropertyNames()) {
        if (!RESERVED_PROPERTIES.contains(property)) {
            builder.property(property, properties.getProperty(property));
        }
    }

    return builder.build();
}

From source file:org.apache.mahout.classifier.df.mapreduce.Classifier.java

/**
 * Extract the predictions for each mapper and write them to the corresponding output file.
 * The name of the output file is based on the name of the corresponding input file.
 * Will compute the ConfusionMatrix if necessary.
 */
private void parseOutput(JobContext job) throws IOException {
    Configuration conf = job.getConfiguration();
    FileSystem fs = mappersOutputPath.getFileSystem(conf);

    Path[] outfiles = DFUtils.listOutputFiles(fs, mappersOutputPath);

    // read all the output
    List<double[]> resList = Lists.newArrayList();
    for (Path path : outfiles) {
        FSDataOutputStream ofile = null;
        try {
            for (Pair<DoubleWritable, Text> record : new SequenceFileIterable<DoubleWritable, Text>(path, true,
                    conf)) {
                double key = record.getFirst().get();
                String value = record.getSecond().toString();
                if (ofile == null) {
                    // this is the first value, it contains the name of the input file
                    ofile = fs.create(new Path(outputPath, value).suffix(".out"));
                } else {
                    // The key contains the correct label of the data. The value contains a prediction
                    ofile.writeChars(value); // write the prediction
                    ofile.writeChar('\n');

                    resList.add(new double[] { key, Double.valueOf(value) });
                }
            }
        } finally {
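            // ofile may still be null if the sequence file was empty;
            // Closeables.close accepts a null Closeable as a no-op.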
            Closeables.close(ofile, false);
        }
    }
    results = new double[resList.size()][2];
    resList.toArray(results);
}

From source file:my.mahout.SequenceFilesFromDirectory.java

private int runSequential(Configuration conf, Path input, Path output, Map<String, String> options)
        throws IOException, InterruptedException, NoSuchMethodException {
    // Running sequentially
    Charset charset = Charset.forName(getOption(CHARSET_OPTION[0]));
    String keyPrefix = getOption(KEY_PREFIX_OPTION[0]);
    FileSystem fs = FileSystem.get(input.toUri(), conf);
    ChunkedWriter writer = new ChunkedWriter(conf, Integer.parseInt(options.get(CHUNK_SIZE_OPTION[0])), output);

    try {
        SequenceFilesFromDirectoryFilter pathFilter;
        String fileFilterClassName = options.get(FILE_FILTER_CLASS_OPTION[0]);
        if (PrefixAdditionFilter.class.getName().equals(fileFilterClassName)) {
            pathFilter = new PrefixAdditionFilter(conf, keyPrefix, options, writer, charset, fs);
        } else {
            pathFilter = ClassUtils.instantiateAs(fileFilterClassName, SequenceFilesFromDirectoryFilter.class,
                    new Class[] { Configuration.class, String.class, Map.class, ChunkedWriter.class,
                            Charset.class, FileSystem.class },
                    new Object[] { conf, keyPrefix, options, writer, charset, fs });
        }
        fs.listStatus(input, pathFilter);
    } finally {
        Closeables.close(writer, false);
    }
    return 0;
}