Example usage for com.google.common.io Closer register

Introduction

This page collects example usages of the register method of com.google.common.io.Closer, drawn from open-source projects.

Prototype


public <C extends Closeable> C register(@Nullable C closeable) 

Document

Registers the given closeable to be closed when this Closer is closed.
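
The method accepts null (returning it unchanged) and otherwise returns the closeable it is given, which is what allows the open-and-register-in-one-expression style seen throughout the examples below. Here is a minimal sketch of the canonical register/rethrow/close pattern; the ByteArrayInputStream source is a placeholder standing in for real resources:

import com.google.common.io.Closer;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;

public class CloserSketch {
    public static void readBoth() throws IOException {
        Closer closer = Closer.create();
        try {
            // register() returns its argument, so opening and registering
            // a resource is a single expression.
            InputStream first = closer.register(openStream());
            InputStream second = closer.register(openStream());
            first.read();
            second.read();
        } catch (Throwable t) {
            // rethrow() marks t as the primary exception; anything thrown
            // while closing is suppressed or logged instead of masking it.
            throw closer.rethrow(t);
        } finally {
            // Closes every registered stream, in reverse (LIFO) order.
            closer.close();
        }
    }

    // Placeholder resource for illustration only.
    private static InputStream openStream() {
        return new ByteArrayInputStream(new byte[] { 1, 2, 3 });
    }
}

On Java 7 and later, try-with-resources usually replaces this pattern, but Closer remains useful when the set of resources is built up dynamically, as several of the examples below show.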

Usage

From source file:gobblin.runtime.SafeDatasetCommit.java
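This example relies on register() returning its argument: the DataPublisher is obtained and handed to the caller's Closer in a single expression.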

void checkForUnpublishedWUHandling(String datasetUrn, JobState.DatasetState datasetState,
        Class<? extends DataPublisher> dataPublisherClass, Closer closer)
        throws ReflectiveOperationException, IOException {
    if (UnpublishedHandling.class.isAssignableFrom(dataPublisherClass)) {
        // pass in jobstate to retrieve properties
        DataPublisher publisher = closer
                .register(DataPublisher.getInstance(dataPublisherClass, this.jobContext.getJobState()));
        log.info(String.format("Calling publisher to handle unpublished work units for dataset %s of job %s.",
                datasetUrn, this.jobContext.getJobId()));
        ((UnpublishedHandling) publisher)
                .handleUnpublishedWorkUnits(datasetState.getTaskStatesAsWorkUnitStates());
    }
}

From source file:org.parboiled.transform.ClassNodeInitializer.java
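Here a fresh Closer is created on each iteration of the class-hierarchy walk; the class file InputStream is registered and then closed in the finally block once the ASM ClassReader has consumed it.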

public void process(final ParserClassNode classNode) throws IOException {
    this.classNode = Objects.requireNonNull(classNode, "classNode");

    // walk up the parser parent class chain
    ownerClass = classNode.getParentClass();
    Closer closer;
    ClassReader reader;
    InputStream in;
    while (!Object.class.equals(ownerClass)) {
        annotations.removeAll(CLASS_FLAGS_CLEAR);

        closer = Closer.create();
        try {
            in = getInputStream(ownerClass);
            if (in == null)
                throw new IOException(ownerClass + " not found");
            reader = new ClassReader(closer.register(in));
            reader.accept(this, ClassReader.SKIP_FRAMES);
        } finally {
            closer.close();
        }
        ownerClass = ownerClass.getSuperclass();
    }

    for (final RuleMethod method : classNode.getRuleMethods().values()) {
        // move all flags from the super methods to their overriding methods
        if (!method.isSuperMethod())
            continue;

        final String overridingMethodName = method.name.substring(1) + method.desc;

        final RuleMethod overridingMethod = classNode.getRuleMethods().get(overridingMethodName);

        method.moveFlagsTo(overridingMethod);
    }
}

From source file:net.stevechaloner.intellijad.decompilers.FileSystemDecompiler.java
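Both the buffered output and input streams are registered on one Closer. The explicit output.close() before the finally block is harmless, since Closeable.close() is required to be idempotent.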

protected Optional<VirtualFile> insertIntoFileSystem(@NotNull DecompilationDescriptor descriptor,
        @NotNull final DecompilationContext context, @NotNull MemoryVF file) {
    final boolean debug = LOG.isDebugEnabled();

    if (debug) {
        LOG.debug("Inserting into local file system");
    }

    final LocalFileSystem localFs = getLocalFileSystem();
    Config config = context.getConfig();
    File localPath = new File(config.getOutputDirectory() + File.separator + descriptor.getPackageNameAsPath());

    if (debug) {
        LOG.debug("Insert into " + localPath.getAbsolutePath());
    }

    Optional<VirtualFile> insertFile;
    boolean exists = localPath.exists();
    boolean canWrite = localPath.canWrite();
    boolean mkDirs = true;
    if (!exists) {
        mkDirs = localPath.mkdirs();
    }
    boolean cannotStore = false;
    if ((exists && canWrite) || mkDirs) {
        String fileName = descriptor.getClassName() + IntelliJadConstants.DOT_JAVA_EXTENSION;
        final File localFile = new File(localPath, fileName);
        if (localFile.exists() && !localFile.setWritable(true)) {
            LOG.warn("Could not set " + localFile.getAbsolutePath() + " as writable");
        }
        Closer closer = Closer.create();
        try {
            if (debug) {
                LOG.debug("Insert into local file " + localFile.getAbsolutePath());
            }
            OutputStream output = closer.register(new BufferedOutputStream(new FileOutputStream(localFile)));
            if (debug) {
                LOG.debug("Writing...");
            }
            InputStream input = closer.register(new BufferedInputStream(file.getInputStream()));
            StreamUtil.copyStreamContent(input, output);
            if (debug) {
                LOG.debug("Written");
            }
            output.close();
            if (debug) {
                LOG.debug("Closed");
            }
        } catch (IOException e) {
            LOG.error("Could not save file", e);
            cannotStore = true;
        } finally {
            try {
                closer.close();
            } catch (IOException e) {
                LOG.error("Could not close files", e);
            }
        }
        if (cannotStore) {
            insertFile = Optional.absent();
        } else {
            final AtomicReference<VirtualFile> foundFile = new AtomicReference<VirtualFile>();
            appInvoker.runWriteActionAndWait(new Runnable() {
                public void run() {
                    if (debug) {
                        LOG.debug("Looking for file: " + localFile.getAbsolutePath());
                    }
                    foundFile.set(localFs.refreshAndFindFileByIoFile(localFile));
                    if (debug) {
                        LOG.debug("Found " + String.valueOf(foundFile.get()));
                    }
                }
            });

            insertFile = Optional.of(foundFile.get());
            LOCAL_FS_FILE.set(context, insertFile.get());
        }
    } else {
        LOG.warn("Path: " + localPath.getAbsolutePath() + ", exists=" + exists + ", canWrite=" + canWrite
                + ", mkDirs=" + mkDirs);
        cannotStore = true;
        insertFile = Optional.absent();
    }
    CANNOT_STORE.set(context, cannotStore);

    return insertFile;
}

From source file:com.github.fge.grappa.transform.generate.ClassNodeInitializer.java
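This is essentially the same code as the parboiled example above (grappa began as a fork of parboiled), using the same per-iteration Closer pattern.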

public void process(ParserClassNode classNode) throws IOException {
    this.classNode = Objects.requireNonNull(classNode, "classNode");

    // walk up the parser parent class chain
    ownerClass = classNode.getParentClass();
    Closer closer;
    ClassReader reader;
    InputStream in;
    while (!Object.class.equals(ownerClass)) {
        annotations.removeAll(CLASS_FLAGS_CLEAR);

        closer = Closer.create();
        try {
            in = getInputStream(ownerClass);
            if (in == null)
                throw new IOException(ownerClass + " not found");
            reader = new ClassReader(closer.register(in));
            reader.accept(this, ClassReader.SKIP_FRAMES);
        } finally {
            closer.close();
        }
        ownerClass = ownerClass.getSuperclass();
    }

    for (RuleMethod method : classNode.getRuleMethods().values()) {
        // move all flags from the super methods to their overriding methods
        if (!method.isSuperMethod())
            continue;

        String overridingMethodName = method.name.substring(1) + method.desc;

        RuleMethod overridingMethod = classNode.getRuleMethods().get(overridingMethodName);

        method.moveFlagsTo(overridingMethod);
    }
}

From source file:com.b2international.snowowl.snomed.importer.rf2.validation.AbstractSnomedValidator.java
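CSV writers are registered lazily as new effective times are encountered, so a single Closer tears down an unbounded set of resources; Closeables.close(closer, true) logs rather than rethrows any IOException raised during the close.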

/**
 * Performs any one-time initialization necessary for the validation.
 *
 * @param monitor the SubMonitor instance to report progress on
 * @return the seen effective times
 */
protected Collection<String> preValidate(final SubMonitor monitor) {
    monitor.beginTask(MessageFormat.format("Preparing {0}s validation", importType.getDisplayName()), 1);

    final Map<String, CsvListWriter> writers = newHashMap();

    final Closer closer = Closer.create();
    try {
        final InputStreamReader releaseFileReader = closer
                .register(new InputStreamReader(releaseUrl.openStream(), CsvConstants.IHTSDO_CHARSET));
        final CsvListReader releaseFileListReader = closer
                .register(new CsvListReader(releaseFileReader, CsvConstants.IHTSDO_CSV_PREFERENCE));

        componentStagingDirectory = createStagingDirectory();

        final String[] header = releaseFileListReader.getCSVHeader(true);

        if (!StringUtils.equalsIgnoreCase(header, expectedHeader)) {
            addDefect(DefectType.HEADER_DIFFERENCES, String.format("Invalid header in '%s'", releaseFileName));
        }

        while (true) {
            final List<String> row = releaseFileListReader.read();

            if (null == row) {
                break;
            }

            final String effectiveTimeKey = getEffectiveTimeKey(row.get(1));

            if (!effectiveTimes.contains(effectiveTimeKey)) {
                effectiveTimes.add(effectiveTimeKey);

                // Use the original effective time field instead of the key
                validateEffectiveTime(row.get(1), releaseFileListReader.getLineNumber());

                final Path effectiveTimeFile = getEffectiveTimeFile(effectiveTimeKey);
                final BufferedWriter bw = closer.register(
                        Files.newBufferedWriter(effectiveTimeFile, Charsets.UTF_8, StandardOpenOption.CREATE));
                final CsvListWriter lw = closer
                        .register(new CsvListWriter(bw, CsvConstants.IHTSDO_CSV_PREFERENCE));
                writers.put(effectiveTimeKey, lw);
            }

            writers.get(effectiveTimeKey).write(row);
        }

        return ImmutableList.copyOf(effectiveTimes);
    } catch (final IOException e) {
        throw new ImportException(
                MessageFormat.format("Couldn''t read row from {0} release file.", releaseFileName), e);
    } finally {
        try {
            Closeables.close(closer, true);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        monitor.worked(1);
    }
}

From source file:io.druid.segment.IndexMergerV9.java
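Closer is not limited to streams: here anonymous Closeable instances wrap cleanup actions (IOPeon cleanup and temp directory deletion), and the catch block uses closer.rethrow(t) so that failures during cleanup cannot mask the original exception.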

@Override
protected File makeIndexFiles(final List<IndexableAdapter> adapters, final AggregatorFactory[] metricAggs,
        final File outDir, final ProgressIndicator progress, final List<String> mergedDimensions,
        final List<String> mergedMetrics,
        final Function<ArrayList<Iterable<Rowboat>>, Iterable<Rowboat>> rowMergerFn, final IndexSpec indexSpec)
        throws IOException {
    progress.start();
    progress.progress();

    List<Metadata> metadataList = Lists.transform(adapters, new Function<IndexableAdapter, Metadata>() {
        @Override
        public Metadata apply(IndexableAdapter input) {
            return input.getMetadata();
        }
    });

    Metadata segmentMetadata = null;
    if (metricAggs != null) {
        AggregatorFactory[] combiningMetricAggs = new AggregatorFactory[metricAggs.length];
        for (int i = 0; i < metricAggs.length; i++) {
            combiningMetricAggs[i] = metricAggs[i].getCombiningFactory();
        }
        segmentMetadata = Metadata.merge(metadataList, combiningMetricAggs);
    } else {
        segmentMetadata = Metadata.merge(metadataList, null);
    }

    Closer closer = Closer.create();
    final IOPeon ioPeon = new TmpFileIOPeon(false);
    closer.register(new Closeable() {
        @Override
        public void close() throws IOException {
            ioPeon.cleanup();
        }
    });
    final FileSmoosher v9Smoosher = new FileSmoosher(outDir);
    final File v9TmpDir = new File(outDir, "v9-tmp");
    v9TmpDir.mkdirs();
    closer.register(new Closeable() {
        @Override
        public void close() throws IOException {
            FileUtils.deleteDirectory(v9TmpDir);
        }
    });
    log.info("Start making v9 index files, outDir:%s", outDir);
    try {
        long startTime = System.currentTimeMillis();
        ByteStreams.write(Ints.toByteArray(IndexIO.V9_VERSION),
                Files.newOutputStreamSupplier(new File(outDir, "version.bin")));
        log.info("Completed version.bin in %,d millis.", System.currentTimeMillis() - startTime);

        progress.progress();
        final Map<String, ValueType> metricsValueTypes = Maps
                .newTreeMap(Ordering.<String>natural().nullsFirst());
        final Map<String, String> metricTypeNames = Maps.newTreeMap(Ordering.<String>natural().nullsFirst());
        final List<ColumnCapabilitiesImpl> dimCapabilities = Lists
                .newArrayListWithCapacity(mergedDimensions.size());
        mergeCapabilities(adapters, mergedDimensions, metricsValueTypes, metricTypeNames, dimCapabilities);

        final DimensionHandler[] handlers = makeDimensionHandlers(mergedDimensions, dimCapabilities);
        final List<DimensionMerger> mergers = new ArrayList<>();
        for (int i = 0; i < mergedDimensions.size(); i++) {
            mergers.add(handlers[i].makeMerger(indexSpec, v9TmpDir, ioPeon, dimCapabilities.get(i), progress));
        }

        /************* Setup Dim Conversions **************/
        progress.progress();
        startTime = System.currentTimeMillis();
        final ArrayList<Map<String, IntBuffer>> dimConversions = Lists
                .newArrayListWithCapacity(adapters.size());
        final ArrayList<Boolean> dimensionSkipFlag = Lists.newArrayListWithCapacity(mergedDimensions.size());
        final ArrayList<Boolean> convertMissingDimsFlags = Lists
                .newArrayListWithCapacity(mergedDimensions.size());
        writeDimValueAndSetupDimConversion(adapters, progress, mergedDimensions, mergers);
        log.info("Completed dim conversions in %,d millis.", System.currentTimeMillis() - startTime);

        /************* Walk through data sets, merge them, and write merged columns *************/
        progress.progress();
        final Iterable<Rowboat> theRows = makeRowIterable(adapters, mergedDimensions, mergedMetrics,
                rowMergerFn, dimCapabilities, handlers, mergers);
        final LongColumnSerializer timeWriter = setupTimeWriter(ioPeon, indexSpec);
        final ArrayList<GenericColumnSerializer> metWriters = setupMetricsWriters(ioPeon, mergedMetrics,
                metricsValueTypes, metricTypeNames, indexSpec);
        final List<IntBuffer> rowNumConversions = Lists.newArrayListWithCapacity(adapters.size());

        mergeIndexesAndWriteColumns(adapters, progress, theRows, timeWriter, metWriters, rowNumConversions,
                mergers);

        /************ Create Inverted Indexes and Finalize Build Columns *************/
        final String section = "build inverted index and columns";
        progress.startSection(section);
        makeTimeColumn(v9Smoosher, progress, timeWriter);
        makeMetricsColumns(v9Smoosher, progress, mergedMetrics, metricsValueTypes, metricTypeNames, metWriters);

        for (int i = 0; i < mergedDimensions.size(); i++) {
            DimensionMergerV9 merger = (DimensionMergerV9) mergers.get(i);
            merger.writeIndexes(rowNumConversions, closer);
            if (merger.canSkip()) {
                continue;
            }
            ColumnDescriptor columnDesc = merger.makeColumnDescriptor();
            makeColumn(v9Smoosher, mergedDimensions.get(i), columnDesc);
        }

        progress.stopSection(section);

        /************* Make index.drd & metadata.drd files **************/
        progress.progress();
        makeIndexBinary(v9Smoosher, adapters, outDir, mergedDimensions, mergedMetrics, progress, indexSpec,
                mergers);
        makeMetadataBinary(v9Smoosher, progress, segmentMetadata);

        v9Smoosher.close();
        progress.stop();

        return outDir;
    } catch (Throwable t) {
        throw closer.rethrow(t);
    } finally {
        closer.close();
    }
}

From source file:gobblin.example.wikipedia.WikipediaExtractor.java
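The HTTP response is registered only when it actually implements Closeable, and the rethrow/close idiom keeps the original exception as primary if closing the response or reader also fails.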

private JsonElement performHttpQuery(String rootUrl, Map<String, String> query)
        throws URISyntaxException, IOException {
    if (null == this.httpClient) {
        this.httpClient = createHttpClient();
    }
    HttpUriRequest req = createHttpRequest(rootUrl, query);

    Closer closer = Closer.create();

    StringBuilder sb = new StringBuilder();
    try {
        HttpResponse response = sendHttpRequest(req, this.httpClient);
        if (response instanceof CloseableHttpResponse) {
            closer.register((CloseableHttpResponse) response);
        }
        BufferedReader br = closer
                .register(new BufferedReader(new InputStreamReader(response.getEntity().getContent(),
                        ConfigurationKeys.DEFAULT_CHARSET_ENCODING)));
        String line;
        while ((line = br.readLine()) != null) {
            sb.append(line + "\n");
        }
    } catch (Throwable t) {
        throw closer.rethrow(t);
    } finally {
        try {
            closer.close();
        } catch (IOException e) {
            LOG.error("IOException in Closer.close() while performing query " + req + ": " + e, e);
        }
    }

    if (Strings.isNullOrEmpty(sb.toString())) {
        LOG.warn("Received empty response for query: " + req);
        return new JsonObject();
    }

    JsonElement jsonElement = GSON.fromJson(sb.toString(), JsonElement.class);
    return jsonElement;

}

From source file:tachyon.worker.hierarchy.StorageDir.java
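Source and destination block handlers are registered on the same Closer, which closes them in reverse (LIFO) registration order.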

/**
 * Copy block file from current StorageDir to another StorageDir
 *
 * @param blockId Id of the block
 * @param dstDir destination StorageDir
 * @return true if success, false otherwise
 * @throws IOException
 */
boolean copyBlock(long blockId, StorageDir dstDir) throws IOException {
    long size = getBlockSize(blockId);
    if (size == -1) {
        LOG.error("Block file doesn't exist! blockId:" + blockId);
        return false;
    }
    boolean copySuccess = false;
    Closer closer = Closer.create();
    try {
        BlockHandler bhSrc = closer.register(getBlockHandler(blockId));
        BlockHandler bhDst = closer.register(dstDir.getBlockHandler(blockId));
        ByteBuffer srcBuf = bhSrc.read(0, (int) size);
        copySuccess = (bhDst.append(0, srcBuf) == size);
    } finally {
        closer.close();
    }
    if (copySuccess) {
        dstDir.addBlockId(blockId, size);
    }
    return copySuccess;
}

From source file:org.mapfish.print.servlet.MapPrinterServlet.java
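Both the InputStreamReader and the BufferedReader that wraps it are registered; since Closer closes in LIFO order, the wrapper is closed first and the second close of the underlying reader is a no-op.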

protected String getSpecFromPostBody(HttpServletRequest httpServletRequest) throws IOException {
    if (httpServletRequest.getParameter("spec") != null) {
        return httpServletRequest.getParameter("spec");
    }

    Closer closer = Closer.create();
    try {
        final InputStreamReader reader = closer
                .register(new InputStreamReader(httpServletRequest.getInputStream(), getEncoding()));
        BufferedReader bufferedReader = closer.register(new BufferedReader(reader));
        final String spec = CharStreams.toString(bufferedReader);
        return spec;
    } finally {
        closer.close();
    }
}

From source file:com.android.builder.internal.packaging.DexIncrementalRenameManager.java
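Because register() returns the closeable it is given, the FileWriter can be registered inline in the props.store(...) call while still being closed through the rethrow/close idiom.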

/**
 * Writes incremental state.
 *
 * @throws IOException failed to write state
 */
private void writeState() throws IOException {
    File stateFile = new File(mIncrementalDir, STATE_FILE);

    Properties props = new Properties();
    int currIdx = 0;
    for (BiMap.Entry<RelativeFile, String> entry : mNameMap.entrySet()) {
        props.put(BASE_KEY_PREFIX + currIdx, entry.getKey().getBase().getPath());
        props.put(FILE_KEY_PREFIX + currIdx, entry.getKey().getFile().getPath());
        props.put(RENAMED_KEY_PREFIX + currIdx, entry.getValue());
        currIdx++;
    }

    Closer closer = Closer.create();
    try {
        props.store(closer.register(new FileWriter(stateFile)), null);
    } catch (Throwable t) {
        throw closer.rethrow(t);
    } finally {
        closer.close();
    }
}