Example usage for java.io.Closeable

List of usage examples for java.io.Closeable

Introduction

On this page you can find usage examples for java.io.Closeable.

Prototype

public interface Closeable extends AutoCloseable {
    void close() throws IOException;
}
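Every example below implements this interface with an anonymous class and hands the instance to some cleanup registry. A minimal, self-contained sketch of that pattern, combined with try-with-resources (Java 7+); it is not taken from any of the source files listed below:

import java.io.Closeable;
import java.io.IOException;

public class CloseableSketch {
    public static void main(String[] args) throws IOException {
        // An anonymous Closeable wrapping an arbitrary cleanup action
        Closeable cleanup = new Closeable() {
            @Override
            public void close() throws IOException {
                System.out.println("releasing resources");
            }
        };
        // try-with-resources calls close() automatically, even if the body throws
        try (Closeable c = cleanup) {
            System.out.println("doing work");
        }
    }
}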

Usage

From source file:com.splicemachine.derby.stream.function.merge.AbstractMergeJoinFlatMapFunction.java

@Override
public Iterator<LocatedRow> call(Iterator<LocatedRow> locatedRows) throws Exception {
    PeekingIterator<LocatedRow> leftPeekingIterator = Iterators.peekingIterator(locatedRows);
    if (!initialized) {
        joinOperation = getOperation();
        initialized = true;
        if (!leftPeekingIterator.hasNext())
            return Collections.EMPTY_LIST.iterator();
        initRightScan(leftPeekingIterator);
    }
    final SpliceOperation rightSide = joinOperation.getRightOperation();
    DataSetProcessor dsp = EngineDriver.driver().processorFactory()
            .bulkProcessor(getOperation().getActivation(), rightSide);
    final Iterator<LocatedRow> rightIterator = Iterators.transform(rightSide.getDataSet(dsp).toLocalIterator(),
            new Function<LocatedRow, LocatedRow>() {
                @Override
                public LocatedRow apply(@Nullable LocatedRow locatedRow) {
                    operationContext.recordJoinedRight();
                    return locatedRow;
                }
            });
    ((BaseActivation) joinOperation.getActivation()).setScanStartOverride(null); // reset to null to avoid any side effects
    ((BaseActivation) joinOperation.getActivation()).setScanKeys(null);
    ((BaseActivation) joinOperation.getActivation()).setScanStopOverride(null);
    AbstractMergeJoinIterator iterator = createMergeJoinIterator(leftPeekingIterator,
            Iterators.peekingIterator(rightIterator), joinOperation.getLeftHashKeys(),
            joinOperation.getRightHashKeys(), joinOperation, operationContext);
    iterator.registerCloseable(new Closeable() {
        @Override
        public void close() throws IOException {
            try {
                rightSide.close();
            } catch (StandardException e) {
                throw new RuntimeException(e);
            }
        }
    });
    return iterator;
}

From source file:org.springframework.data.hadoop.serialization.ResourceSerializationFormat.java

/**
 * Writes the content of Spring {@link Resource}s to a single HDFS location.
 */
@Override
protected SerializationWriterSupport createWriter(final OutputStream output) {
    // Extend and customize Serialization Writer template
    return new SerializationWriterSupport() {

        private OutputStream outputStream = output;

        private InputStream resourceSeparatorInputStream;

        @Override
        protected Closeable doOpen() throws IOException {

            resourceSeparatorInputStream = null;

            CompressionCodec codec = CompressionUtils.getHadoopCompression(getConfiguration(),
                    getCompressionAlias());

            // If no compression is specified, or the passed stream already has compression capabilities...
            if (codec == null || CompressionOutputStream.class.isInstance(outputStream)) {
                // ...just return original stream untouched
                return outputStream;
            }

            // Possibly reuse a Compressor from the underlying CodecPool
            final Compressor compressor = CodecPool.getCompressor(codec);

            // Create compression stream wrapping passed stream
            outputStream = codec.createOutputStream(outputStream, compressor);

            return new Closeable() {

                @Override
                public void close() throws IOException {
                    resourceSeparatorInputStream = null;
                    IOUtils.closeStream(outputStream);
                    CodecPool.returnCompressor(compressor);
                }
            };
        }

        @Override
        protected void doWrite(Resource source) throws IOException {
            InputStream inputStream = null;
            try {
                writeSeparator();

                inputStream = source.getInputStream();

                // Write source to HDFS destination
                copyBytes(inputStream, outputStream, getConfiguration(), /* close */false);

            } finally {
                closeStream(inputStream);
            }
        }

        protected void writeSeparator() throws IOException {
            if (getResourceSeparator() == null) {
                return;
            }

            if (resourceSeparatorInputStream == null) {

                // The first call initializes 'resourceSeparatorInputStream' and does not write anything

                resourceSeparatorInputStream = toInputStream(getResourceSeparator(), "UTF-8");

                return;
            }

            resourceSeparatorInputStream.reset();

            // Write resource separator to HDFS destination
            copyBytes(resourceSeparatorInputStream, outputStream, getConfiguration(), /* close */false);
        }
    };
}

From source file:io.druid.segment.IndexMergerV9.java

@Override
protected File makeIndexFiles(final List<IndexableAdapter> adapters, final AggregatorFactory[] metricAggs,
        final File outDir, final ProgressIndicator progress, final List<String> mergedDimensions,
        final List<String> mergedMetrics,
        final Function<ArrayList<Iterable<Rowboat>>, Iterable<Rowboat>> rowMergerFn, final IndexSpec indexSpec)
        throws IOException {
    progress.start();
    progress.progress();

    List<Metadata> metadataList = Lists.transform(adapters, new Function<IndexableAdapter, Metadata>() {
        @Override
        public Metadata apply(IndexableAdapter input) {
            return input.getMetadata();
        }
    });

    Metadata segmentMetadata = null;
    if (metricAggs != null) {
        AggregatorFactory[] combiningMetricAggs = new AggregatorFactory[metricAggs.length];
        for (int i = 0; i < metricAggs.length; i++) {
            combiningMetricAggs[i] = metricAggs[i].getCombiningFactory();
        }
        segmentMetadata = Metadata.merge(metadataList, combiningMetricAggs);
    } else {
        segmentMetadata = Metadata.merge(metadataList, null);
    }

    Closer closer = Closer.create();
    final IOPeon ioPeon = new TmpFileIOPeon(false);
    closer.register(new Closeable() {
        @Override
        public void close() throws IOException {
            ioPeon.cleanup();
        }
    });
    final FileSmoosher v9Smoosher = new FileSmoosher(outDir);
    final File v9TmpDir = new File(outDir, "v9-tmp");
    v9TmpDir.mkdirs();
    closer.register(new Closeable() {
        @Override
        public void close() throws IOException {
            FileUtils.deleteDirectory(v9TmpDir);
        }
    });
    log.info("Start making v9 index files, outDir:%s", outDir);
    try {
        long startTime = System.currentTimeMillis();
        ByteStreams.write(Ints.toByteArray(IndexIO.V9_VERSION),
                Files.newOutputStreamSupplier(new File(outDir, "version.bin")));
        log.info("Completed version.bin in %,d millis.", System.currentTimeMillis() - startTime);

        progress.progress();
        final Map<String, ValueType> metricsValueTypes = Maps
                .newTreeMap(Ordering.<String>natural().nullsFirst());
        final Map<String, String> metricTypeNames = Maps.newTreeMap(Ordering.<String>natural().nullsFirst());
        final List<ColumnCapabilitiesImpl> dimCapabilities = Lists
                .newArrayListWithCapacity(mergedDimensions.size());
        mergeCapabilities(adapters, mergedDimensions, metricsValueTypes, metricTypeNames, dimCapabilities);

        final DimensionHandler[] handlers = makeDimensionHandlers(mergedDimensions, dimCapabilities);
        final List<DimensionMerger> mergers = new ArrayList<>();
        for (int i = 0; i < mergedDimensions.size(); i++) {
            mergers.add(handlers[i].makeMerger(indexSpec, v9TmpDir, ioPeon, dimCapabilities.get(i), progress));
        }

        /************* Setup Dim Conversions **************/
        progress.progress();
        startTime = System.currentTimeMillis();
        final ArrayList<Map<String, IntBuffer>> dimConversions = Lists
                .newArrayListWithCapacity(adapters.size());
        final ArrayList<Boolean> dimensionSkipFlag = Lists.newArrayListWithCapacity(mergedDimensions.size());
        final ArrayList<Boolean> convertMissingDimsFlags = Lists
                .newArrayListWithCapacity(mergedDimensions.size());
        writeDimValueAndSetupDimConversion(adapters, progress, mergedDimensions, mergers);
        log.info("Completed dim conversions in %,d millis.", System.currentTimeMillis() - startTime);

        /************* Walk through data sets, merge them, and write merged columns *************/
        progress.progress();
        final Iterable<Rowboat> theRows = makeRowIterable(adapters, mergedDimensions, mergedMetrics,
                rowMergerFn, dimCapabilities, handlers, mergers);
        final LongColumnSerializer timeWriter = setupTimeWriter(ioPeon, indexSpec);
        final ArrayList<GenericColumnSerializer> metWriters = setupMetricsWriters(ioPeon, mergedMetrics,
                metricsValueTypes, metricTypeNames, indexSpec);
        final List<IntBuffer> rowNumConversions = Lists.newArrayListWithCapacity(adapters.size());

        mergeIndexesAndWriteColumns(adapters, progress, theRows, timeWriter, metWriters, rowNumConversions,
                mergers);

        /************ Create Inverted Indexes and Finalize Build Columns *************/
        final String section = "build inverted index and columns";
        progress.startSection(section);
        makeTimeColumn(v9Smoosher, progress, timeWriter);
        makeMetricsColumns(v9Smoosher, progress, mergedMetrics, metricsValueTypes, metricTypeNames, metWriters);

        for (int i = 0; i < mergedDimensions.size(); i++) {
            DimensionMergerV9 merger = (DimensionMergerV9) mergers.get(i);
            merger.writeIndexes(rowNumConversions, closer);
            if (merger.canSkip()) {
                continue;
            }
            ColumnDescriptor columnDesc = merger.makeColumnDescriptor();
            makeColumn(v9Smoosher, mergedDimensions.get(i), columnDesc);
        }

        progress.stopSection(section);

        /************* Make index.drd & metadata.drd files **************/
        progress.progress();
        makeIndexBinary(v9Smoosher, adapters, outDir, mergedDimensions, mergedMetrics, progress, indexSpec,
                mergers);
        makeMetadataBinary(v9Smoosher, progress, segmentMetadata);

        v9Smoosher.close();
        progress.stop();

        return outDir;
    } catch (Throwable t) {
        throw closer.rethrow(t);
    } finally {
        closer.close();
    }
}
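The snippet above registers its cleanup steps (cleaning up the temporary IOPeon, deleting the v9 temp directory) as anonymous Closeables on a Guava Closer, then uses the rethrow/close idiom so that failures during cleanup do not mask the original exception. A minimal, self-contained sketch of that idiom, with a hypothetical temp directory standing in for Druid's resources:

import com.google.common.io.Closer;

import java.io.Closeable;
import java.io.File;
import java.io.IOException;

public class CloserSketch {
    public static void main(String[] args) throws IOException {
        final File tmpDir = new File("closer-sketch-tmp"); // hypothetical temp dir
        tmpDir.mkdirs();

        Closer closer = Closer.create();
        // Register cleanup work as a Closeable; Closer closes registered
        // resources in reverse registration order when close() is called.
        closer.register(new Closeable() {
            @Override
            public void close() throws IOException {
                if (!tmpDir.delete()) {
                    throw new IOException("could not delete " + tmpDir);
                }
            }
        });
        try {
            // ... work that may throw goes here ...
        } catch (Throwable t) {
            // rethrow() records t as the primary exception so that anything
            // thrown by close() below is suppressed rather than masking it
            throw closer.rethrow(t);
        } finally {
            closer.close();
        }
    }
}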

From source file:com.opengamma.component.tool.ToolContextUtils.java

private static ToolContext createToolContextByHttp(String configResourceLocation,
        Class<? extends ToolContext> toolContextClazz, List<String> classifierChain) {
    configResourceLocation = StringUtils.stripEnd(configResourceLocation, "/");
    if (configResourceLocation.endsWith("/jax") == false) {
        configResourceLocation += "/jax";
    }

    // Get the remote component server using the supplied URI
    RemoteComponentServer remoteComponentServer = new RemoteComponentServer(URI.create(configResourceLocation));
    ComponentServer componentServer = remoteComponentServer.getComponentServer();

    // Attempt to build a tool context of the specified type
    ToolContext toolContext;
    try {
        toolContext = toolContextClazz.newInstance();
    } catch (Throwable t) {
        return null;
    }

    // Populate the tool context from the remote component server
    for (MetaProperty<?> metaProperty : toolContext.metaBean().metaPropertyIterable()) {
        if (!metaProperty.name().equals("contextManager")) {
            try {
                ComponentInfo componentInfo = getComponentInfo(componentServer, classifierChain,
                        metaProperty.propertyType());
                if (componentInfo == null) {
                    s_logger.warn("Unable to populate tool context '" + metaProperty.name()
                            + "', no appropriate component found on the server");
                    continue;
                }
                if (ViewProcessor.class.equals(componentInfo.getType())) {
                    final JmsConnector jmsConnector = createJmsConnector(componentInfo);
                    final ScheduledExecutorService scheduler = Executors
                            .newSingleThreadScheduledExecutor(new NamedThreadFactory("rvp"));
                    ViewProcessor vp = new RemoteViewProcessor(componentInfo.getUri(), jmsConnector, scheduler);
                    toolContext.setViewProcessor(vp);
                    toolContext.setContextManager(new Closeable() {
                        @Override
                        public void close() throws IOException {
                            scheduler.shutdownNow();
                            jmsConnector.close();
                        }
                    });
                } else {
                    String clazzName = componentInfo.getAttribute(ComponentInfoAttributes.REMOTE_CLIENT_JAVA);
                    if (clazzName == null) {
                        s_logger.warn("Unable to populate tool context '" + metaProperty.name()
                                + "', no remote access class found");
                        continue;
                    }
                    Class<?> clazz = Class.forName(clazzName);
                    metaProperty.set(toolContext,
                            clazz.getConstructor(URI.class).newInstance(componentInfo.getUri()));
                    s_logger.info("Populated tool context '" + metaProperty.name() + "' with "
                            + metaProperty.get(toolContext));
                }
            } catch (Throwable ex) {
                s_logger.warn(
                        "Unable to populate tool context '" + metaProperty.name() + "': " + ex.getMessage());
            }
        }
    }
    return toolContext;
}

From source file:org.jclouds.http.apachehc.config.ApacheHCHttpCommandExecutorServiceModule.java

@Singleton
@Provides
final ClientConnectionManager newClientConnectionManager(HttpParams params, X509HostnameVerifier verifier,
        SSLContext context, Closer closer) throws NoSuchAlgorithmException, KeyManagementException {

    SchemeRegistry schemeRegistry = new SchemeRegistry();
    schemeRegistry.register(new Scheme("http", PlainSocketFactory.getSocketFactory(), 80));

    SSLSocketFactory sf = new SSLSocketFactory(context);
    sf.setHostnameVerifier(verifier);
    schemeRegistry.register(new Scheme("https", sf, 443));

    final ClientConnectionManager cm = new ThreadSafeClientConnManager(params, schemeRegistry);
    closer.addToClose(new Closeable() {
        @Override
        public void close() throws IOException {
            cm.shutdown();
        }
    });
    return cm;
}

From source file:org.opencms.lock.CmsLockUtil.java

/**
 * Utility method for locking and unlocking a set of resources conveniently with the try-with-resources
 * syntax from Java 1.7.<p>
 *
 * This method locks a set of resources and returns a Closeable instance that will unlock the locked resources
 * when its close() method is called.
 *
 * @param cms the CMS context
 * @param resources the resources to lock
 *
 * @return the Closeable used to unlock the resources
 * @throws Exception if something goes wrong
 */
public static Closeable withLockedResources(final CmsObject cms, CmsResource... resources) throws Exception {

    final Map<CmsResource, CmsLockActionRecord> lockMap = Maps.newHashMap();
    Closeable result = new Closeable() {

        @SuppressWarnings("synthetic-access")
        public void close() {

            for (Map.Entry<CmsResource, CmsLockActionRecord> entry : lockMap.entrySet()) {
                if (entry.getValue().getChange() == LockChange.locked) {
                    CmsResource resourceToUnlock = entry.getKey();
                    // the resource may have been moved, so we read it again to get the correct path
                    try {
                        resourceToUnlock = cms.readResource(entry.getKey().getStructureId(),
                                CmsResourceFilter.ALL);
                    } catch (CmsException e) {
                        LOG.error(e.getLocalizedMessage(), e);
                    }
                    try {
                        cms.unlockResource(resourceToUnlock);
                    } catch (CmsException e) {
                        LOG.warn(e.getLocalizedMessage(), e);
                    }
                }

            }
        }
    };
    try {
        for (CmsResource resource : resources) {
            CmsLockActionRecord record = ensureLock(cms, resource);
            lockMap.put(resource, record);
        }
    } catch (CmsException e) {
        result.close();
        throw e;
    }
    return result;
}
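As the javadoc points out, the returned Closeable is meant to be used with try-with-resources so the locks are released automatically. A hedged usage sketch, assuming an initialized CmsObject and two existing resources (the method and variable names are illustrative only, not part of the original source):

// Usage sketch only, not from the original source file.
void editWithTemporaryLocks(CmsObject cms, CmsResource res1, CmsResource res2) throws Exception {
    try (Closeable lock = CmsLockUtil.withLockedResources(cms, res1, res2)) {
        // modify res1 / res2 here; both are unlocked automatically when
        // the try block exits and lock.close() runs
    }
}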

From source file:org.wisdom.maven.utils.WebJars.java

/**
 * Checks whether the given file is a WebJar or not (http://www.webjars.org/documentation).
 * The check is based on the presence of {@literal META-INF/resources/webjars/} directory in the jar file.
 *
 * @param file the file.
 * @return {@literal true} if it's a WebJar, {@literal false} otherwise.
 */
public static boolean isWebJar(File file) {
    Set<String> found = new LinkedHashSet<>();
    if (file.isFile() && file.getName().endsWith(".jar")) {
        JarFile jar = null;
        try {
            jar = new JarFile(file);

            // Fast return if the base structure is not there
            if (jar.getEntry(WEBJAR_LOCATION) == null) {
                return false;
            }

            Enumeration<JarEntry> entries = jar.entries();
            while (entries.hasMoreElements()) {
                JarEntry entry = entries.nextElement();
                Matcher matcher = WEBJAR_REGEX.matcher(entry.getName());
                if (matcher.matches()) {
                    found.add(matcher.group(1) + "-" + matcher.group(2));
                }
            }
        } catch (IOException e) {
            LoggerFactory.getLogger(DependencyCopy.class)
                    .error("Cannot check if the file {} is a webjar, " + "cannot open it", file.getName(), e);
            return false;
        } finally {
            final JarFile finalJar = jar;
            IOUtils.closeQuietly(new Closeable() {
                @Override
                public void close() throws IOException {
                    if (finalJar != null) {
                        finalJar.close();
                    }
                }
            });
        }

        for (String lib : found) {
            LoggerFactory.getLogger(DependencyCopy.class).info("Web Library found in {} : {}", file.getName(),
                    lib);
        }

        return !found.isEmpty();
    }

    return false;
}
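For context: on Java 7 and later, JarFile implements Closeable (via ZipFile), so the quiet-close wrapper above could also be replaced by passing the JarFile to IOUtils.closeQuietly directly, or by try-with-resources. A minimal sketch of the latter, assuming a local JAR named example.jar exists:

import java.io.IOException;
import java.util.jar.JarFile;

public class JarCloseSketch {
    public static void main(String[] args) throws IOException {
        // try-with-resources closes the JarFile even if the scan below throws
        try (JarFile jar = new JarFile("example.jar")) {
            System.out.println("entries: " + jar.size());
        }
    }
}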

From source file:org.wisdom.resources.WebJarDeployer.java

/**
 * Checks whether the given file is a WebJar or not (http://www.webjars.org/documentation).
 * The check is based on the presence of {@literal META-INF/resources/webjars/} directory in the jar file.
 *
 * @param file the file.
 * @return the set of libraries found in the file, {@code null} if none.
 */
public static Set<DetectedWebJar> isWebJar(File file) {
    Set<DetectedWebJar> found = new LinkedHashSet<>();
    if (file.isFile() && file.getName().endsWith(".jar")) {
        JarFile jar = null;
        try {
            jar = new JarFile(file);

            // Fast return if the base structure is not there
            if (jar.getEntry(WebJarController.WEBJAR_LOCATION) == null) {
                return null;
            }

            Enumeration<JarEntry> entries = jar.entries();
            while (entries.hasMoreElements()) {
                JarEntry entry = entries.nextElement();
                Matcher matcher = WebJarController.WEBJAR_REGEX.matcher(entry.getName());
                if (matcher.matches()) {
                    found.add(new DetectedWebJar(matcher.group(1), matcher.group(2), entry.getName(), file));
                }
            }
        } catch (IOException e) {
            LOGGER.error("Cannot check if the file {} is a webjar, " + "cannot open it", file.getName(), e);
            return null;
        } finally {
            final JarFile finalJar = jar;
            IOUtils.closeQuietly(new Closeable() {
                @Override
                public void close() throws IOException {
                    if (finalJar != null) {
                        finalJar.close();
                    }
                }
            });
        }

        for (DetectedWebJar lib : found) {
            LOGGER.info("Web Library found in {} : {}", file.getName(), lib.id);
        }

        return found;
    }

    return null;
}

From source file:com.puppycrawl.tools.checkstyle.utils.CommonUtilsTest.java

@Test(expected = IllegalStateException.class)
public void testCloseWithException() {
    CommonUtils.close(new Closeable() {

        @Override
        public void close() throws IOException {
            throw new IOException("Test IOException");
        }
    });
}

From source file:io.fabric8.maven.core.service.PortForwardService.java

/**
 * Forwards a port to the newest pod matching the given selector.
 * If another pod is created, it forwards connections to the new pod once it's ready.
 */
public Closeable forwardPortAsync(final Logger externalProcessLogger, final LabelSelector podSelector,
        final int remotePort, final int localPort) throws Fabric8ServiceException {

    final Lock monitor = new ReentrantLock(true);
    final Condition podChanged = monitor.newCondition();
    final Pod[] nextForwardedPod = new Pod[1];

    final Thread forwarderThread = new Thread() {
        @Override
        public void run() {

            Pod currentPod = null;
            Closeable currentPortForward = null;

            try {
                monitor.lock();

                while (true) {
                    if (podEquals(currentPod, nextForwardedPod[0])) {
                        podChanged.await();
                    } else {
                        Pod nextPod = nextForwardedPod[0]; // may be null
                        try {
                            monitor.unlock();
                            // out of critical section

                            if (currentPortForward != null) {
                                log.info("Closing port-forward from pod %s",
                                        KubernetesHelper.getName(currentPod));
                                currentPortForward.close();
                                currentPortForward = null;
                            }

                            if (nextPod != null) {
                                log.info("Starting port-forward to pod %s", KubernetesHelper.getName(nextPod));
                                currentPortForward = forwardPortAsync(externalProcessLogger,
                                        KubernetesHelper.getName(nextPod), remotePort, localPort);
                            } else {
                                log.info("Waiting for a pod to become ready before starting port-forward");
                            }
                            currentPod = nextPod;
                        } finally {
                            monitor.lock();
                        }
                    }

                }

            } catch (InterruptedException e) {
                log.debug("Port-forwarding thread interrupted", e);
                Thread.currentThread().interrupt();
            } catch (Exception e) {
                log.warn("Error while port-forwarding to pod", e);
            } finally {
                monitor.unlock();

                if (currentPortForward != null) {
                    try {
                        currentPortForward.close();
                    } catch (Exception e) {
                        // best-effort close of the previous forward; ignore failures
                    }
                }
            }
        }
    };

    // Switching forward to the current pod if present
    Pod newPod = getNewestPod(podSelector);
    nextForwardedPod[0] = newPod;

    final Watch watch = KubernetesClientUtil.withSelector(kubernetes.pods(), podSelector, log)
            .watch(new Watcher<Pod>() {

                @Override
                public void eventReceived(Action action, Pod pod) {
                    monitor.lock();
                    try {
                        List<Pod> candidatePods;
                        if (nextForwardedPod[0] != null) {
                            candidatePods = new LinkedList<>();
                            candidatePods.add(nextForwardedPod[0]);
                            candidatePods.add(pod);
                        } else {
                            candidatePods = Collections.singletonList(pod);
                        }
                        Pod newPod = getNewestPod(candidatePods); // may be null
                        if (!podEquals(nextForwardedPod[0], newPod)) {
                            nextForwardedPod[0] = newPod;
                            podChanged.signal();
                        }
                    } finally {
                        monitor.unlock();
                    }
                }

                @Override
                public void onClose(KubernetesClientException e) {
                    // don't care
                }
            });

    forwarderThread.start();

    final Closeable handle = new Closeable() {
        @Override
        public void close() throws IOException {
            try {
                watch.close();
            } catch (Exception e) {
                // ignore: closing the watch is best-effort
            }
            try {
                forwarderThread.interrupt();
                forwarderThread.join(15000);
            } catch (Exception e) {
                // ignore: stopping the forwarder thread is best-effort
            }
        }
    };
    Runtime.getRuntime().addShutdownHook(new Thread() {
        @Override
        public void run() {
            try {
                handle.close();
            } catch (Exception e) {
                // suppress
            }
        }
    });

    return handle;
}
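A hedged usage sketch for the handle returned above (the service, logger, and selector instances are assumed to already exist; the port numbers are illustrative): the caller is expected to close the handle when the forward is no longer needed, which stops the watch and interrupts the forwarder thread.

// Usage sketch only, not from the original source file.
void runWithPortForward(PortForwardService service, Logger log, LabelSelector podSelector)
        throws Fabric8ServiceException, IOException {
    // parameter order in the method above: (logger, selector, remotePort, localPort)
    Closeable handle = service.forwardPortAsync(log, podSelector, 8080, 8080);
    try {
        // talk to the forwarded service through localhost:8080 here
    } finally {
        handle.close();
    }
}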