Example usage for java.io PipedInputStream connect

List of usage examples for java.io PipedInputStream connect

Introduction

On this page you can find example usage for java.io PipedInputStream connect.

Prototype

public void connect(PipedOutputStream src) throws IOException 

Source Link

Document

Causes this piped input stream to be connected to the piped output stream src.

Usage

From source file:Main.java

public static void main(String[] args) throws Exception {
    // try-with-resources closes BOTH pipe ends; the original leaked `out`.
    try (PipedOutputStream out = new PipedOutputStream();
            PipedInputStream in = new PipedInputStream()) {
        // Explicitly connect the read end to the write end.
        in.connect(out);

        // Write two bytes: 70 = 'F', 71 = 'G'.
        out.write(70);
        out.write(71);

        // Read back what we wrote, printing each byte as a character.
        for (int i = 0; i < 2; i++) {
            System.out.println("" + (char) in.read());
        }
    }
}

From source file:Main.java

public static void main(String[] args) throws Exception {
    // try-with-resources closes BOTH streams; the original only closed `in`.
    try (PipedOutputStream out = new PipedOutputStream();
            PipedInputStream in = new PipedInputStream()) {
        // Wire the input stream to the output stream.
        in.connect(out);

        // Push two bytes through the pipe: 'F' (70) and 'G' (71).
        out.write(70);
        out.write(71);

        // Drain the pipe, echoing each byte as a character.
        for (int i = 0; i < 2; i++) {
            System.out.println("" + (char) in.read());
        }
    }
}

From source file:Main.java

public static void main(String[] args) throws Exception {
    // Manage both pipe ends with try-with-resources (original leaked `out`).
    try (PipedOutputStream out = new PipedOutputStream();
            PipedInputStream in = new PipedInputStream()) {
        // Connect input and output ends of the pipe.
        in.connect(out);

        // Write 'F' (70) and 'G' (71) into the pipe.
        out.write(70);
        out.write(71);

        // Read back the two bytes and print them as characters.
        for (int i = 0; i < 2; i++) {
            System.out.println("" + (char) in.read());
        }
    }
}

From source file:Main.java

public static void main(String[] args) throws Exception {
    // Close both pipe ends automatically (original leaked `out`).
    try (PipedOutputStream out = new PipedOutputStream();
            PipedInputStream in = new PipedInputStream()) {
        // Connect the two pipe ends.
        in.connect(out);

        // Write 'F' (70) and 'G' (71).
        out.write(70);
        out.write(71);

        // Read into a byte array. read() may return fewer bytes than
        // requested, so use its return value instead of assuming 2.
        byte[] b = new byte[2];
        int n = in.read(b, 0, 2);

        // Decode only the bytes actually read, with an explicit charset
        // (the no-charset String(byte[]) ctor depends on the platform default).
        System.out.println(new String(b, 0, n, java.nio.charset.StandardCharsets.US_ASCII));
    }
}

From source file:Main.java

public static void main(String[] args) throws Exception {
    // Close both pipe ends automatically (original leaked `out`).
    // The input stream is created with an explicit 200-byte pipe buffer.
    try (PipedOutputStream out = new PipedOutputStream();
            PipedInputStream in = new PipedInputStream(200)) {
        // Connect input and output.
        in.connect(out);

        // Write 'F' (70) and 'G' (71).
        out.write(70);
        out.write(71);

        // Read back both bytes and print them as characters.
        for (int i = 0; i < 2; i++) {
            System.out.println("" + (char) in.read());
        }
    }
}

From source file:Main.java

public static void main(String[] args) throws Exception {
    // Manage both pipe ends with try-with-resources (original leaked `out`).
    try (PipedOutputStream out = new PipedOutputStream();
            PipedInputStream in = new PipedInputStream()) {
        // Connect input and output.
        in.connect(out);

        // Write 'F' (70) and 'G' (71).
        out.write(70);
        out.write(71);

        // Print how many bytes can be read without blocking (2 here).
        System.out.println(in.available());

        // Read back both bytes and print them as characters.
        for (int i = 0; i < 2; i++) {
            System.out.println("" + (char) in.read());
        }
    }
}

From source file:com.gc.iotools.stream.os.OutputStreamToInputStream.java

/**
 * <p>
 * Creates a new <code>OutputStreamToInputStream</code>, letting the caller
 * choose the thread-instantiation service, the size of the internal pipe
 * buffer, and what happens when <code>close()</code> is invoked.
 * </p>
 * <p>
 * If <code>startImmediately</code> is <code>true</code> the internal thread
 * starts before this constructor returns — the right choice for anonymous
 * subclassing. Explicit subclasses should pass <code>false</code> so their
 * own constructors can finish before any thread runs.
 * </p>
 * <p>
 * If <code>joinOnClose</code> is <code>true</code>, <code>close()</code>
 * waits for the internal thread to terminate.
 * </p>
 *
 * @since 1.2.13
 * @param startImmediately
 *            if <code>true</code>, start the internal thread as soon as
 *            this constructor completes.
 * @param joinOnClose
 *            if <code>true</code>, join the internal thread when
 *            <code>close()</code> is invoked.
 * @param executorService
 *            service used to execute the internal thread; must not be null.
 * @param pipeBufferSize
 *            size, in bytes, of the pipe buffer to allocate.
 * @throws java.lang.IllegalStateException
 *             if the pipe cannot be created.
 */
public OutputStreamToInputStream(final boolean startImmediately, final boolean joinOnClose,
        final ExecutorService executorService, final int pipeBufferSize) {
    if (executorService == null) {
        throw new IllegalArgumentException("executor service can't be null");
    }
    final String callerId = LogUtils.getCaller(getClass());

    // Build the read end of the pipe and wire it to this output stream.
    final PipedInputStream pipe = new MyPipedInputStream(pipeBufferSize);
    try {
        pipe.connect(this);
    } catch (final IOException e) {
        throw new IllegalStateException("Error during pipe creaton", e);
    }

    this.inputstream = pipe;
    this.joinOnClose = joinOnClose;
    this.executorService = executorService;
    LOG.debug("invoked by[{}] queued for start.", callerId);

    if (startImmediately) {
        initializeIfNecessary();
    }
}

From source file:lucee.commons.io.res.type.datasource.DatasourceResourceProvider.java

/**
 * Opens an output stream onto the datasource-backed file, creating the
 * file entry first when it does not yet exist. Writing happens through a
 * pipe consumed by a background {@code DataWriter} thread.
 */
public synchronized OutputStream getOutputStream(ConnectionData data, int fullPathHash, int pathHash,
        String path, String name, boolean append) throws IOException {

    // Ensure the target file entry exists before streaming into it.
    Attr attr = getAttr(data, fullPathHash, path, name);
    if (attr.getId() == 0) {
        create(data, fullPathHash, pathHash, path, name, Attr.TYPE_FILE);
        attr = getAttr(data, fullPathHash, path, name);
    }

    // Pipe pair: the caller writes into `sink`, the writer thread drains `source`.
    PipedOutputStream sink = new PipedOutputStream();
    PipedInputStream source = new PipedInputStream();
    source.connect(sink);

    DatasourceConnection dc = null;
    try {
        dc = getDatasourceConnection(data);

        // Background thread persists everything read from the pipe.
        DataWriter writer = new DataWriter(getCore(data), dc, data.getPrefix(), attr, source, this, append);
        writer.start();

        return new DatasourceResourceOutputStream(writer, sink);
    } catch (PageException e) {
        throw new PageRuntimeException(e);
    } finally {
        // The cached metadata is stale once a write begins.
        removeFromCache(data, path, name);
    }
}

From source file:com.streamsets.datacollector.bundles.SupportBundleManager.java

/**
 * Returns a {@code SupportBundle} whose InputStream yields a freshly
 * generated resource bundle. Generation runs asynchronously on the
 * executor; the returned stream is the read end of the pipe being filled.
 */
public SupportBundle generateNewBundleFromInstances(List<BundleContentGenerator> generators,
        BundleType bundleType) throws IOException {
    // Pipe pair: the generator task zips its output straight into `sink`.
    PipedOutputStream sink = new PipedOutputStream();
    PipedInputStream bundleStream = new PipedInputStream();
    bundleStream.connect(sink);
    ZipOutputStream zip = new ZipOutputStream(sink);

    // Fill the pipe in the background while the caller reads from it.
    executor.submit(() -> generateNewBundleInternal(generators, bundleType, zip));

    String name = generateBundleName(bundleType);
    String key = generateBundleDate(bundleType) + "/" + name;
    return new SupportBundle(key, name, bundleStream);
}

From source file:freenet.client.ArchiveManager.java

/**
 * Extract data to cache. Call synchronized on ctx.
 * @param key The key the data was fetched from.
 * @param archiveType The archive type. Must be Metadata.ARCHIVE_ZIP | Metadata.ARCHIVE_TAR.
 * @param ctype The compression codec wrapping the archive data (BZIP2, GZIP,
 * LZMA, LZMA_NEW), or null if the data is not compressed.
 * @param data The actual data fetched.
 * @param archiveContext The context for the whole fetch process.
 * @param ctx The ArchiveStoreContext for this key.
 * @param element A particular element that the caller is especially interested in, or null.
 * @param callback A callback to be called if we find that element, or if we don't.
 * @param context Client context; its main executor runs the background
 * LZMA_NEW decompression thread.
 * @throws ArchiveFailureException If we could not extract the data, or it was too big, etc.
 * @throws ArchiveRestartException If the request needs to be restarted because the archive
 * changed.
 */
public void extractToCache(FreenetURI key, ARCHIVE_TYPE archiveType, COMPRESSOR_TYPE ctype, final Bucket data,
        ArchiveContext archiveContext, ArchiveStoreContext ctx, String element, ArchiveExtractCallback callback,
        ClientContext context) throws ArchiveFailureException, ArchiveRestartException {
    logMINOR = Logger.shouldLog(LogLevel.MINOR, this);

    // Non-null only when the caller asked for a specific element.
    MutableBoolean gotElement = element != null ? new MutableBoolean() : null;

    if (logMINOR)
        Logger.minor(this, "Extracting " + key);
    ctx.removeAllCachedItems(this); // flush cache anyway
    final long expectedSize = ctx.getLastSize();
    final long archiveSize = data.size();
    /** Set if we need to throw a RestartedException rather than returning success,
     * after we have unpacked everything.
     */
    boolean throwAtExit = false;
    // A size change since the last fetch means the archive itself changed.
    if ((expectedSize != -1) && (archiveSize != expectedSize)) {
        throwAtExit = true;
        ctx.setLastSize(archiveSize);
    }
    byte[] expectedHash = ctx.getLastHash();
    if (expectedHash != null) {
        byte[] realHash;
        try {
            realHash = BucketTools.hash(data);
        } catch (IOException e) {
            throw new ArchiveFailureException("Error reading archive data: " + e, e);
        }
        // Hash mismatch likewise means the archive changed; restart at exit.
        if (!Arrays.equals(realHash, expectedHash))
            throwAtExit = true;
        ctx.setLastHash(realHash);
    }

    // Sanity-check the (possibly compressed) container size before unpacking.
    if (archiveSize > archiveContext.maxArchiveSize)
        throw new ArchiveFailureException(
                "Archive too big (" + archiveSize + " > " + archiveContext.maxArchiveSize + ")!");
    else if (archiveSize <= 0)
        throw new ArchiveFailureException("Archive too small! (" + archiveSize + ')');
    else if (logMINOR)
        Logger.minor(this, "Container size (possibly compressed): " + archiveSize + " for " + data);

    InputStream is = null;
    try {
        // `wrapper` carries any exception thrown by the background
        // LZMA_NEW decompressor thread back to this thread; it stays
        // null for all synchronously-decoded codecs.
        final ExceptionWrapper wrapper;
        // NOTE: ZIP archives are read uncompressed here regardless of ctype.
        if ((ctype == null) || (ARCHIVE_TYPE.ZIP == archiveType)) {
            if (logMINOR)
                Logger.minor(this, "No compression");
            is = data.getInputStream();
            wrapper = null;
        } else if (ctype == COMPRESSOR_TYPE.BZIP2) {
            if (logMINOR)
                Logger.minor(this, "dealing with BZIP2");
            is = new BZip2CompressorInputStream(data.getInputStream());
            wrapper = null;
        } else if (ctype == COMPRESSOR_TYPE.GZIP) {
            if (logMINOR)
                Logger.minor(this, "dealing with GZIP");
            is = new GZIPInputStream(data.getInputStream());
            wrapper = null;
        } else if (ctype == COMPRESSOR_TYPE.LZMA_NEW) {
            // LZMA internally uses pipe streams, so we may as well do it here.
            // In fact we need to for LZMA_NEW, because of the properties bytes.
            PipedInputStream pis = new PipedInputStream();
            PipedOutputStream pos = new PipedOutputStream();
            pis.connect(pos);
            final OutputStream os = new BufferedOutputStream(pos);
            wrapper = new ExceptionWrapper();
            // Decompress on a separate thread so this thread can consume
            // the pipe's read end (`pis`) while it is being filled.
            context.mainExecutor.execute(new Runnable() {

                @Override
                public void run() {
                    InputStream is = null;
                    try {
                        Compressor.COMPRESSOR_TYPE.LZMA_NEW.decompress(is = data.getInputStream(), os,
                                data.size(), expectedSize);
                    } catch (CompressionOutputSizeException e) {
                        Logger.error(this, "Failed to decompress archive: " + e, e);
                        wrapper.set(e);
                    } catch (IOException e) {
                        Logger.error(this, "Failed to decompress archive: " + e, e);
                        wrapper.set(e);
                    } finally {
                        // Closing the pipe's write end unblocks the reader.
                        try {
                            os.close();
                        } catch (IOException e) {
                            Logger.error(this, "Failed to close PipedOutputStream: " + e, e);
                        }
                        Closer.close(is);
                    }
                }

            });
            is = pis;
        } else if (ctype == COMPRESSOR_TYPE.LZMA) {
            if (logMINOR)
                Logger.minor(this, "dealing with LZMA");
            is = new LzmaInputStream(data.getInputStream());
            wrapper = null;
        } else {
            wrapper = null;
        }

        if (ARCHIVE_TYPE.ZIP == archiveType)
            handleZIPArchive(ctx, key, is, element, callback, gotElement, throwAtExit, context);
        else if (ARCHIVE_TYPE.TAR == archiveType)
            handleTARArchive(ctx, key, is, element, callback, gotElement, throwAtExit, context);
        else
            throw new ArchiveFailureException("Unknown or unsupported archive algorithm " + archiveType);
        // Surface any failure from the background decompressor thread.
        if (wrapper != null) {
            Exception e = wrapper.get();
            if (e != null)
                throw new ArchiveFailureException("An exception occured decompressing: " + e.getMessage(), e);
        }
    } catch (IOException ioe) {
        throw new ArchiveFailureException("An IOE occured: " + ioe.getMessage(), ioe);
    } finally {
        Closer.close(is);
    }
}