Example usage for java.io PipedInputStream PipedInputStream

List of usage examples for java.io PipedInputStream PipedInputStream

Introduction

In this page you can find the example usage for java.io PipedInputStream PipedInputStream.

Prototype

public PipedInputStream(int pipeSize) 

Source Link

Document

Creates a PipedInputStream so that it is not yet connected (see connect(java.io.PipedOutputStream)) and uses the specified pipe size for the pipe's buffer.

Usage

From source file:com.emc.ecs.sync.CasMigrationTest.java

@Test
public void testCASSingleObject() throws Exception {
    FPPool sourcePool = new FPPool(connectString1);
    FPPool targetPool = new FPPool(connectString2);

    try {
        // create clip in source (<=1MB blob size) - capture summary for comparison
        StringWriter sourceSummary = new StringWriter();
        List<String> clipIds = createTestClips(sourcePool, 1048576, 1, sourceSummary);
        String clipID = clipIds.iterator().next();

        // open clip in source
        FPClip clip = new FPClip(sourcePool, clipID, FPLibraryConstants.FP_OPEN_FLAT);

        // buffer CDF (clip descriptor file) fully in memory before writing it to the target
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        clip.RawRead(baos);

        // write CDF to target
        FPClip targetClip = new FPClip(targetPool, clipID, new ByteArrayInputStream(baos.toByteArray()),
                CLIP_OPTIONS);

        // migrate blobs tag-by-tag; source and target clips iterate in lockstep
        FPTag tag, targetTag;
        int tagCount = 0;
        while ((tag = clip.FetchNext()) != null) {
            targetTag = targetClip.FetchNext();
            Assert.assertEquals("Tag names don't match", tag.getTagName(), targetTag.getTagName());
            Assert.assertTrue("Tag " + tag.getTagName() + " attributes not equal",
                    Arrays.equals(tag.getAttributes(), targetTag.getAttributes()));

            int blobStatus = tag.BlobExists();
            if (blobStatus == 1) {
                // pipe blob data: a reader thread pulls from the source tag while this
                // thread pushes the same bytes into the target tag
                PipedInputStream pin = new PipedInputStream(BUFFER_SIZE);
                PipedOutputStream pout = new PipedOutputStream(pin);
                BlobReader reader = new BlobReader(tag, pout);

                // start reading in parallel
                Thread readThread = new Thread(reader);
                readThread.start();

                // write inside this thread
                targetTag.BlobWrite(pin);

                readThread.join(); // this shouldn't do anything, but just in case

                if (!reader.isSuccess())
                    throw new Exception("blob read failed", reader.getError());
            } else {
                // -1 means "no blob on this tag", which is normal; anything else is unexpected
                if (blobStatus != -1)
                    System.out.println("blob unavailable, clipId=" + clipID + ", tagNum=" + tagCount
                            + ", blobStatus=" + blobStatus);
            }
            tag.Close();
            targetTag.Close();
            tagCount++;
        }

        clip.Close();

        Assert.assertEquals("clip IDs not equal", clipID, targetClip.Write());
        targetClip.Close();

        // check target blob data
        targetClip = new FPClip(targetPool, clipID, FPLibraryConstants.FP_OPEN_FLAT);
        Assert.assertEquals("content mismatch", sourceSummary.toString(), summarizeClip(targetClip));
        targetClip.Close();

        // delete in source and target
        FPClip.Delete(sourcePool, clipID);
        FPClip.Delete(targetPool, clipID);
    } finally {
        // always release pool connections, even when the test fails
        // (same pattern as CasStorageTest.testCasSingleObject)
        try {
            sourcePool.Close();
        } catch (Throwable t) {
            System.out.println("failed to close source pool: " + t);
        }
        try {
            targetPool.Close();
        } catch (Throwable t) {
            System.out.println("failed to close target pool: " + t);
        }
    }
}

From source file:edu.umn.msi.tropix.common.io.impl.AsyncStreamCopierImplTest.java

@Test(groups = "unit", timeOut = 1000, invocationCount = 10)
public void close() throws IOException, InterruptedException {
    final AsyncStreamCopierImpl copier = new AsyncStreamCopierImpl();
    final Reference<Thread> threadReference = new Reference<Thread>();
    final Reference<Throwable> throwableReference = new Reference<Throwable>();
    copier.setExecutor(new Executor() {
        public void execute(final Runnable runnable) {
            final Thread thread = new Thread(runnable);
            threadReference.set(thread);
            // Install the handler BEFORE starting the thread. The original called
            // start() first, leaving a race window in which an exception thrown
            // early by the copier thread would bypass throwableReference and the
            // final assertion would silently pass.
            thread.setUncaughtExceptionHandler(new UncaughtExceptionHandler() {
                public void uncaughtException(final Thread arg0, final Throwable throwable) {
                    throwableReference.set(throwable);
                }
            });
            thread.start();
        }
    });
    final PipedOutputStream pipedOutputStream = new PipedOutputStream();
    final PipedInputStream pipedInputStream = new PipedInputStream(pipedOutputStream);
    final ByteArrayOutputStream copiedStream = new ByteArrayOutputStream();
    // copy asynchronously; 'true' asks the copier to close the output when done
    copier.copy(pipedInputStream, copiedStream, true);
    Thread.sleep(3);
    // nothing written yet, so nothing should have been copied
    assert new String(copiedStream.toByteArray()).equals("");
    pipedOutputStream.write("Hello ".getBytes());
    pipedOutputStream.flush();
    // poll until the async copier has propagated the first chunk
    while (!new String(copiedStream.toByteArray()).equals("Hello ")) {
        Thread.sleep(1);
    }
    pipedOutputStream.write("World!".getBytes());
    pipedOutputStream.flush();
    while (!new String(copiedStream.toByteArray()).equals("Hello World!")) {
        Thread.sleep(1);
    }
    // copier thread must stay alive while the pipe is open...
    assert threadReference.get().isAlive();
    pipedOutputStream.close();
    // ...and terminate once the pipe is closed (EOF on the input side)
    while (threadReference.get().isAlive()) {
        Thread.sleep(1);
    }
    assert throwableReference.get() == null;
}

From source file:edu.isi.wings.portal.classes.StorageHandler.java

/**
 * Zips {@code directory} and streams the resulting archive to {@code os}.
 * A ZipStreamer thread reads from the pipe and forwards to the response
 * stream while this thread zips the folder into the other end of the pipe.
 */
private static void streamDirectory(File directory, OutputStream os) {
    try {
        // Start the ZipStream reader. Whatever is read is streamed to response
        PipedInputStream pis = new PipedInputStream(2048);
        ZipStreamer pipestreamer = new ZipStreamer(pis, os);
        pipestreamer.start();

        // Start Zipping folder and piping to the ZipStream reader.
        // try-with-resources guarantees zos (and the underlying pos) is closed
        // even if zipping fails, so the reader thread sees EOF and terminates
        // instead of blocking forever on a half-open pipe.
        PipedOutputStream pos = new PipedOutputStream(pis);
        try (ZipOutputStream zos = new ZipOutputStream(pos)) {
            StorageHandler.zipAndStream(directory, zos, directory.getName() + "/");
            zos.flush();
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:org.jumpmind.symmetric.transport.internal.InternalTransportManager.java

/**
 * Builds an in-process "push" transport: two piped stream pairs simulate the
 * request and response channels that would normally cross HTTP.
 */
public IOutgoingWithResponseTransport getPushTransport(final Node targetNode, final Node sourceNode,
        String securityToken, String channelId, String registrationUrl) throws IOException {
    // pipe carrying pushed data from the caller to the in-process "server"
    final PipedOutputStream pushOs = new PipedOutputStream();
    final PipedInputStream pushIs = new PipedInputStream(pushOs);

    // pipe carrying the server's response back to the caller
    final PipedOutputStream respOs = new PipedOutputStream();
    final PipedInputStream respIs = new PipedInputStream(respOs);

    runAtClient(targetNode.getSyncUrl(), pushIs, respOs, new IClientRunnable() {
        public void run(ISymmetricEngine engine, InputStream is, OutputStream os) throws Exception {
            // This should be basically what the push servlet does ...
            // Use the streams handed to the callback rather than capturing the
            // outer pipe fields, consistent with getFilePushTransport (same values
            // here, since runAtClient is invoked with pushIs/respOs).
            engine.getDataLoaderService().loadDataFromPush(sourceNode, is, os);
        }
    });
    return new InternalOutgoingWithResponseTransport(pushOs, respIs);
}

From source file:com.github.chenxiaolong.dualbootpatcher.switcher.MbtoolTaskOutputFragment.java

@Override
public void onStart() {
    super.onStart();

    // Create terminal
    mSession = new TermSession();
    // We don't care about any input because this is kind of a "dumb" terminal output, not
    // a proper interactive one
    mSession.setTermOut(new NullOutputStream());

    // Anything the service writes to mOS is piped into the terminal session as output.
    mOS = new PipedOutputStream();
    try {
        mSession.setTermIn(new PipedInputStream(mOS));
    } catch (IOException e) {
        // PipedInputStream(PipedOutputStream) only throws if the pipe is already
        // connected, which cannot happen for a freshly created pair.
        throw new IllegalStateException("Failed to set terminal input stream to pipe", e);
    }

    mEmulatorView.attachSession(mSession);

    // Start and bind to the service
    Intent intent = new Intent(getActivity(), SwitcherService.class);
    getActivity().bindService(intent, this, Context.BIND_AUTO_CREATE);
    getActivity().startService(intent);
}

From source file:com.emc.ecs.sync.storage.CasStorageTest.java

@Test
public void testCasSingleObject() throws Exception {
    FPPool sourcePool = new FPPool(connectString1);
    FPPool targetPool = new FPPool(connectString2);

    try {
        // create clip in source (<=1MB blob size) - capture summary for comparison
        StringWriter sourceSummary = new StringWriter();
        List<String> clipIds = createTestClips(sourcePool, 1048576, 1, sourceSummary);
        String clipID = clipIds.iterator().next();

        // open clip in source
        FPClip clip = new FPClip(sourcePool, clipID, FPLibraryConstants.FP_OPEN_FLAT);

        // buffer CDF (clip descriptor file) fully in memory before writing to target
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        clip.RawRead(baos);

        // write CDF to target
        FPClip targetClip = new FPClip(targetPool, clipID, new ByteArrayInputStream(baos.toByteArray()),
                CLIP_OPTIONS);

        // migrate blobs - source and target clips are iterated in lockstep
        FPTag tag, targetTag;
        int tagCount = 0;
        while ((tag = clip.FetchNext()) != null) {
            targetTag = targetClip.FetchNext();
            Assert.assertEquals("Tag names don't match", tag.getTagName(), targetTag.getTagName());
            Assert.assertTrue("Tag " + tag.getTagName() + " attributes not equal",
                    Arrays.equals(tag.getAttributes(), targetTag.getAttributes()));

            int blobStatus = tag.BlobExists();
            if (blobStatus == 1) {
                // pipe blob data: a reader thread pulls from the source tag while
                // this thread pushes the same bytes into the target tag
                PipedInputStream pin = new PipedInputStream(BUFFER_SIZE);
                PipedOutputStream pout = new PipedOutputStream(pin);
                BlobReader reader = new BlobReader(tag, pout);

                // start reading in parallel
                Thread readThread = new Thread(reader);
                readThread.start();

                // write inside this thread
                targetTag.BlobWrite(pin);

                readThread.join(); // this shouldn't do anything, but just in case

                if (!reader.isSuccess())
                    throw new Exception("blob read failed", reader.getError());
            } else {
                // -1 means "no blob on this tag" (normal); anything else is unexpected
                if (blobStatus != -1)
                    System.out.println("blob unavailable, clipId=" + clipID + ", tagNum=" + tagCount
                            + ", blobStatus=" + blobStatus);
            }
            tag.Close();
            targetTag.Close();
            tagCount++;
        }

        clip.Close();

        // Write() finalizes the target clip and returns its clip ID, which must
        // match the source ID for a faithful migration
        Assert.assertEquals("clip IDs not equal", clipID, targetClip.Write());
        targetClip.Close();

        // check target blob data
        targetClip = new FPClip(targetPool, clipID, FPLibraryConstants.FP_OPEN_FLAT);
        Assert.assertEquals("content mismatch", sourceSummary.toString(), summarizeClip(targetClip));
        targetClip.Close();

        // delete in source and target
        FPClip.Delete(sourcePool, clipID);
        FPClip.Delete(targetPool, clipID);
    } finally {
        // always release pool connections, even when the test fails
        try {
            sourcePool.Close();
        } catch (Throwable t) {
            log.warn("failed to close source pool", t);
        }
        try {
            targetPool.Close();
        } catch (Throwable t) {
            log.warn("failed to close dest pool", t);
        }
    }
}

From source file:org.apache.zeppelin.spark.PySparkInterpreter.java

/**
 * Creates the py4j gateway server and launches the external python shell
 * process, wiring its stdin to a pipe this interpreter can write to.
 */
private void createGatewayServerAndStartScript() {
    // create python script
    createPythonScript();

    port = findRandomOpenPortOnAllLocalInterfaces();

    gatewayServer = new GatewayServer(this, port);
    gatewayServer.start();

    // Run python shell: script path, gateway port, and spark version are passed as args
    CommandLine cmd = CommandLine.parse(getProperty("zeppelin.pyspark.python"));
    cmd.addArgument(scriptPath, false);
    cmd.addArgument(Integer.toString(port), false);
    cmd.addArgument(Integer.toString(getSparkInterpreter().getSparkVersion().toNumber()), false);
    executor = new DefaultExecutor();
    outputStream = new ByteArrayOutputStream();
    // ps -> in pipe pair feeds the python process's stdin (via PumpStreamHandler below)
    PipedOutputStream ps = new PipedOutputStream();
    in = null;
    try {
        in = new PipedInputStream(ps);
    } catch (IOException e1) {
        throw new InterpreterException(e1);
    }
    ins = new BufferedWriter(new OutputStreamWriter(ps));

    input = new ByteArrayOutputStream();

    // stdout+stderr both captured into outputStream; stdin fed from 'in'
    PumpStreamHandler streamHandler = new PumpStreamHandler(outputStream, outputStream, in);
    executor.setStreamHandler(streamHandler);
    executor.setWatchdog(new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT));

    try {
        Map env = EnvironmentUtils.getProcEnvironment();

        executor.execute(cmd, env, this);
        pythonscriptRunning = true;
    } catch (IOException e) {
        throw new InterpreterException(e);
    }

    try {
        // NOTE(review): this writes to 'input' (a standalone ByteArrayOutputStream
        // not connected to the pipe) and then flushes 'ins' (which wraps ps but has
        // had nothing written to it) — as written, the bytes never reach the python
        // process. Looks like it should be ins.write(...) — confirm intent.
        input.write("import sys, getopt\n".getBytes());
        ins.flush();
    } catch (IOException e) {
        throw new InterpreterException(e);
    }
}

From source file:com.izforge.izpack.util.LogUtils.java

/**
 * Builds a merged java.util.logging configuration from base, user and default
 * property resources plus the given overrides, then feeds the result to
 * LogManager.readConfiguration via an in-memory pipe.
 *
 * @param configuration caller-supplied logging properties; mutated in place
 *                      during merging before being applied
 * @throws IOException if any configuration resource cannot be read or applied
 */
public static void loadConfiguration(final Properties configuration) throws IOException {
    if (OVERRIDE) {
        LogManager manager = LogManager.getLogManager();

        // Merge global logging properties
        InputStream baseResourceStream = null;
        try {
            baseResourceStream = LogUtils.class.getResourceAsStream(LOGGING_BASE_CONFIGURATION);
            final Properties baseProps = new Properties();
            baseProps.load(baseResourceStream);
            mergeLoggingConfiguration(configuration, baseProps);
        } finally {
            IOUtils.closeQuietly(baseResourceStream);
        }

        boolean mkdirs = false;
        String pattern = null;
        if (configuration.getProperty("handlers") != null
                && configuration.getProperty("handlers").contains(FILEHANDLER_CLASSNAME)
                && manager.getProperty("handlers").contains(FILEHANDLER_CLASSNAME)) {
            // IzPack maintains just one log file, don't override the existing handler type of it.
            // Special use case: Command line argument -logfile "wins" over the <log-file> tag.
            // Assumption at the moment for optimization: Just FileHandler is used for configurations from install.xml.
            return;
        }
        for (String key : configuration.stringPropertyNames()) {
            if (key.equals(FILEHANDLER_CLASSNAME + ".pattern")) {
                // Workaround for not normalized file paths, for example ${INSTALL_PATH}/../install_log/name.log
                // to get them working before creating ${INSTALL_PATH} in the
                // com.izforge.izpack.installer.unpacker.UnpackerBase.preUnpack phase
                // otherwise the FileHandler will fail when opening files already in constructor and not recover from that.
                pattern = FilenameUtils.normalize(configuration.getProperty(key));
                configuration.setProperty(key, pattern);
            } else if (key.equals(FILEHANDLER_CLASSNAME + ".mkdirs")) {
                // This key goes beyond the capabilities of java.util.logging.FileHandler
                mkdirs = Boolean.parseBoolean(configuration.getProperty(key));
                configuration.remove(key);
            }
        }
        if (mkdirs && pattern != null) {
            FileUtils.forceMkdirParent(new File(pattern));
        }

        // Merge user settings compiled in (only .level keys not targeting the file handler)
        final Properties userProps = new Properties();
        InputStream userPropsStream = LogUtils.class
                .getResourceAsStream(ResourceManager.getInstallLoggingConfigurationResourceName());
        try {
            if (userPropsStream != null) {
                userProps.load(userPropsStream);
                for (String userPropName : userProps.stringPropertyNames()) {
                    if (userPropName.endsWith(".level") && !userPropName.startsWith(FILEHANDLER_CLASSNAME)) {
                        String level = userProps.getProperty(userPropName);
                        if (level != null) {
                            configuration.setProperty(userPropName, level);
                        }
                    }
                }
            }
        } finally {
            IOUtils.closeQuietly(userPropsStream);
        }

        InputStream defaultResourceStream = null;
        try {
            defaultResourceStream = LogUtils.class.getResourceAsStream(LOGGING_CONFIGURATION);
            final Properties defaultProps = new Properties();
            defaultProps.load(defaultResourceStream);
            mergeLoggingConfiguration(configuration, defaultProps);
        } finally {
            IOUtils.closeQuietly(defaultResourceStream);
        }

        if (Debug.isDEBUG()) {
            configuration.setProperty(FILEHANDLER_CLASSNAME + ".level", Level.FINE.toString());
            configuration.setProperty(ConsoleHandler.class.getName() + ".level", Level.FINE.toString());
        }

        // Set general log level which acts as filter in front of all handlers
        // NOTE(review): getProperty(key, default) never returns null when the
        // default is non-null, so the null checks below are dead code.
        String fileLevelName = configuration.getProperty(FILEHANDLER_CLASSNAME + ".level",
                Level.ALL.toString());
        Level fileLevel = Level.ALL;
        if (fileLevelName != null) {
            fileLevel = Level.parse(fileLevelName);
        }

        String consoleLevelName = configuration.getProperty(CONSOLEHANDLER_CLASSNAME + ".level",
                Level.INFO.toString());
        Level consoleLevel = Level.INFO;
        if (consoleLevelName != null) {
            consoleLevel = Level.parse(consoleLevelName);
        }

        // root level = the more verbose (numerically lower) of the two handler levels
        configuration.setProperty(".level",
                (fileLevel.intValue() < consoleLevel.intValue()) ? fileLevelName : consoleLevelName);

        // LogManager.readConfiguration wants an InputStream, so serialize the merged
        // Properties through a pipe: a helper thread stores into 'out' while this
        // thread reads from 'in' (avoids buffering the config to a temp file)
        final PipedOutputStream out = new PipedOutputStream();
        final PipedInputStream in = new PipedInputStream(out);
        try {
            new Thread(new Runnable() {
                public void run() {
                    try {
                        configuration.store(out, null);
                    } catch (IOException e) {
                        e.printStackTrace();
                    } finally {
                        // closing 'out' signals EOF to readConfiguration
                        IOUtils.closeQuietly(out);
                    }
                }
            }).start();

            manager.readConfiguration(in);
        } finally {
            IOUtils.closeQuietly(in);
        }
    }
}

From source file:org.jumpmind.symmetric.transport.internal.InternalTransportManager.java

/**
 * Builds an in-process file-push transport. Two piped stream pairs stand in
 * for the HTTP request and response channels of a real push.
 */
public IOutgoingWithResponseTransport getFilePushTransport(final Node targetNode, final Node sourceNode,
        String securityToken, String registrationUrl) throws IOException {
    // Pipe 1: file data pushed by the caller, consumed by the in-process "server".
    final PipedOutputStream pushOs = new PipedOutputStream();
    // Pipe 2: the server's response, read back by the caller.
    final PipedOutputStream respOs = new PipedOutputStream();

    final PipedInputStream pushIs = new PipedInputStream(pushOs);
    final PipedInputStream respIs = new PipedInputStream(respOs);

    IClientRunnable serverSide = new IClientRunnable() {
        public void run(ISymmetricEngine engine, InputStream is, OutputStream os) throws Exception {
            // This should be basically what the push servlet does ...
            engine.getFileSyncService().loadFilesFromPush(sourceNode.getNodeId(), is, os);
        }
    };
    runAtClient(targetNode.getSyncUrl(), pushIs, respOs, serverSide);

    return new InternalOutgoingWithResponseTransport(pushOs, respIs);
}

From source file:org.asynchttpclient.handler.BodyDeferringAsyncHandlerTest.java

@Test(groups = "standalone")
public void deferredInputStreamTrick()
        throws IOException, ExecutionException, TimeoutException, InterruptedException {
    try (AsyncHttpClient client = asyncHttpClient(getAsyncHttpClientConfig())) {
        BoundRequestBuilder request = client
                .prepareGet("http://localhost:" + port1 + "/deferredInputStreamTrick");

        // Pipe pair: the deferring handler writes the body into 'sink',
        // while our code reads it back from 'source'.
        PipedOutputStream sink = new PipedOutputStream();
        PipedInputStream source = new PipedInputStream(sink);
        BodyDeferringAsyncHandler handler = new BodyDeferringAsyncHandler(sink);

        Future<Response> future = request.execute(handler);
        BodyDeferringInputStream bodyStream = new BodyDeferringInputStream(future, handler, source);

        // Blocks only until the status line and headers arrive, not the full body.
        Response resp = bodyStream.getAsapResponse();
        assertNotNull(resp);
        assertEquals(resp.getStatusCode(), HttpServletResponse.SC_OK);
        assertEquals(resp.getHeader("content-length"), String.valueOf(HALF_GIG));

        // "consume" the body, but our code needs input stream
        CountingOutputStream counter = new CountingOutputStream();
        try {
            copy(bodyStream, counter);
        } finally {
            bodyStream.close();
            counter.close();
        }

        // Consuming and closing BodyDeferringInputStream completes the exchange,
        // so the entire body must have passed through the counter by now.
        assertEquals(counter.getByteCount(), HALF_GIG);
    }
}