Example usage for java.io PipedInputStream PipedInputStream

List of usage examples for java.io PipedInputStream PipedInputStream

Introduction

On this page you can find example usage for the java.io.PipedInputStream constructor.

Prototype

public PipedInputStream(PipedOutputStream src) throws IOException 

Document

Creates a PipedInputStream so that it is connected to the piped output stream src. Data bytes written to src will then be available as input from this stream.
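
Before the examples, a minimal sketch of the connected-pipe pattern they all build on: one thread writes into a PipedOutputStream while another thread reads the same bytes back from the connected PipedInputStream, and closing the output side is what gives the reader end-of-stream. The class and variable names here are illustrative only.

import java.io.IOException;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;

public class PipeBasics {
    public static void main(String[] args) throws IOException, InterruptedException {
        final PipedOutputStream out = new PipedOutputStream();
        // Connect the input side to the output side at construction time.
        final PipedInputStream in = new PipedInputStream(out);

        Thread writer = new Thread(new Runnable() {
            public void run() {
                try {
                    out.write("hello over the pipe".getBytes("UTF-8"));
                    out.close(); // closing signals EOF to the reader
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        });
        writer.start();

        // Read on this thread until the writer closes its end.
        byte[] buf = new byte[256];
        int n;
        while ((n = in.read(buf)) != -1) {
            System.out.write(buf, 0, n);
        }
        System.out.flush();
        in.close();
        writer.join();
    }
}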

Usage

From source file:org.lamport.tla.toolbox.jcloud.PayloadHelper.java

public static Payload appendModel2Jar(final Path modelPath, String mainClass, Properties properties,
        IProgressMonitor monitor) throws IOException {

    /*
     * Get the standard tla2tools.jar from the classpath as a blueprint.
     * It's located in the org.lamport.tla.toolbox.jclouds bundle in the
     * files/ directory. It uses OSGi functionality to read files/tla2tools.jar
     * from the .jclouds bundle.
     * The copy of the blueprint will contain the spec & model and 
     * additional metadata (properties, amended manifest).
     */
    final Bundle bundle = FrameworkUtil.getBundle(PayloadHelper.class);
    final URL toolsURL = bundle.getEntry("files/tla2tools.jar");
    if (toolsURL == null) {
        throw new RuntimeException("No tlatools.jar and/or spec to deploy");
    }

    /* 
     * Copy the tla2tools.jar blueprint to a temporary location on
     * disk to append model files below.
     */
    final File tempFile = File.createTempFile("tla2tools", ".jar");
    tempFile.deleteOnExit();
    try (FileOutputStream out = new FileOutputStream(tempFile)) {
        IOUtils.copy(toolsURL.openStream(), out);
    }

    /*
     * Create a virtual filesystem in jar format.
     */
    final Map<String, String> env = new HashMap<>();
    env.put("create", "true");
    final URI uri = URI.create("jar:" + tempFile.toURI());

    try (FileSystem fs = FileSystems.newFileSystem(uri, env)) {
        /*
         * Copy the spec and model into the jar's model/ folder.
         * Also copy any module override (.class file) into the jar.
         */
        try (DirectoryStream<Path> modelDirectoryStream = Files.newDirectoryStream(modelPath,
                "*.{cfg,tla,class}")) {
            for (final Path file : modelDirectoryStream) {
                final Path to = fs.getPath("/model/" + file.getFileName());
                Files.copy(file, to, StandardCopyOption.REPLACE_EXISTING);
            }
        }

        /*
         * Add given class as Main-Class statement to jar's manifest. This
         * causes Java to launch this class when no other Main class is 
         * given on the command line. Thus, it shortens the command line
         * for us.
         */
        final Path manifestPath = fs.getPath("/META-INF/", "MANIFEST.MF");
        final Manifest manifest = new Manifest(Files.newInputStream(manifestPath));
        manifest.getMainAttributes().put(Attributes.Name.MAIN_CLASS, mainClass);
        // Write the updated manifest back through an in-memory pipe. Writing
        // and reading on the same thread is safe here only because a manifest
        // is much smaller than the pipe's default 1024-byte buffer.
        final PipedOutputStream ps = new PipedOutputStream();
        final PipedInputStream is = new PipedInputStream(ps);
        manifest.write(ps);
        ps.close();
        Files.copy(is, manifestPath, StandardCopyOption.REPLACE_EXISTING);

        /*
         * Add properties file to archive. The property file contains the
         * result email address... from where TLC eventually reads it.
         */

        // On Windows 7 and above the file has to be created in the system's
        // temp folder; otherwise expect file creation to fail with an
        // AccessDeniedException.
        final File f = File.createTempFile("generated", "properties");
        // Append all entries in "properties" to the temp file f, closing the
        // stream when done.
        try (OutputStream out = new FileOutputStream(f)) {
            properties.store(out, "This is an optional header comment string");
        }
        // Copy the temp file f into the jar with path /model/generated.properties.
        final Path to = fs.getPath("/model/generated.properties");
        Files.copy(f.toPath(), to, StandardCopyOption.REPLACE_EXISTING);
    } catch (final IOException e1) {
        throw new RuntimeException("No model directory found to deploy", e1);
    }

    /*
     * Compress the archive with pack200 to achieve a much higher compression
     * rate. We are going to send the file over the wire after all:
     *
     * effort:   take more time choosing codings for better compression
     * segment:  use largest-possible archive segments (>10% better compression)
     * mod time: smear modification times to a single value
     * deflate:  ignore all JAR deflation hints in the original archive
     */
    final Packer packer = Pack200.newPacker();
    final Map<String, String> p = packer.properties();
    p.put(Packer.EFFORT, "9");
    p.put(Packer.SEGMENT_LIMIT, "-1");
    p.put(Packer.MODIFICATION_TIME, Packer.LATEST);
    p.put(Packer.DEFLATE_HINT, Packer.FALSE);

    // Do not reorder files, which changes package names. Package name
    // changes break e.g. SimpleFilenameToStream.
    p.put(Packer.KEEP_FILE_ORDER, Packer.TRUE);

    // Throw an error if any of the above attributes is unrecognized.
    p.put(Packer.UNKNOWN_ATTRIBUTE, Packer.ERROR);

    final File packTempFile = File.createTempFile("tla2tools", ".pack.gz");
    try (final JarFile jarFile = new JarFile(tempFile);
            final GZIPOutputStream fos = new GZIPOutputStream(new FileOutputStream(packTempFile))) {
        packer.pack(jarFile, fos);
    } catch (IOException ioe) {
        throw new RuntimeException("Failed to pack200 the tla2tools.jar file", ioe);
    }

    /*
     * Convert the customized tla2tools.jar into a jClouds payload object.
     * This is the format in which it will be transferred over the wire;
     * jClouds handles that part, though.
     */
    Payload jarPayLoad = null;
    try {
        final InputStream openStream = new FileInputStream(packTempFile);
        jarPayLoad = Payloads.newInputStreamPayload(openStream);
        // Manually set the content length to prevent an NPE bug; use the
        // file's length rather than InputStream#available(), which is only
        // an estimate.
        jarPayLoad.getContentMetadata().setContentLength(Long.valueOf(packTempFile.length()));
    } catch (final IOException e1) {
        throw new RuntimeException("No tlatools.jar to deploy", e1);
    } finally {
        monitor.worked(5);
    }

    return jarPayLoad;
}
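
The manifest round-trip above works on a single thread only because the data fits in the pipe's default 1024-byte buffer. When the payload is not bounded like that, either move the writer to its own thread or size the buffer explicitly via the PipedInputStream(PipedOutputStream src, int pipeSize) overload, as in this sketch; the 64 KiB buffer and 16 KiB payload are arbitrary illustrative numbers.

import java.io.IOException;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;

public class SameThreadPipe {
    public static void main(String[] args) throws IOException {
        PipedOutputStream out = new PipedOutputStream();
        // A buffer large enough for the whole payload makes a
        // single-threaded write-then-read safe from deadlock.
        PipedInputStream in = new PipedInputStream(out, 64 * 1024);

        byte[] payload = new byte[16 * 1024];
        out.write(payload); // fits entirely inside the 64 KiB buffer
        out.close();

        System.out.println(in.available() + " bytes buffered and ready to read");
        in.close();
    }
}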

From source file:org.jumpmind.symmetric.transport.internal.InternalTransportManager.java

public IIncomingTransport getPullTransport(Node remote, final Node local, String securityToken,
        Map<String, String> requestProperties, String registrationUrl) throws IOException {
    final PipedOutputStream respOs = new PipedOutputStream();
    final PipedInputStream respIs = new PipedInputStream(respOs);

    final ChannelMap suspendIgnoreChannels = symmetricEngine.getConfigurationService()
            .getSuspendIgnoreChannelLists(remote.getNodeId());

    runAtClient(remote.getSyncUrl(), null, respOs, new IClientRunnable() {
        public void run(ISymmetricEngine engine, InputStream is, OutputStream os) throws Exception {
            IOutgoingTransport transport = new InternalOutgoingTransport(respOs, suspendIgnoreChannels,
                    IoConstants.ENCODING);
            ProcessInfo processInfo = engine.getStatisticManager().newProcessInfo(new ProcessInfoKey(
                    engine.getNodeService().findIdentityNodeId(), local.getNodeId(), ProcessType.PULL_HANDLER));
            try {
                engine.getDataExtractorService().extract(processInfo, local, transport);
                processInfo.setStatus(Status.OK);
            } catch (RuntimeException ex) {
                processInfo.setStatus(Status.ERROR);
                throw ex;
            }
            transport.close();
        }
    });
    return new InternalIncomingTransport(respIs);
}

From source file:ro.kuberam.libs.java.crypto.CryptoModuleTests.java

public InputStream openStream() throws IOException {
    final PipedOutputStream out = new PipedOutputStream();
    final PipedInputStream in = new PipedInputStream(out);

    final Runnable exporter = () -> {
        try {
            out.write("message".getBytes(StandardCharsets.UTF_8));
        } catch (IOException e) {
            // The reader will see a broken pipe; just log the failure.
            e.printStackTrace();
        }
        IOUtils.closeQuietly(out);
    };

    // The exporter must run on another thread, or reading from `in`
    // would block forever waiting for bytes that are never written.
    new Thread(exporter).start();

    return in;
}

From source file:at.sti2.sparkwave.ServerSocketThread.java

/**
 * TCP/IP Sparkwave Network Server
 */
public void run() {

    //Open TCP/IP Server socket; try-with-resources closes it when the loop exits
    try (ServerSocket server = new ServerSocket(configuration.getPort())) {
        logger.info("Server: " + server);

        while (!Thread.interrupted()) {
            logger.info("Waiting for connection...");
            Socket sock = server.accept();
            logger.info("Connected: " + sock);

            //TODO Not every connection should cause a rebuild of the plugin chain. It should work with arbitrarily many connections, be failure resistant, and re-use plugin and parser threads.

            InputStream socketStreamIn = sock.getInputStream();

            // PreProcessing Plugins to be loaded
            if (configuration.getPPPluginsConfig().size() == 2) {

                //TODO support arbitrarily many plugins

                // Wiring: socketStreamIn --> (Plugin1) --> PipeOut1 --> PipeIn1
                final PipedOutputStream pipeOut1 = new PipedOutputStream();
                final PipedInputStream pipeIn1 = new PipedInputStream(pipeOut1);

                // Wiring: PipeIn1 --> (Plugin2) --> PipeOut2 --> PipeIn2
                final PipedOutputStream pipeOut2 = new PipedOutputStream();
                final PipedInputStream pipeIn2 = new PipedInputStream(pipeOut2);

                // plugin configuration
                PPPluginConfig pluginConfig1 = configuration.getPPPluginsConfig().get(0);
                PreProcess plugin1 = instantiateAndConfigurePlugin(pluginConfig1, socketStreamIn, pipeOut1);

                PPPluginConfig pluginConfig2 = configuration.getPPPluginsConfig().get(1);
                PreProcess plugin2 = instantiateAndConfigurePlugin(pluginConfig2, pipeIn1, pipeOut2);

                // N3 Parser
                StreamParserThread sparkStreamParserThread = new StreamParserThread(pipeIn2, queues);

                // kick-off pre-process
                sparkwaveParserExecutor.execute(plugin1);
                sparkwaveParserExecutor.execute(plugin2);

                // kick-off parser
                sparkwaveParserExecutor.execute(sparkStreamParserThread);

            } else {

                StreamParserThread sparkStreamParserThread = new StreamParserThread(socketStreamIn, queues);

                // kick-off parser
                sparkwaveParserExecutor.execute(sparkStreamParserThread);

            }

        }

    } catch (Exception e) {
        logger.error(e.getMessage(), e);
    }
}
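
The plugin-chain wiring above reduces to the following sketch: each stage reads from the previous pipe and writes into the next, with every stage on its own thread. The upper-casing stage logic is a stand-in for a real plugin; everything else mirrors the pipeOut1/pipeIn1/pipeOut2/pipeIn2 wiring in the example.

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;

public class PipeChain {

    // Copies in to out, upper-casing each byte; stands in for a plugin stage.
    static Runnable stage(final InputStream in, final OutputStream out) {
        return new Runnable() {
            public void run() {
                try {
                    int b;
                    while ((b = in.read()) != -1) {
                        out.write(Character.toUpperCase(b));
                    }
                } catch (IOException ignored) {
                    // Downstream readers observe a broken pipe.
                } finally {
                    try { in.close(); } catch (IOException ignored) {}
                    try { out.close(); } catch (IOException ignored) {}
                }
            }
        };
    }

    public static void main(String[] args) throws IOException {
        InputStream source = new ByteArrayInputStream("tick tock".getBytes("UTF-8"));

        // Wiring: source --> (stage 1) --> pipeOut1 --> pipeIn1
        PipedOutputStream pipeOut1 = new PipedOutputStream();
        PipedInputStream pipeIn1 = new PipedInputStream(pipeOut1);

        // Wiring: pipeIn1 --> (stage 2) --> pipeOut2 --> pipeIn2
        PipedOutputStream pipeOut2 = new PipedOutputStream();
        PipedInputStream pipeIn2 = new PipedInputStream(pipeOut2);

        new Thread(stage(source, pipeOut1)).start();
        new Thread(stage(pipeIn1, pipeOut2)).start();

        // The final consumer reads the fully processed bytes from the last pipe.
        int b;
        while ((b = pipeIn2.read()) != -1) {
            System.out.print((char) b);
        }
        System.out.println();
    }
}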

From source file:com.github.chenxiaolong.dualbootpatcher.switcher.ZipFlashingOutputFragment.java

@Override
public void onStart() {
    super.onStart();

    // Create terminal
    mSession = new TermSession();
    // We don't care about any input because this is kind of a "dumb" terminal output, not
    // a proper interactive one
    mSession.setTermOut(new NullOutputStream());

    mOS = new PipedOutputStream();
    try {
        mSession.setTermIn(new PipedInputStream(mOS));
    } catch (IOException e) {
        e.printStackTrace();
    }

    mEmulatorView.attachSession(mSession);

    // Start and bind to the service
    Intent intent = new Intent(getActivity(), SwitcherService.class);
    getActivity().bindService(intent, this, Context.BIND_AUTO_CREATE);
    getActivity().startService(intent);
}

From source file:org.apache.zeppelin.spark.ZeppelinR.java

/**
 * Start the R REPL.
 * @throws IOException
 */
public void open() throws IOException {
    createRScript();

    zeppelinR.put(hashCode(), this);

    CommandLine cmd = CommandLine.parse(rCmdPath);
    cmd.addArgument("--no-save");
    cmd.addArgument("--no-restore");
    cmd.addArgument("-f");
    cmd.addArgument(scriptPath);
    cmd.addArgument("--args");
    cmd.addArgument(Integer.toString(hashCode()));
    cmd.addArgument(Integer.toString(port));
    cmd.addArgument(libPath);
    cmd.addArgument(Integer.toString(sparkVersion.toNumber()));

    // dump out the R command to facilitate manually running it, e.g. for fault diagnosis purposes
    logger.debug(cmd.toString());

    executor = new DefaultExecutor();
    outputStream = new InterpreterOutputStream(logger);

    input = new PipedOutputStream();
    PipedInputStream in = new PipedInputStream(input);

    PumpStreamHandler streamHandler = new PumpStreamHandler(outputStream, outputStream, in);
    executor.setWatchdog(new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT));
    executor.setStreamHandler(streamHandler);
    Map<String, String> env = EnvironmentUtils.getProcEnvironment();

    initialOutput = new InterpreterOutput(null);
    outputStream.setInterpreterOutput(initialOutput);
    executor.execute(cmd, env, this);
    rScriptRunning = true;

    // flush output
    eval("cat('')");
}
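
In isolation, the pipe-as-stdin wiring above looks like the following sketch, assuming Apache Commons Exec and a trivial `cat` command (any Unix-like system) in place of the R process: bytes written to the PipedOutputStream become the child process's standard input.

import java.io.PipedInputStream;
import java.io.PipedOutputStream;

import org.apache.commons.exec.CommandLine;
import org.apache.commons.exec.DefaultExecuteResultHandler;
import org.apache.commons.exec.DefaultExecutor;
import org.apache.commons.exec.PumpStreamHandler;

public class PipeToStdin {
    public static void main(String[] args) throws Exception {
        PipedOutputStream input = new PipedOutputStream();
        PipedInputStream in = new PipedInputStream(input);

        DefaultExecutor executor = new DefaultExecutor();
        // stdout/stderr go to this JVM's console; `in` becomes the child's stdin.
        executor.setStreamHandler(new PumpStreamHandler(System.out, System.err, in));

        DefaultExecuteResultHandler result = new DefaultExecuteResultHandler();
        executor.execute(CommandLine.parse("cat"), result); // asynchronous start

        input.write("piped to the child process\n".getBytes("UTF-8"));
        input.close(); // EOF on stdin lets `cat` terminate
        result.waitFor();
    }
}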

From source file:SerialIntList.java

/**
 * Use object serialization to make a "deep clone" of the object o. This
 * method serializes o and all objects it refers to, and then deserializes
 * that graph of objects, which means that everything is copied. This differs
 * from the clone() method of an object which is usually implemented to
 * produce a "shallow" clone that copies references to other objects, instead
 * of copying all referenced objects.
 */
static Object deepclone(final Serializable o) throws IOException, ClassNotFoundException {
    // Create a connected pair of "piped" streams.
    // We'll write bytes to one and read them from the other.
    final PipedOutputStream pipeout = new PipedOutputStream();
    PipedInputStream pipein = new PipedInputStream(pipeout);

    // Now define an independent thread to serialize the object and write
    // its bytes to the PipedOutputStream
    Thread writer = new Thread() {
        public void run() {
            ObjectOutputStream out = null;
            try {
                out = new ObjectOutputStream(pipeout);
                out.writeObject(o);
            } catch (IOException e) {
                // The reader fails with a broken pipe; nothing to do here.
            } finally {
                try {
                    if (out != null)
                        out.close();
                } catch (Exception e) {
                    // Ignore failures while closing.
                }
            }
        }
    };
    writer.start(); // Make the thread start serializing and writing

    // Meanwhile, in this thread, read and deserialize from the piped
    // input stream. The resulting object is a deep clone of the original.
    ObjectInputStream in = new ObjectInputStream(pipein);
    return in.readObject();
}
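
For small object graphs the same deep clone works without the pipe and the extra thread by buffering the serialized bytes in memory. A minimal alternative sketch (all classes are from java.io; the method name deepcloneInMemory is illustrative):

static Object deepcloneInMemory(Serializable o) throws IOException, ClassNotFoundException {
    // Serialize into an in-memory buffer instead of a pipe.
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    ObjectOutputStream out = new ObjectOutputStream(buffer);
    out.writeObject(o);
    out.close();

    // Deserialize from the buffer; the result is a deep clone.
    ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(buffer.toByteArray()));
    try {
        return in.readObject();
    } finally {
        in.close();
    }
}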

From source file:org.jaqpot.core.service.client.jpdi.JPDIClientImpl.java

@Override
public Future<Model> train(Dataset dataset, Algorithm algorithm, Map<String, Object> parameters,
        String predictionFeature, MetaInfo modelMeta, String taskId) {

    CompletableFuture<Model> futureModel = new CompletableFuture<>();

    TrainingRequest trainingRequest = new TrainingRequest();
    trainingRequest.setDataset(dataset);
    trainingRequest.setParameters(parameters);
    trainingRequest.setPredictionFeature(predictionFeature);
    //        String trainingRequestString = serializer.write(trainingRequest);

    final HttpPost request = new HttpPost(algorithm.getTrainingService());

    PipedOutputStream out = new PipedOutputStream();
    PipedInputStream in;
    try {
        in = new PipedInputStream(out);
    } catch (IOException ex) {
        futureModel.completeExceptionally(ex);
        return futureModel;
    }
    InputStreamEntity entity = new InputStreamEntity(in, ContentType.APPLICATION_JSON);
    entity.setChunked(true);

    request.setEntity(entity);
    request.addHeader("Accept", "application/json");

    Future futureResponse = client.execute(request, new FutureCallback<HttpResponse>() {

        @Override
        public void completed(final HttpResponse response) {
            futureMap.remove(taskId);
            int status = response.getStatusLine().getStatusCode();
            try {
                InputStream responseStream = response.getEntity().getContent();

                switch (status) {
                case 200:
                case 201:
                    TrainingResponse trainingResponse = serializer.parse(responseStream,
                            TrainingResponse.class);
                    Model model = new Model();
                    model.setId(randomStringGenerator.nextString(20));
                    model.setActualModel(trainingResponse.getRawModel());
                    model.setPmmlModel(trainingResponse.getPmmlModel());
                    model.setAdditionalInfo(trainingResponse.getAdditionalInfo());
                    model.setAlgorithm(algorithm);
                    model.setParameters(parameters);
                    model.setDatasetUri(dataset != null ? dataset.getDatasetURI() : null);

                    // Check that the model's independent features exist in the dataset
                    List<String> filteredIndependentFeatures = new ArrayList<String>();

                    if (dataset != null && dataset.getFeatures() != null
                            && trainingResponse.getIndependentFeatures() != null)
                        for (String feature : trainingResponse.getIndependentFeatures()) {
                            for (FeatureInfo featureInfo : dataset.getFeatures()) {
                                if (feature.equals(featureInfo.getURI()))
                                    filteredIndependentFeatures.add(feature);
                            }
                        }

                    model.setIndependentFeatures(filteredIndependentFeatures);
                    model.setDependentFeatures(Arrays.asList(predictionFeature));
                    model.setMeta(modelMeta);

                    List<String> predictedFeatures = new ArrayList<>();
                    for (String featureTitle : trainingResponse.getPredictedFeatures()) {
                        Feature predictionFeatureResource = featureHandler.findByTitleAndSource(featureTitle,
                                "algorithm/" + algorithm.getId());
                        if (predictionFeatureResource == null) {
                            // Create the prediction features (POST /feature)
                            String predFeatID = randomStringGenerator.nextString(12);
                            predictionFeatureResource = new Feature();
                            predictionFeatureResource.setId(predFeatID);
                            predictionFeatureResource.setPredictorFor(predictionFeature);
                            predictionFeatureResource.setMeta(MetaInfoBuilder.builder()
                                    .addSources(
                                            /*messageBody.get("base_uri") + */"algorithm/" + algorithm.getId())
                                    .addComments("Feature created to hold predictions by algorithm with ID "
                                            + algorithm.getId())
                                    .addTitles(featureTitle).addSeeAlso(predictionFeature)
                                    .addCreators(algorithm.getMeta().getCreators()).build());
                            /* Create feature */
                            featureHandler.create(predictionFeatureResource);
                        }
                        predictedFeatures.add(baseURI + "feature/" + predictionFeatureResource.getId());
                    }
                    model.setPredictedFeatures(predictedFeatures);
                    futureModel.complete(model);
                    break;
                case 400:
                    String message = new BufferedReader(new InputStreamReader(responseStream)).lines()
                            .collect(Collectors.joining("\n"));
                    futureModel.completeExceptionally(new BadRequestException(message));
                    break;
                case 500:
                    message = new BufferedReader(new InputStreamReader(responseStream)).lines()
                            .collect(Collectors.joining("\n"));
                    futureModel.completeExceptionally(new InternalServerErrorException(message));
                    break;
                default:
                    message = new BufferedReader(new InputStreamReader(responseStream)).lines()
                            .collect(Collectors.joining("\n"));
                    futureModel.completeExceptionally(new InternalServerErrorException(message));
                }
            } catch (IOException | UnsupportedOperationException ex) {
                futureModel.completeExceptionally(ex);
            }
        }

        @Override
        public void failed(final Exception ex) {
            futureMap.remove(taskId);
            futureModel.completeExceptionally(ex);
        }

        @Override
        public void cancelled() {
            futureMap.remove(taskId);
            futureModel.cancel(true);
        }

    });

    // This thread streams the request body into the pipe; the async client
    // consumes the connected PipedInputStream on its own I/O thread, and
    // closing the output side below signals the end of the entity.
    serializer.write(trainingRequest, out);
    try {
        out.close();
    } catch (IOException ex) {
        futureModel.completeExceptionally(ex);
    }

    futureMap.put(taskId, futureResponse);
    return futureModel;
}

From source file:com.blacklocus.jres.http.HttpMethods.java

static HttpEntity createEntity(final Object payload) throws IOException {
    final HttpEntity entity;
    if (payload instanceof InputStream) {

        if (LOG.isDebugEnabled()) {
            String stream = IOUtils.toString((InputStream) payload);
            LOG.debug(stream);
            entity = new StringEntity(stream, ContentType.APPLICATION_JSON);
        } else {
            entity = new InputStreamEntity((InputStream) payload, ContentType.APPLICATION_JSON);
        }

    } else if (payload instanceof String) {

        LOG.debug((String) payload);
        entity = new StringEntity((String) payload, ContentType.APPLICATION_JSON);

    } else { // anything else will be serialized with Jackson

        if (LOG.isDebugEnabled()) {
            String json = ObjectMappers.toJson(payload);
            LOG.debug(json);
            entity = new StringEntity(json, ContentType.APPLICATION_JSON);

        } else {
            final PipedOutputStream pipedOutputStream = new PipedOutputStream();
            final PipedInputStream pipedInputStream = new PipedInputStream(pipedOutputStream);
            PIPER.submit(new ExceptingRunnable() {
                @Override
                protected void go() throws Exception {
                    try {
                        ObjectMappers.NORMAL.writeValue(pipedOutputStream, payload);
                        pipedOutputStream.flush();
                    } finally {
                        IOUtils.closeQuietly(pipedOutputStream);
                    }
                }
            });
            entity = new InputStreamEntity(pipedInputStream, ContentType.APPLICATION_JSON);
        }

    }
    return entity;
}
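
A condensed restatement of the pipe branch above, assuming Jackson's ObjectMapper: the JSON is produced on a worker thread and consumed through the pipe as it is written, so the full document never has to sit in memory. ObjectMapper#writeValue closes the target stream by default, which is what delivers EOF to the consumer.

import java.io.IOException;
import java.io.InputStream;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.util.Collections;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import com.fasterxml.jackson.databind.ObjectMapper;

public class JsonPipe {
    private static final ObjectMapper MAPPER = new ObjectMapper();
    private static final ExecutorService PIPER = Executors.newCachedThreadPool();

    // Serializes payload to JSON on a worker thread; the returned stream
    // yields the JSON bytes as they are produced.
    static InputStream toJsonStream(final Object payload) throws IOException {
        final PipedOutputStream out = new PipedOutputStream();
        PipedInputStream in = new PipedInputStream(out);
        PIPER.submit(new Runnable() {
            public void run() {
                try {
                    MAPPER.writeValue(out, payload); // closes out when done
                } catch (IOException e) {
                    // The reader observes a broken pipe.
                }
            }
        });
        return in;
    }

    public static void main(String[] args) throws IOException {
        InputStream json = toJsonStream(Collections.singletonMap("status", "ok"));
        int b;
        while ((b = json.read()) != -1) {
            System.out.print((char) b);
        }
        System.out.println();
        PIPER.shutdown();
    }
}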

From source file:org.apache.zeppelin.spark.SparkRInterpreter.java

@Override
public void open() {
    // create R script
    createRScript();

    int backendTimeout = Integer.parseInt(System.getenv().getOrDefault("SPARKR_BACKEND_TIMEOUT", "120"));

    // Launch a SparkR backend server for the R process to connect to; this will let it see our
    // Java system properties etc.
    ZeppelinRBackend sparkRBackend = new ZeppelinRBackend();

    Semaphore initialized = new Semaphore(0);
    Thread sparkRBackendThread = new Thread("SparkR backend") {
        @Override
        public void run() {
            sparkRBackendPort = sparkRBackend.init();
            initialized.release();
            sparkRBackend.run();
        }
    };

    sparkRBackendThread.start();

    // Wait for RBackend initialization to finish
    try {
        if (initialized.tryAcquire(backendTimeout, TimeUnit.SECONDS)) {
            // Launch R
            CommandLine cmd = CommandLine.parse(getProperty("zeppelin.sparkr.r"));
            cmd.addArgument(scriptPath, false);
            cmd.addArgument("--no-save", false);
            //      cmd.addArgument(getJavaSparkContext().version(), false);
            executor = new DefaultExecutor();
            outputStream = new ByteArrayOutputStream();
            PipedOutputStream ps = new PipedOutputStream();
            in = null;
            try {
                in = new PipedInputStream(ps);
            } catch (IOException e1) {
                throw new InterpreterException(e1);
            }
            ins = new BufferedWriter(new OutputStreamWriter(ps));

            input = new ByteArrayOutputStream();

            PumpStreamHandler streamHandler = new PumpStreamHandler(outputStream, outputStream, in);
            executor.setStreamHandler(streamHandler);
            executor.setWatchdog(new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT));

            Map<String, String> env = EnvironmentUtils.getProcEnvironment();

            String sparkRInterpreterObjId = sparkRBackend.put(this);
            String uberdataContextObjId = sparkRBackend.put(getUberdataContext());
            env.put("R_PROFILE_USER", scriptPath);
            env.put("SPARK_HOME", getSparkHome());
            env.put("EXISTING_SPARKR_BACKEND_PORT", String.valueOf(sparkRBackendPort));
            env.put("SPARKR_INTERPRETER_ID", sparkRInterpreterObjId);
            env.put("UBERDATA_CONTEXT_ID", uberdataContextObjId);
            logger.info("executing {} {}", env, cmd.toString());
            executor.execute(cmd, env, this);
            logger.info("executed");
            rScriptRunning = true;

        } else {
            System.err.println("SparkR backend did not initialize in " + backendTimeout + " seconds");
            System.exit(-1);
        }
    } catch (InterruptedException e) {
        throw new InterpreterException(e);
    } catch (IOException e) {
        throw new InterpreterException(e);
    }

}