Example usage for java.nio.channels Channels newChannel

Introduction

This page collects example usages of java.nio.channels.Channels.newChannel, drawn from open-source projects.

Prototype

public static WritableByteChannel newChannel(OutputStream out) 

Document

Constructs a channel that writes bytes to the given stream.
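
Before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects below) showing both directions of the stream-to-channel bridge: newChannel(OutputStream) returns a WritableByteChannel, matching the prototype above, while the companion overload newChannel(InputStream) returns a ReadableByteChannel, which several of the examples below also rely on. The class name and the "hello" payload are illustrative only.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.nio.channels.ReadableByteChannel;
import java.nio.channels.WritableByteChannel;
import java.nio.charset.StandardCharsets;

public class NewChannelSketch {
    public static void main(String[] args) throws Exception {
        // Wrap an OutputStream in a WritableByteChannel and write a buffer through it.
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        try (WritableByteChannel outChannel = Channels.newChannel(out)) {
            outChannel.write(ByteBuffer.wrap("hello".getBytes(StandardCharsets.UTF_8)));
        }

        // Wrap an InputStream in a ReadableByteChannel and read the bytes back.
        ByteBuffer buffer = ByteBuffer.allocate(16);
        try (ReadableByteChannel inChannel = Channels.newChannel(new ByteArrayInputStream(out.toByteArray()))) {
            while (inChannel.read(buffer) != -1) {
                // keep reading until the stream is exhausted
            }
        }
        buffer.flip();
        System.out.println(StandardCharsets.UTF_8.decode(buffer)); // prints "hello"
    }
}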

Usage

From source file:org.apache.hadoop.hbase.ipc.ServerRpcConnection.java

private void processUnwrappedData(byte[] inBuf) throws IOException, InterruptedException {
    ReadableByteChannel ch = Channels.newChannel(new ByteArrayInputStream(inBuf));
    // Read all RPCs contained in the inBuf, even partial ones
    while (true) {
        int count;
        if (unwrappedDataLengthBuffer.remaining() > 0) {
            count = this.rpcServer.channelRead(ch, unwrappedDataLengthBuffer);
            if (count <= 0 || unwrappedDataLengthBuffer.remaining() > 0)
                return;
        }

        if (unwrappedData == null) {
            unwrappedDataLengthBuffer.flip();
            int unwrappedDataLength = unwrappedDataLengthBuffer.getInt();

            if (unwrappedDataLength == RpcClient.PING_CALL_ID) {
                if (RpcServer.LOG.isDebugEnabled())
                    RpcServer.LOG.debug("Received ping message");
                unwrappedDataLengthBuffer.clear();
                continue; // ping message
            }
            unwrappedData = ByteBuffer.allocate(unwrappedDataLength);
        }

        count = this.rpcServer.channelRead(ch, unwrappedData);
        if (count <= 0 || unwrappedData.remaining() > 0)
            return;

        if (unwrappedData.remaining() == 0) {
            unwrappedDataLengthBuffer.clear();
            unwrappedData.flip();
            processOneRpc(new SingleByteBuff(unwrappedData));
            unwrappedData = null;
        }
    }
}

From source file:org.alfresco.contentstore.AbstractContentStore.java

protected int applyPatch(ReadableByteChannel inChannel, WritableByteChannel outChannel,
        PatchDocument patchDocument) throws IOException {
    InChannel c = new InChannel(inChannel, patchDocument.getMatchedBlocks(), patchDocument.getBlockSize());

    int totalWritten = 0;

    int blockIndex = -1;

    //        int blockIndex = c.nextBlock();
    //        if(blockIndex > -1)
    //        {
    for (Patch patch : patchDocument.getPatches()) {
        int lastMatchingBlockIndex = patch.getLastMatchIndex();

        blockIndex = c.nextBlock();
        while (blockIndex != -1 && blockIndex <= lastMatchingBlockIndex) {
            int bytesWritten = outChannel.write(c.currentBlock);
            totalWritten += bytesWritten;
            if (bytesWritten != c.bytesRead) {
                throw new RuntimeException("Wrote too few bytes, " + c.blockSize + ", " + bytesWritten);
            }

            blockIndex = c.nextBlock();
            if (blockIndex == -1) {
                break;
            }
        }

        // apply patch
        int patchSize = patch.getSize();
        ReadableByteChannel patchChannel = Channels.newChannel(patch.getStream());
        ByteBuffer patchBB = ByteBuffer.allocate(patchSize);
        int bytesRead = patchChannel.read(patchBB);
        patchBB.flip();
        int bytesWritten = outChannel.write(patchBB);
        totalWritten += bytesWritten;
        if (bytesWritten != bytesRead) {
            throw new RuntimeException("Wrote too few bytes, expected " + bytesRead + ", got " + bytesWritten);
        }
    }

    // we're done with all the patches, add the remaining blocks
    while (blockIndex != -1) {
        int bytesWritten = outChannel.write(c.currentBlock);
        totalWritten += bytesWritten;
        if (bytesWritten != c.bytesRead) {
            throw new RuntimeException("Wrote too few bytes");
        }

        blockIndex = c.nextBlock();
    }
    //        }

    return totalWritten;
}

From source file:org.orbisgis.orbisserver.baseserver.model.Session.java

/**
 * Generate an archive with the result of the job available on the server.
 * @param jobId Id of the job which has generated the results.
 * @return File object of the archive containing the results. If an error occurs during the archive creation, returns null.
 */
public File getResultAchive(String jobId) {
    File jobFolder = new File(workspaceFolder, jobId);
    for (StatusInfo statusInfo : getAllStatusInfo()) {
        if (statusInfo.getJobId().equalsIgnoreCase(jobId)) {
            //Once the matching StatusInfo is found, store each of its outputs in the archive
            for (Output out : statusInfo.getResult().getOutputList()) {
                //In the case of plain data, write it into a file
                if (out.getData() != null) {
                    try {
                        for (Object content : out.getData().getContent()) {
                            File outFile;
                            //If a file with the output name already exists, adds a number to it
                            if (jobFolder.list(new NameFileFilter(out.getTitle())) != null) {
                                int diff = 1;
                                while (jobFolder.list(new NameFileFilter(out.getTitle() + diff)) != null) {
                                    diff++;
                                }
                                outFile = new File(jobFolder,
                                        out.getTitle().replaceAll(File.separator, "") + diff);
                            } else {
                                outFile = new File(jobFolder, out.getTitle().replaceAll(File.separator, ""));
                            }
                            //Create the file and write data inside
                            if (jobFolder.mkdirs() || outFile.createNewFile()) {
                                try (FileWriter fileWriter = new FileWriter(outFile)) {
                                    try (PrintWriter out1 = new PrintWriter(fileWriter)) {
                                        out1.append(content.toString());
                                    }
                                }
                            } else {
                                LOGGER.error("Unable to create the output as a file.");
                            }
                        }
                    } catch (IOException e) {
                        LOGGER.error("Unable to write the output as a file.\n" + e.getMessage());
                    }
                }
                //If the result is a reference, copy it to the archive folder
                else if (out.getReference() != null) {
                    try {
                        URL url = new URL(out.getReference());
                        try (ReadableByteChannel readableByteChannel = Channels.newChannel(url.openStream());
                                FileOutputStream fos = new FileOutputStream(out.getTitle())) {
                            fos.getChannel().transferFrom(readableByteChannel, 0, Long.MAX_VALUE);
                        }
                    } catch (IOException e) {
                        LOGGER.error("Unable to download the result.\n" + e.getMessage());
                    }
                }
            }
        }
    }
    try {
        //Create a zip file with the archive folder
        File zipFile = new File(workspaceFolder, "Result.zip");
        FileOutputStream fos = new FileOutputStream(zipFile);
        ZipOutputStream zos = new ZipOutputStream(fos);
        for (File f : jobFolder.listFiles()) {
            FileInputStream fis = new FileInputStream(f);
            ZipEntry zipEntry = new ZipEntry(f.getName());
            zos.putNextEntry(zipEntry);

            byte[] bytes = new byte[1024];
            int length;
            while ((length = fis.read(bytes)) >= 0) {
                zos.write(bytes, 0, length);
            }

            zos.closeEntry();
            fis.close();
        }
        zos.close();
        fos.close();
        return zipFile;
    } catch (IOException e) {
        LOGGER.error("Unable to zip the result folder.\n" + e.getMessage());
    }
    return null;
}
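
The reference branch in the example above uses a very common idiom: wrap url.openStream() in a ReadableByteChannel and drain it into a file with FileChannel.transferFrom. Here is a minimal sketch of that idiom in isolation, with a placeholder URL and target path, and with both resources closed by try-with-resources.

import java.io.FileOutputStream;
import java.net.URL;
import java.nio.channels.Channels;
import java.nio.channels.ReadableByteChannel;

public class UrlDownloadSketch {
    // Downloads whatever is behind 'url' into the file 'target'; both arguments are placeholders.
    public static void download(String url, String target) throws Exception {
        try (ReadableByteChannel source = Channels.newChannel(new URL(url).openStream());
                FileOutputStream fos = new FileOutputStream(target)) {
            // transferFrom drains the channel into the file, up to Long.MAX_VALUE bytes.
            fos.getChannel().transferFrom(source, 0, Long.MAX_VALUE);
        }
    }
}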

From source file:org.apache.beam.sdk.io.FileBasedSinkTest.java

private File writeValuesWithWritableByteChannelFactory(final WritableByteChannelFactory factory,
        String... values) throws IOException {
    final File file = tmpFolder.newFile("test.gz");
    final WritableByteChannel channel = factory.create(Channels.newChannel(new FileOutputStream(file)));
    for (String value : values) {
        channel.write(ByteBuffer.wrap((value + "\n").getBytes(StandardCharsets.UTF_8)));
    }
    channel.close();
    return file;
}

From source file:org.alfresco.contentstore.ChecksumTest.java

@Test
public void test1_5() throws IOException {
    checksumService.setBlockSize(8192);

    UserContext.setUser("user1");

    File f = copy("marbles-uncompressed.tif");
    System.out.println("f = " + f);
    Node node = Node.build().nodeId(GUID.generate()).nodeVersion(1L);

    try (InputStream in = getClass().getClassLoader().getResourceAsStream("marbles-uncompressed.tif")) {
        NodeChecksums checksums = checksumService.getChecksums(node, in);

        try (ReadableByteChannel channel1 = Channels
                .newChannel(getClass().getClassLoader().getResourceAsStream("marbles-uncompressed.tif"))) {
            PatchDocument patchDocument = new PatchDocumentImpl();
            patchService.updatePatchDocument(patchDocument, checksums, channel1);
            System.out.println("patchDocument = " + patchDocument);
            applyPatch(f, patchDocument);
        }
    }

    try (InputStream in3 = getClass().getClassLoader().getResourceAsStream("marbles-uncompressed.tif");
            InputStream in4 = new FileInputStream(f)) {
        assertEqual(in3, in4);
    }
}

From source file:Interface.FramePrincipal.java

private void bt_atuActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bt_atuActionPerformed

    ///////////////////////////////////////////// Download the XML file from the bucket //////////////////////////////////////////////////////////
    File diretorio = new File(dir_dow.getText());

    if ("".equals(dir_dow.getText())) {
        JOptionPane.showMessageDialog(null, "The directory field must be filled in!");
    } else if (!diretorio.exists()) {
        JOptionPane.showMessageDialog(null, "This is not a valid directory!");
    } else {
        try {
            URL arquivoBucket = new URL("http://storage.googleapis.com/" + bac.getNome());

            // Set the output path for the file being downloaded
            ReadableByteChannel canalArquivoSaida = Channels.newChannel(arquivoBucket.openStream());
            FileOutputStream arquivoSaida = new FileOutputStream(
                    dir_dow.getText() + "/" + bac.getNome() + ".xml");

            // Measure how long the download took
            long inicio = System.currentTimeMillis();
            arquivoSaida.getChannel().transferFrom(canalArquivoSaida, 0, 1 << 24);
            long fim = System.currentTimeMillis();
            System.out.println(fim - inicio);

            arquivoSaida.close(); // release the file once the download finishes

            /////////////////////////////// Load the table with the file names ////////////////////////////////////////////////////
            // clear the current table rows
            DefaultTableModel adm = (DefaultTableModel) jTable.getModel();
            adm.setNumRows(0);
            ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

            // Import the XML file
            File file = new File(dir_dow.getText() + "/" + bac.getNome() + ".xml");
            DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
            DocumentBuilder db = dbf.newDocumentBuilder();
            Document doc = db.parse(file);
            doc.getDocumentElement().normalize();
            System.out.println("Root element " + doc.getDocumentElement().getNodeName());
            NodeList nodeLst = doc.getElementsByTagName("Contents");
            System.out.println("Information of all employees");

            for (int s = 0; s < nodeLst.getLength(); s++) {

                Node fstNode = nodeLst.item(s);

                if (fstNode.getNodeType() == Node.ELEMENT_NODE) {

                    Element fstElmnt = (Element) fstNode;
                    NodeList fstNmElmntLst = fstElmnt.getElementsByTagName("Key");
                    Element fstNmElmnt = (Element) fstNmElmntLst.item(0);
                    NodeList fstNm = fstNmElmnt.getChildNodes();
                    System.out.println("Key : " + ((Node) fstNm.item(0)).getNodeValue());

                    String val = ((Node) fstNm.item(0)).getNodeValue();

                    adm.addRow(new Object[] { val });

                }

            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

}

From source file:org.alfresco.provision.ActiveMQService.java

private BrokerStats getBrokerStats() throws IOException {
    BrokerStats brokerStats = new BrokerStats();

    StringBuilder sb = new StringBuilder("http://");
    sb.append(activeMQHost);
    sb.append(":");
    sb.append(activeMQPort);
    sb.append("/api/jolokia");
    String url = sb.toString();

    CloseableHttpResponse httpResponse = null;

    HttpPost httpPost = new HttpPost(url);
    Request[] post = new Request[] {
            new Request("read", "org.apache.activemq:type=Broker,brokerName=localhost", "MemoryPercentUsage"),
            new Request("read", "org.apache.activemq:type=Broker,brokerName=localhost", "StorePercentUsage"),
            new Request("read", "org.apache.activemq:type=Broker,brokerName=localhost", "TempPercentUsage") };
    String str = mapper.writeValueAsString(post);
    HttpEntity postEntity = new StringEntity(str);
    httpPost.setEntity(postEntity);
    httpResponse = client.execute(httpPost);

    StatusLine status = httpResponse.getStatusLine();
    // Expecting "OK" status
    if (status.getStatusCode() == HttpStatus.SC_OK) {
        HttpEntity entity = httpResponse.getEntity();
        InputStream in = entity.getContent();
        try {
            ByteBuffer bb = ByteBuffer.allocate(1024 * 10);
            ReadableByteChannel inChannel = Channels.newChannel(in);
            int read = -1;
            do {
                read = inChannel.read(bb);
            } while (read != -1);
            bb.flip();
            Response[] response = mapper.readValue(bb.array(), Response[].class);
            for (Response r : response) {
                if (r.getRequest().getAttribute().equals("MemoryPercentUsage")) {
                    double memoryPercentUsage = r.getValue() != null ? r.getValue() : 0.0;
                    brokerStats.withMemoryPercentUsage(memoryPercentUsage);
                } else if (r.getRequest().getAttribute().equals("StorePercentUsage")) {
                    double storePercentUsage = r.getValue() != null ? r.getValue() : 0.0;
                    brokerStats.withStorePercentUsage(storePercentUsage);
                } else if (r.getRequest().getAttribute().equals("TempPercentUsage")) {
                    double tempPercentUsage = r.getValue() != null ? r.getValue() : 0.0;
                    brokerStats.withTempPercentUsage(tempPercentUsage);
                }
            }
        } finally {
            if (in != null) {
                in.close();
            }
        }
    } else {
        // TODO
    }

    return brokerStats;
}

From source file:com.linkedin.databus.core.TestDbusEventBufferPersistence.java

private void pushEventsToBuffer(DbusEventBuffer dbusBuf, int numEvents) {
    dbusBuf.start(1);
    dbusBuf.startEvents();
    DbusEventGenerator generator = new DbusEventGenerator();
    Vector<DbusEvent> events = new Vector<DbusEvent>();

    generator.generateEvents(numEvents, 1, 100, 10, events);

    // set end of windows
    for (int i = 0; i < numEvents - 1; ++i) {
        long scn = events.get(i).sequence();
        ++i;

        DbusEventInternalWritable writableEvent;
        try {
            writableEvent = DbusEventCorrupter.makeWritable(events.get(i));
        } catch (InvalidEventException ie) {
            LOG.error("Exception trace is " + ie);
            Assert.fail();
            return;
        }
        writableEvent.setSrcId((short) -2);
        writableEvent.setSequence(scn);
        writableEvent.applyCrc();
        assertTrue("invalid event #" + i, writableEvent.isValid(true));
    }

    // write all generated events to a channel, then feed them back into the buffer
    ByteArrayOutputStream oStream = new ByteArrayOutputStream();
    WritableByteChannel oChannel = Channels.newChannel(oStream);
    for (int i = 0; i < numEvents; ++i) {
        ((DbusEventInternalReadable) events.get(i)).writeTo(oChannel, Encoding.BINARY);
    }

    byte[] writeBytes = oStream.toByteArray();
    ByteArrayInputStream iStream = new ByteArrayInputStream(writeBytes);
    ReadableByteChannel rChannel = Channels.newChannel(iStream);
    try {
        dbusBuf.readEvents(rChannel);
    } catch (InvalidEventException ie) {
        LOG.error("Exception trace is " + ie);
        Assert.fail();
        return;
    }
}

From source file:org.alfresco.contentstore.ChecksumTest.java

@Test
public void test2() throws IOException {
    checksumService.setBlockSize(8192);

    UserContext.setUser("user1");

    File f = TempFileProvider.createTempFile("ContentStoreTest", GUID.generate());

    System.out.println("f = " + f);
    Node node = Node.build().nodeId(GUID.generate()).nodeVersion(1L);

    NodeChecksums checksums = getChecksumsForClasspathResource(node, "marbles-uncompressed.tif");

    try (ReadableByteChannel channel1 = Channels
            .newChannel(getClass().getClassLoader().getResourceAsStream("marbles-uncompressed1.tif"));
            //            ReadableByteChannel in = Channels.newChannel(getClass().getClassLoader().getResourceAsStream("marbles-uncompressed1.tif"));
            ReadableByteChannel in = Channels
                    .newChannel(getClass().getClassLoader().getResourceAsStream("marbles-uncompressed.tif"));
            FileOutputStream fos = new FileOutputStream(f);
            WritableByteChannel patchedChannel = fos.getChannel()) {
        PatchDocument patchDocument = new PatchDocumentImpl();
        patchService.updatePatchDocument(patchDocument, checksums, channel1);
        System.out.println("patchDocument = " + patchDocument);

        applyPatch(in, patchedChannel, patchDocument);
        //            applyPatch(f, patchDocument);
    }

    try (InputStream in3 = getClass().getClassLoader().getResourceAsStream("marbles-uncompressed1.tif");
            InputStream in4 = new FileInputStream(f)) {
        assertEqual(in3, in4);
    }
}

From source file:com.alibaba.jstorm.daemon.nimbus.ServiceHandler.java

@Override
public void beginLibUpload(String libName) throws TException {
    try {
        String parent = PathUtils.parent_path(libName);
        PathUtils.local_mkdirs(parent);
        data.getUploaders().put(libName, Channels.newChannel(new FileOutputStream(libName)));
        LOG.info("Begin upload file from client to " + libName);
    } catch (Exception e) {
        LOG.error("Fail to upload jar " + libName, e);
        throw new TException(e);
    }
}