List of usage examples for java.nio.channels Channels newChannel
public static WritableByteChannel newChannel(OutputStream out)
From source file:org.cloudfoundry.caldecott.server.converter.ByteBufferHttpMessageConverter.java
/**
 * Writes the buffer's remaining bytes to the HTTP response body.
 *
 * @param buffer        the payload to send; consumed from its current position to its limit
 * @param outputMessage the outgoing HTTP message supplying the body stream
 * @throws IOException if writing to the underlying output stream fails
 */
@Override
protected void writeInternal(ByteBuffer buffer, HttpOutputMessage outputMessage) throws IOException {
    WritableByteChannel channel = Channels.newChannel(outputMessage.getBody());
    // The WritableByteChannel contract allows write() to consume fewer bytes
    // than remain in the buffer, so loop until the buffer is fully drained.
    while (buffer.hasRemaining()) {
        channel.write(buffer);
    }
}
From source file:com.saasovation.common.port.adapter.messaging.slothmq.SlothWorker.java
protected String receive() { SocketChannel socketChannel = null; try {/*from w ww . j a v a 2s .c o m*/ socketChannel = this.socket.accept(); if (socketChannel == null) { return null; // if non-blocking } ReadableByteChannel readByteChannel = Channels.newChannel(socketChannel.socket().getInputStream()); ByteArrayOutputStream byteArray = new ByteArrayOutputStream(); ByteBuffer readBuffer = ByteBuffer.allocate(8); while (readByteChannel.read(readBuffer) != -1) { readBuffer.flip(); while (readBuffer.hasRemaining()) { byteArray.write(readBuffer.get()); } readBuffer.clear(); } return new String(byteArray.toByteArray()); } catch (IOException e) { logger.error("Failed to receive because: {}: Continuing...", e.getMessage(), e); return null; } finally { if (socketChannel != null) { try { socketChannel.close(); } catch (IOException e) { // ignore } } } }
From source file:com.web.searchlocal.flashpaper.thread.Covnert2SwfTask.java
/**
 * Converts {@code inFile} to a SWF file inside {@code outFile} by launching
 * the external converter ({@code defaultCommand}) and logging its console
 * output while waiting for the process to finish.
 */
public void excute() {
    // Output path: input file name with its extension replaced by ".swf".
    // NOTE(review): the regex "[.]{1}.*$" strips from the FIRST dot, so
    // "a.b.doc" becomes "a.swf" — confirm this is intended for dotted names.
    String tmpOutFile = outFile.getPath().concat(File.separator)
            .concat(inFile.getName().replaceAll("[.]{1}.*$", ".swf"));
    List<String> commandArray = new ArrayList<String>();
    commandArray.add(defaultCommand);
    commandArray.add(inFile.getPath());
    commandArray.add("-o");
    commandArray.add(tmpOutFile);
    ProcessBuilder pbObj = new ProcessBuilder();
    pbObj.command(commandArray);
    pbObj.directory(outFile);
    // Merge stderr into stdout so a single reader thread drains everything.
    pbObj.redirectErrorStream(true);
    try {
        Process proObj = pbObj.start();
        final InputStream ins = proObj.getInputStream();
        final ByteBuffer byteBuffer = ByteBuffer.allocate(1024);
        // Drain the child's output on a daemon thread so the child cannot
        // block on a full pipe while this thread waits for it to exit.
        Thread th = new Thread() {
            public void run() {
                ReadableByteChannel rbcObj = Channels.newChannel(ins);
                try {
                    while (rbcObj.read(byteBuffer) != -1) {
                        byteBuffer.flip();
                        // Decodes with the platform default charset — assumes
                        // the converter emits output in that encoding.
                        logger.info(java.nio.charset.Charset.defaultCharset().decode(byteBuffer));
                        byteBuffer.clear();
                    }
                } catch (IOException e) {
                    logger.error(e);
                }
            }
        };
        th.setDaemon(true);
        th.start();
        try {
            // Block until the converter exits; the daemon thread may still be
            // flushing the tail of the output at this point.
            proObj.waitFor();
            logger.error("??." + tmpOutFile);
        } catch (InterruptedException e) {
            logger.error(e);
        }
    } catch (IOException e) {
        logger.error(e);
    }
}
From source file:io.druid.segment.data.VSizeIndexedIntsWriterTest.java
private void checkSerializedSizeAndData() throws Exception { int maxValue = vals.length == 0 ? 0 : Ints.max(vals); VSizeIndexedIntsWriter writer = new VSizeIndexedIntsWriter(ioPeon, "test", maxValue); VSizeIndexedInts intsFromList = VSizeIndexedInts.fromList(Ints.asList(vals), maxValue); writer.open();/*from ww w.j a va2s.c om*/ for (int val : vals) { writer.add(val); } writer.close(); long writtenLength = writer.getSerializedSize(); final WritableByteChannel outputChannel = Channels.newChannel(ioPeon.makeOutputStream("output")); writer.writeToChannel(outputChannel); outputChannel.close(); assertEquals(writtenLength, intsFromList.getSerializedSize()); // read from ByteBuffer and check values VSizeIndexedInts intsFromByteBuffer = VSizeIndexedInts .readFromByteBuffer(ByteBuffer.wrap(IOUtils.toByteArray(ioPeon.makeInputStream("output")))); assertEquals(vals.length, intsFromByteBuffer.size()); for (int i = 0; i < vals.length; ++i) { assertEquals(vals[i], intsFromByteBuffer.get(i)); } }
From source file:org.alfresco.MockContentGetter.java
/**
 * Registers file-backed test content for a node in all three lookup maps
 * (by node, by internal id, and by path).
 *
 * @param nodeInternalId internal numeric id of the node
 * @param nodeId         external node id
 * @param nodeVersion    node version
 * @param nodePath       path key for path-based lookup
 * @param nodeContent    file whose bytes back the content channel
 * @param mimeType       declared mime type (currently unused here)
 * @return this getter, for call chaining
 * @throws FileNotFoundException if {@code nodeContent} cannot be opened
 */
public MockContentGetter addTestContent(long nodeInternalId, String nodeId, Long nodeVersion, String nodePath,
        File nodeContent, String mimeType) throws FileNotFoundException {
    long size = nodeContent.length();
    // The channel stays open for the lifetime of the mock; consumers read it.
    ReadableByteChannel channel = Channels
            .newChannel(new BufferedInputStream(new FileInputStream(nodeContent)));
    Content content = new Content(channel, size);
    Node node = Node.build().nodeId(nodeId).nodeInternalId(nodeInternalId).nodeVersion(nodeVersion);
    testContentByNodeId.put(node, content);
    testContentByNodeInternalId.put(nodeInternalId, content);
    testContentByNodePath.put(nodePath, content);
    return this;
}
From source file:org.sglover.checksum.ChecksumServiceImpl.java
/**
 * Adapts the given stream to a readable channel.
 *
 * @param in the stream to wrap; not closed here
 * @return a channel reading from {@code in}
 * @throws IOException declared for caller compatibility; not thrown by this implementation
 */
private ReadableByteChannel getChannel(InputStream in) throws IOException {
    return Channels.newChannel(in);
}
From source file:com.compomics.pladipus.controller.setup.InstallActiveMQ.java
private void downloadActiveMQ() throws IOException, ZipException { //File downloadFile = PladipusFileDownloadingService.downloadFile(link, activeMQFolder); if (!activeMQFolder.exists() & !activeMQFolder.mkdirs()) { throw new IOException("could not make install folder"); }//from w ww. jav a 2s. c o m URL website = new URL(link); Path downloadFile = Files.createTempFile("activemqdownload", null); try (ReadableByteChannel rbc = Channels.newChannel(website.openStream()); FileOutputStream fos = new FileOutputStream(downloadFile.toFile())) { //todo replace with loop and replace Long.MAX_VALUE with buffer size? if (fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE) != 0) { try (FileInputStream fis = new FileInputStream(downloadFile.toFile())) { if (DigestUtils.md5Hex(fis).equals("4b844f588672e6616bd6f006253d6148")) { ZipFile zipFile = new ZipFile(downloadFile.toFile()); zipFile.extractAll(activeMQFolder.getPath()); } else { throw new IOException("md5 digest did not match, aborting"); } } } } }
From source file:oz.hadoop.yarn.api.FsByteBufferPersister.java
/**
 * Appends the contents of {@code dataBuffer} to the output channel associated
 * with {@code dataIdentifier}, lazily creating the backing file and channel on
 * first use. The buffer is rewound first, so everything up to its limit is
 * written; the caller's buffer position is consumed by the write.
 *
 * @param dataIdentifier key identifying the destination file/channel
 * @param dataBuffer     data to persist
 * @throws IllegalStateException if the file cannot be created or the write fails
 */
public void persist(String dataIdentifier, ByteBuffer dataBuffer) {
    WritableByteChannel outputChannel = this.outputChannels.get(dataIdentifier);
    if (outputChannel == null) {
        String fileName = this.generateFileName(dataIdentifier);
        try {
            OutputStream os = this.fileSystem.create(new Path(fileName), true);
            outputChannel = Channels.newChannel(os);
            this.outputChannels.put(dataIdentifier, outputChannel);
        } catch (Exception e) {
            throw new IllegalStateException(
                    "Failed to create FSDataOutputStream with fileIdentifier '" + dataIdentifier + "'", e);
        }
    }
    dataBuffer.rewind();
    try {
        // write() is not guaranteed to drain the buffer in a single call per
        // the WritableByteChannel contract; loop until every byte is written.
        while (dataBuffer.hasRemaining()) {
            outputChannel.write(dataBuffer);
        }
    } catch (Exception e) {
        throw new IllegalStateException("Failed to write data to channel", e);
    }
}
From source file:com.thinkberg.moxo.vfs.s3.jets3t.Jets3tFileObject.java
protected void doAttach() throws Exception { if (!attached) { try {//from www. j av a2 s . c o m object = service.getObject(bucket, getS3Key()); System.err.println("Attached file to S3 Object: " + object); InputStream is = object.getDataInputStream(); if (object.getContentLength() > 0) { ReadableByteChannel rbc = Channels.newChannel(is); FileChannel cacheFc = getCacheFileChannel(); cacheFc.transferFrom(rbc, 0, object.getContentLength()); cacheFc.close(); rbc.close(); } else { is.close(); } } catch (S3ServiceException e) { object = new S3Object(bucket, getS3Key()); object.setLastModifiedDate(new Date()); System.err.println("Attached file to new S3 Object: " + object); } attached = true; } }
From source file:com.hazelcast.simulator.boot.SimulatorInstaller.java
/**
 * Installs the Simulator distribution for {@code version} under the user's
 * home directory and points the SIMULATOR_HOME system property at it.
 * SNAPSHOT versions are taken from the local Maven repository; release
 * versions are downloaded from Maven Central when not already installed.
 *
 * @throws RuntimeException wrapping any failure during download or extraction
 */
void install() {
    File simulatorHome = new File(FileUtils.getUserHomePath(), "hazelcast-simulator-" + version);
    System.setProperty("SIMULATOR_HOME", simulatorHome.getAbsolutePath());
    System.out.println("Installing Simulator: " + version);
    File userHome = getUserHome();
    try {
        if (version.endsWith("SNAPSHOT")) {
            File archive = new File(
                    format("%s/.m2/repository/com/hazelcast/simulator/dist/%s/dist-%s-dist.tar.gz", userHome,
                            version, version));
            if (archive.exists()) {
                // NOTE(review): File.delete() fails on non-empty directories;
                // confirm decompress() tolerates a pre-existing install dir.
                simulatorHome.delete();
                decompress(archive);
            } else if (!simulatorHome.exists()) {
                throw new IllegalStateException(
                        "Could not install simulator, archive: " + archive.getAbsolutePath() + " not found");
            }
        } else if (!simulatorHome.exists()) {
            File archive = new File(getUserHome(), format("hazelcast-simulator-%s-dist.tar.gz", version));
            URL url = new URL(format(
                    "http://repo1.maven.org/maven2/" + "com/hazelcast/simulator/dist/%s/dist-%s-dist.tar.gz",
                    version, version));
            System.out.printf("File [%s] doesn't exist; downloading\n", archive.getAbsolutePath());
            // try-with-resources: channel and output stream are both closed
            // even on failure (the original leaked the FileOutputStream).
            try (ReadableByteChannel rbc = Channels.newChannel(url.openStream());
                    FileOutputStream fos = new FileOutputStream(archive)) {
                fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
            }
            decompress(archive);
            archive.delete();
        }
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}