List of usage examples for java.nio.channels.Channels.newInputStream
public static InputStream newInputStream(ReadableByteChannel ch)
public static InputStream newInputStream(AsynchronousByteChannel ch)
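Both overloads adapt a channel to a blocking InputStream: the ReadableByteChannel form (used by most of the examples below) reads directly from the channel, while the AsynchronousByteChannel form blocks until each pending asynchronous read completes. A minimal, self-contained sketch of the common ReadableByteChannel case; the file name data.bin is hypothetical:

import java.io.IOException;
import java.io.InputStream;
import java.nio.channels.Channels;
import java.nio.channels.FileChannel;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;

public class NewInputStreamExample {
    public static void main(String[] args) throws IOException {
        // Hypothetical file path, for illustration only.
        try (FileChannel channel = FileChannel.open(Paths.get("data.bin"), StandardOpenOption.READ);
                InputStream in = Channels.newInputStream(channel)) {
            byte[] buf = new byte[4096];
            int n;
            while ((n = in.read(buf)) != -1) {
                // process n bytes from buf ...
            }
        }
    }
}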
From source file:com.quanticate.opensource.compressingcontentstore.DecompressingContentReader.java
@Override
protected ReadableByteChannel getDirectReadableChannel() throws ContentIOException {
    // Get a Channel onto the real data
    ReadableByteChannel rawChannel = getRawChannel();

    // Wrap this as an InputStream - Commons Compress is Stream not Channel based.
    // Note that Commons Compress needs to mark/reset a bit to identify
    InputStream rawInp = new BufferedInputStream(Channels.newInputStream(rawChannel), 32);

    // Try to process it as a compressed stream
    try {
        CompressorInputStream decompressed = new CompressorStreamFactory()
                .createCompressorInputStream(rawInp);
        logger.debug("Detected compressed data as " + decompressed.getClass().getName());
        return Channels.newChannel(decompressed);
    } catch (CompressorException e) {
        logger.info("Unable to decompress " + realContentReader, e);
    }

    // Tidy up that channel, and re-fetch the real one
    try {
        rawInp.close();
        rawChannel.close();
    } catch (IOException e) {
        logger.warn("Error tidying up", e);
    }
    logger.debug("Using raw form for " + getContentUrl());
    return getRawChannel();
}
From source file:com.bigfatgun.fixjures.json.JSONSource.java
private String loadSource() throws IOException {
    return CharStreams.toString(
            new InputStreamReader(Channels.newInputStream(getSource()), getCharset()));
}
From source file:com.lemania.sis.server.servlet.GcsServlet.java
/**
 * Retrieves a file from GCS and returns it in the http response. If the
 * request path is /gcs/Foo/Bar this will be interpreted as a request to
 * read the GCS file named Bar in the bucket Foo.
 */
@Override
public void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException {
    GcsFilename fileName = getFileName(req);
    resp.setContentType("application/octet-stream");
    resp.setHeader("Content-Disposition", "attachment;filename=" + fileName.getObjectName());
    if (SERVE_USING_BLOBSTORE_API) {
        BlobstoreService blobstoreService = BlobstoreServiceFactory.getBlobstoreService();
        BlobKey blobKey = blobstoreService
                .createGsBlobKey("/gs/" + fileName.getBucketName() + "/" + fileName.getObjectName());
        blobstoreService.serve(blobKey, resp);
    } else {
        GcsInputChannel readChannel = gcsService.openPrefetchingReadChannel(fileName, 0, BUFFER_SIZE);
        copy(Channels.newInputStream(readChannel), resp.getOutputStream());
    }
}
From source file:com.newatlanta.appengine.vfs.provider.GaeRandomAccessContent.java
GaeRandomAccessContent(GaeFileObject gfo, RandomAccessMode m, boolean append) throws IOException {
    EnumSet<StandardOpenOption> options = EnumSet.of(StandardOpenOption.READ);
    if (m == RandomAccessMode.READWRITE) {
        options.add(StandardOpenOption.WRITE);
        gfo.doSetLastModTime(System.currentTimeMillis());
    }
    if (append) {
        options.add(StandardOpenOption.APPEND);
    }
    fileChannel = new GaeFileChannel(gfo, options);
    dataOutput = new DataOutputStream(Channels.newOutputStream(fileChannel));
    dataInput = new GaeDataInputStream(Channels.newInputStream(fileChannel));
}
From source file:com.chicm.cmraft.rpc.PacketUtils.java
public static RpcCall parseRpcRequestFromChannel(AsynchronousSocketChannel channel, BlockingService service)
        throws InterruptedException, ExecutionException, IOException {
    RpcCall call = null;
    long t = System.currentTimeMillis();
    InputStream in = Channels.newInputStream(channel);
    byte[] datasize = new byte[MESSAGE_LENGHT_FIELD_SIZE];
    in.read(datasize); // note: a single read() may leave the length prefix partially filled
    int nDataSize = bytes2Int(datasize);

    int len = 0;
    ByteBuffer buf = ByteBuffer.allocateDirect(nDataSize);
    for (; len < nDataSize;) {
        len += channel.read(buf).get();
    }
    if (len < nDataSize) {
        LOG.error("SOCKET READ FAILED, len:" + len);
        return call;
    }
    byte[] data = new byte[nDataSize];
    buf.flip();
    buf.get(data);

    int offset = 0;
    CodedInputStream cis = CodedInputStream.newInstance(data, offset, nDataSize - offset);
    int headerSize = cis.readRawVarint32();
    offset += cis.getTotalBytesRead();
    RequestHeader header = RequestHeader.newBuilder().mergeFrom(data, offset, headerSize).build();
    offset += headerSize;

    cis.skipRawBytes(headerSize);
    cis.resetSizeCounter();
    int bodySize = cis.readRawVarint32();
    offset += cis.getTotalBytesRead();
    //LOG.debug("header parsed:" + header.toString());

    MethodDescriptor md = service.getDescriptorForType().findMethodByName(header.getRequestName());
    Builder builder = service.getRequestPrototype(md).newBuilderForType();
    Message body = null;
    if (builder != null) {
        body = builder.mergeFrom(data, offset, bodySize).build();
        //LOG.debug("server : request parsed:" + body.toString());
    }
    call = new RpcCall(header.getId(), header, body, md);
    if (LOG.isTraceEnabled()) {
        LOG.trace("Parse Rpc request from socket: " + call.getCallId() + ", takes "
                + (System.currentTimeMillis() - t) + " ms");
    }
    return call;
}
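One caveat in the example above: the single in.read(datasize) call is not guaranteed to fill the length-prefix array, since InputStream.read may return fewer bytes than requested. Below is a minimal sketch of the AsynchronousByteChannel overload that loops until the prefix is complete; the host and port are hypothetical:

import java.io.EOFException;
import java.io.InputStream;
import java.net.InetSocketAddress;
import java.nio.channels.AsynchronousSocketChannel;
import java.nio.channels.Channels;

public class AsyncChannelStreamExample {
    public static void main(String[] args) throws Exception {
        // Hypothetical endpoint, for illustration only.
        try (AsynchronousSocketChannel socket = AsynchronousSocketChannel.open()) {
            socket.connect(new InetSocketAddress("example.com", 7)).get();
            // Each read on this stream blocks until the underlying
            // asynchronous read operation completes.
            InputStream in = Channels.newInputStream(socket);
            byte[] prefix = new byte[4];
            int read = 0;
            // Loop until the full length prefix is in hand.
            while (read < prefix.length) {
                int n = in.read(prefix, read, prefix.length - read);
                if (n < 0) {
                    throw new EOFException("channel closed before prefix was read");
                }
                read += n;
            }
        }
    }
}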
From source file:org.apache.hadoop.mapreduce.task.reduce.InMemoryLinkMapOutput.java
@Override
public void shuffle(MapHost host, InputStream input, long compressedLength, long decompressedLength,
        ShuffleClientMetrics metrics, Reporter reporter) throws IOException {
    String mapHostName = host.getHostName().split(":")[0];
    String app_path = conf.get(MRConfig.LOCAL_DIR);
    LOG.debug("original app_path " + app_path);
    String[] app_path_parts = app_path.split("/");
    app_path_parts[app_path_parts.length - 5] = mapHostName;
    StringBuilder builder = new StringBuilder();
    for (String s : app_path_parts) {
        builder.append(s);
        builder.append("/");
    }
    app_path = builder.toString();
    String src = app_path + "output/" + getMapId() + "/file.out";
    File f = new File(src);
    if (f.exists()) {
        LOG.debug("shuffleToLink: the src " + src + " EXIST!");
    }
    //LOG.debug("src file size: " + f.length());

    //input = new FileInputStream(src);
    //input.skip(offset);
    RandomAccessFile raf = new RandomAccessFile(f, "r");
    input = Channels.newInputStream(raf.getChannel().position(offset));

    IFileInputStream checksumIn = new IFileInputStream(input, compressedLength, conf);
    input = checksumIn;

    // Are map-outputs compressed?
    if (codec != null) {
        decompressor.reset();
        input = codec.createInputStream(input, decompressor);
    }

    try {
        LOG.debug("offset: " + offset);
        LOG.debug("memory.length: " + memory.length);
        LOG.debug("compressedLength: " + compressedLength);
        LOG.debug("decompressedLength: " + decompressedLength);

        // TO-DO: would offset and length be OK to be int?
        IOUtils.readFully(input, memory, 0, memory.length);
        metrics.inputBytes((int) memory.length);
        reporter.progress();
        LOG.info("Read " + memory.length + " bytes from map-output for " + getMapId());

        /*
         * We've gotten the amount of data we were expecting. Verify the
         * decompressor has nothing more to offer. This action also forces
         * the decompressor to read any trailing bytes that weren't critical
         * for decompression, which is necessary to keep the stream in sync.
         */
        //if (input.read() >= 0) {
        //    throw new IOException("Unexpected extra bytes from input stream for " + getMapId());
        //}
        input.close();
        raf.close();
    } catch (IOException ioe) {
        // Close the streams
        IOUtils.cleanup(LOG, input);
        // Re-throw
        throw ioe;
    } finally {
        CodecPool.returnDecompressor(decompressor);
    }
}
From source file:com.thinkberg.moxo.vfs.s3.jets3t.Jets3tFileObject.java
protected InputStream doGetInputStream() throws Exception {
    return Channels.newInputStream(getCacheFileChannel());
}
From source file:com.github.neoio.nio.util.NIOUtils.java
public static ByteBuffer readChannelToBuffer(ReadableByteChannel channel) throws NetIOException {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    try {
        logger.debug("bytes read from channel: " + IOUtils.copy(Channels.newInputStream(channel), bos));
    } catch (IOException e) {
        throw new NetIOException(e);
    }
    return ByteBuffer.wrap(ArrayUtils.subarray(bos.toByteArray(), 0, bos.size()));
}
From source file:com.almende.eve.state.ConcurrentJsonFileState.java
/**
 * Open file.
 *
 * @throws IOException
 *             Signals that an I/O exception has occurred.
 */
@SuppressWarnings("resource")
protected void openFile() throws IOException {
    synchronized (locked) {
        while (locked.containsKey(filename) && locked.get(filename)) {
            try {
                locked.wait();
            } catch (final InterruptedException e) {
            }
        }
        locked.put(filename, true);

        final File file = new File(filename);
        if (!file.exists()) {
            locked.put(filename, false);
            locked.notifyAll();
            throw new IllegalStateException("Warning: File doesn't exist (anymore):'" + filename + "'");
        }

        channel = new RandomAccessFile(file, "rw").getChannel();

        try {
            // TODO: add support for shared locks, allowing parallel reading
            // operations.
            lock = channel.lock();
        } catch (final Exception e) {
            channel.close();
            channel = null;
            lock = null;
            locked.put(filename, false);
            locked.notifyAll();
            throw new IllegalStateException("error, couldn't obtain file lock on:" + filename, e);
        }
        fis = Channels.newInputStream(channel);
        fos = Channels.newOutputStream(channel);
    }
}
From source file:com.almende.eve.state.file.ConcurrentSerializableFileState.java
/**
 * Open file.
 *
 * @throws IOException
 *             Signals that an I/O exception has occurred.
 */
@SuppressWarnings("resource")
protected void openFile() throws IOException {
    synchronized (locked) {
        while (locked.containsKey(filename) && locked.get(filename)) {
            try {
                locked.wait();
            } catch (final InterruptedException e) {
            }
        }
        locked.put(filename, true);

        final File file = new File(filename);
        if (!file.exists()) {
            locked.put(filename, false);
            locked.notifyAll();
            throw new IllegalStateException("Warning: File doesn't exist (anymore):'" + filename + "'");
        }

        channel = new RandomAccessFile(file, "rw").getChannel();

        try {
            // TODO: add support for shared locks, allowing parallel reading
            // operations.
            lock = channel.lock();
        } catch (final Exception e) {
            channel.close();
            channel = null;
            lock = null;
            locked.put(filename, false);
            locked.notifyAll();
            throw new IllegalStateException("error, couldn't obtain file lock on:" + filename, e);
        }
        fis = new BufferedInputStream(Channels.newInputStream(channel));
        fos = new BufferedOutputStream(Channels.newOutputStream(channel));
    }
}
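A caveat both of these Eve state classes depend on: Channels.newInputStream and Channels.newOutputStream wrap the same FileChannel, so the resulting streams share the channel's position. A minimal sketch (using a hypothetical temp file) showing that the channel must be explicitly repositioned before re-reading what was just written:

import java.io.InputStream;
import java.io.OutputStream;
import java.nio.channels.Channels;
import java.nio.channels.FileChannel;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;

public class SharedPositionExample {
    public static void main(String[] args) throws Exception {
        Path tmp = Files.createTempFile("state", ".json"); // hypothetical scratch file
        try (FileChannel channel = FileChannel.open(tmp,
                StandardOpenOption.READ, StandardOpenOption.WRITE)) {
            OutputStream fos = Channels.newOutputStream(channel);
            InputStream fis = Channels.newInputStream(channel);
            fos.write("{\"key\":\"value\"}".getBytes());
            // Rewind: the write advanced the shared position to end-of-file,
            // so reading without repositioning would see no data.
            channel.position(0);
            byte[] data = new byte[(int) channel.size()];
            int read = fis.read(data);
            System.out.println(new String(data, 0, read));
        } finally {
            Files.deleteIfExists(tmp);
        }
    }
}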