List of usage examples for java.nio.channels Channels newChannel
public static WritableByteChannel newChannel(OutputStream out)
From source file:gemlite.core.internal.support.system.WorkPathHelper.java
/**
 * Attempts to take an exclusive inter-process lock on a {@code node.lock}
 * file inside the given directory.
 *
 * @param directory directory in which the lock file is created
 * @return {@code true} if the lock was acquired, {@code false} if another
 *         process (or this JVM) already holds it or an I/O error occurred
 */
private static boolean lock(File directory) {
    String name = "node.lock";
    File lockfile = new File(directory, name);
    // Best-effort cleanup when this JVM exits.
    lockfile.deleteOnExit();
    try {
        // BUG FIX: Channels.newChannel(OutputStream) never returns a FileChannel,
        // so the previous cast threw ClassCastException at runtime. Obtain the
        // FileChannel directly from the stream instead.
        FileChannel fc = new FileOutputStream(lockfile).getChannel();
        try {
            lock = fc.tryLock();
            if (lock != null) {
                // Intentionally keep the channel open: closing it would release
                // the OS-level file lock we just acquired.
                return true;
            }
        } catch (OverlappingFileLockException e) {
            System.err.println(e.toString());
        }
        // Lock not acquired — close the channel so we don't leak it.
        fc.close();
    } catch (IOException x) {
        System.err.println(x.toString());
    }
    return false;
}
From source file:org.alfresco.services.ContentGetterImpl.java
@Override public Content getContentByNodeId(String nodeId, Long nodeVersion) { Content content = null;//from w w w. j a va2 s. c o m StringBuilder sb = new StringBuilder(nodeId); if (nodeVersion != null) { sb.append(";"); sb.append(nodeVersion); } ObjectId objectId = new ObjectIdImpl(sb.toString()); Session session = getCMISSession(); try { Document document = (Document) session.getObject(objectId); if (document != null) { if (document.isLatestVersion()) { String mimeType = (String) document.getProperty(PropertyIds.CONTENT_STREAM_MIME_TYPE) .getFirstValue(); BigInteger size = (BigInteger) document.getProperty(PropertyIds.CONTENT_STREAM_LENGTH) .getFirstValue(); ContentStream stream = document.getContentStream(); if (stream != null) { InputStream is = stream.getStream(); ReadableByteChannel channel = Channels.newChannel(is); content = new Content(channel, size.longValue()); } } else { logger.warn("Node " + nodeId + "." + nodeVersion + " not latest version"); } } else { logger.warn("Node " + nodeId + "." + nodeVersion + " not found"); } } catch (CmisObjectNotFoundException e) { logger.warn("Node " + nodeId + "." + nodeVersion + " not found"); } return content; }
From source file:net.amigocraft.mpt.command.InstallCommand.java
@SuppressWarnings("unchecked") public static void downloadPackage(String id) throws MPTException { JSONObject packages = (JSONObject) Main.packageStore.get("packages"); if (packages != null) { JSONObject pack = (JSONObject) packages.get(id); if (pack != null) { if (pack.containsKey("name") && pack.containsKey("version") && pack.containsKey("url")) { if (pack.containsKey("sha1") || !Config.ENFORCE_CHECKSUM) { String name = pack.get("name").toString(); String version = pack.get("version").toString(); String fullName = name + " v" + version; String url = pack.get("url").toString(); String sha1 = pack.containsKey("sha1") ? pack.get("sha1").toString() : ""; if (pack.containsKey("installed")) { //TODO: compare versions throw new MPTException(ID_COLOR + name + ERROR_COLOR + " is already installed"); }//from ww w . ja v a 2 s. co m try { URLConnection conn = new URL(url).openConnection(); conn.connect(); ReadableByteChannel rbc = Channels.newChannel(conn.getInputStream()); File file = new File(Main.plugin.getDataFolder(), "cache" + File.separator + id + ".zip"); file.setReadable(true, false); file.setWritable(true, false); file.getParentFile().mkdirs(); file.createNewFile(); FileOutputStream os = new FileOutputStream(file); os.getChannel().transferFrom(rbc, 0, MiscUtil.getFileSize(new URL(url))); os.close(); if (!sha1.isEmpty() && !sha1(file.getAbsolutePath()).equals(sha1)) { file.delete(); throw new MPTException(ERROR_COLOR + "Failed to install package " + ID_COLOR + fullName + ERROR_COLOR + ": checksum mismatch!"); } } catch (IOException ex) { throw new MPTException( ERROR_COLOR + "Failed to download package " + ID_COLOR + fullName); } } else throw new MPTException(ERROR_COLOR + "Package " + ID_COLOR + id + ERROR_COLOR + " is missing SHA-1 checksum! 
Aborting..."); } else throw new MPTException(ERROR_COLOR + "Package " + ID_COLOR + id + ERROR_COLOR + " is missing required elements!"); } else throw new MPTException(ERROR_COLOR + "Cannot find package with id " + ID_COLOR + id); } else { throw new MPTException(ERROR_COLOR + "Package store is malformed!"); } }
From source file:org.eclipse.jgit.lfs.server.fs.ObjectUploadListener.java
/**
 * Creates a listener that streams an incoming HTTP request body into the
 * large-object store of the given repository.
 *
 * @param repository
 *            the repository storing large objects
 * @param context
 *            async servlet context driving the upload
 * @param request
 *            the upload request whose body is read
 * @param response
 *            the response; its content type is set to the Git LFS JSON type
 * @param id
 *            id of the large object being written
 * @throws FileNotFoundException
 * @throws IOException
 */
public ObjectUploadListener(FileLfsRepository repository, AsyncContext context, HttpServletRequest request,
        HttpServletResponse response, AnyLongObjectId id) throws FileNotFoundException, IOException {
    this.context = context;
    this.response = response;
    // Read side: wrap the servlet input stream as an NIO channel.
    this.in = request.getInputStream();
    this.inChannel = Channels.newChannel(in);
    // Write side: repository output stream for the object, also as a channel.
    this.out = repository.getOutputStream(id);
    this.channel = Channels.newChannel(out);
    response.setContentType(Constants.CONTENT_TYPE_GIT_LFS_JSON);
}
From source file:org.apache.xmlgraphics.util.io.Base64Test.java
/** * Returns true if the contents of <tt>is1</tt> match the contents of * <tt>is2</tt>/*from w ww . j av a2 s .c o m*/ * * @throws IOException */ public static boolean compareStreams(final InputStream i1, final InputStream i2, final boolean skipws) throws IOException { try (final ReadableByteChannel ch1 = Channels.newChannel(i1)) { try (final ReadableByteChannel ch2 = Channels.newChannel(i2)) { final ByteBuffer buf1 = ByteBuffer.allocateDirect(1024); final ByteBuffer buf2 = ByteBuffer.allocateDirect(1024); while (true) { final int n1 = ch1.read(buf1); final int n2 = ch2.read(buf2); if (n1 == -1 || n2 == -1) { return n1 == n2; } buf1.flip(); buf2.flip(); for (int i = 0; i < Math.min(n1, n2); ++i) { if (buf1.get() != buf2.get()) { return false; } } buf1.compact(); buf2.compact(); } } } }
From source file:org.apache.beam.sdk.io.LocalFileSystem.java
@Override protected WritableByteChannel create(LocalResourceId resourceId, CreateOptions createOptions) throws IOException { LOG.debug("creating file {}", resourceId); File absoluteFile = resourceId.getPath().toFile().getAbsoluteFile(); if (absoluteFile.getParentFile() != null && !absoluteFile.getParentFile().exists() && !absoluteFile.getParentFile().mkdirs() && !absoluteFile.getParentFile().exists()) { throw new IOException("Unable to create parent directories for '" + resourceId + "'"); }//from w ww . j a v a 2 s .c om return Channels.newChannel(new BufferedOutputStream(new FileOutputStream(absoluteFile))); }
From source file:io.github.jeremgamer.preview.actions.Download.java
/**
 * Downloads {@code url} into the user's .rocketbuilder data directory as
 * data.zip, reports progress on every registered progress bar, then unzips
 * the archive in place and deletes it. All work happens on a background
 * thread; a second thread polls the file size to drive the progress bars.
 */
private void download() {
    GeneralSave gs = new GeneralSave();
    try {
        // Load the project name from the project's general.rbd save file.
        gs.load(new File("projects/" + Editor.getProjectName() + "/general.rbd"));
    } catch (IOException e1) {
        e1.printStackTrace();
    }
    name = gs.getString("name");
    new Thread(new Runnable() {
        @Override
        public void run() {
            if (url == null) {
                // No download URL configured — nothing to do.
            } else {
                // NOTE(review): paths are Windows-specific (AppData/Roaming) —
                // confirm this tool only targets Windows.
                File archive = new File(System.getProperty("user.home") + "/AppData/Roaming/.rocketbuilder/"
                        + name + "/data.zip");
                File outputFolder = new File(
                        System.getProperty("user.home") + "/AppData/Roaming/.rocketbuilder/" + name);
                new File(System.getProperty("user.home") + "/AppData/Roaming/.rocketbuilder/" + name).mkdirs();
                URL webFile;
                try {
                    webFile = new URL(url);
                    // Channel over the remote stream; a separate connection is
                    // opened below only to obtain the content length.
                    ReadableByteChannel rbc = Channels.newChannel(webFile.openStream());
                    fos = new FileOutputStream(System.getProperty("user.home")
                            + "/AppData/Roaming/.rocketbuilder/" + name + "/data.zip");
                    HttpURLConnection httpConn = (HttpURLConnection) webFile.openConnection();
                    totalBytes = httpConn.getContentLength();
                    // Progress thread: polls the output file's size and pushes
                    // percentage/labels to every registered bar.
                    // NOTE(review): this busy-waits without sleeping — verify
                    // CPU usage is acceptable during long downloads.
                    new Thread(new Runnable() {
                        @Override
                        public void run() {
                            try {
                                while (bytesCopied < totalBytes) {
                                    for (CustomProgressBar bar : barList) {
                                        bytesCopied = fos.getChannel().size();
                                        progressValue = (int) (100 * bytesCopied / totalBytes);
                                        bar.setValue(progressValue);
                                        if (bar.isStringPainted()) {
                                            bar.setString(progressValue + "% " + bytesCopied / 1000 + "/"
                                                    + totalBytes / 1000 + "Kb tape " + step + "/2");
                                        }
                                    }
                                }
                            } catch (IOException e) {
                                e.printStackTrace();
                            }
                        }
                    }).start();
                    // Blocking transfer of the whole remote stream to disk.
                    fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
                    fos.close();
                    step = 2;
                    for (CustomProgressBar bar : barList) {
                        if (bar.isStringPainted()) {
                            bar.setString("tape " + step + "/2 : Extraction");
                        }
                    }
                    // Wait (up to ~10s) until the archive can be opened for
                    // read/write, i.e. no other process still holds it.
                    for (int timeout = 100; timeout > 0; timeout--) {
                        RandomAccessFile ran = null;
                        try {
                            ran = new RandomAccessFile(archive, "rw");
                            break;
                        } catch (Exception ex) {
                            // File still locked — retry after a short sleep.
                        } finally {
                            if (ran != null)
                                try {
                                    ran.close();
                                } catch (IOException ex) {
                                }
                            ran = null;
                        }
                        try {
                            Thread.sleep(100);
                        } catch (InterruptedException ex) {
                        }
                    }
                    // Extract every entry; Cp437 matches zip's legacy default
                    // entry-name encoding.
                    ZipFile zipFile = new ZipFile(archive, Charset.forName("Cp437"));
                    Enumeration<? extends ZipEntry> entries = zipFile.entries();
                    while (entries.hasMoreElements()) {
                        ZipEntry entry = entries.nextElement();
                        File entryDestination = new File(outputFolder, entry.getName());
                        entryDestination.getParentFile().mkdirs();
                        if (entry.isDirectory())
                            entryDestination.mkdirs();
                        else {
                            InputStream in = zipFile.getInputStream(entry);
                            OutputStream out = new FileOutputStream(entryDestination);
                            IOUtils.copy(in, out);
                            IOUtils.closeQuietly(in);
                            IOUtils.closeQuietly(out);
                            in.close();
                            out.close();
                        }
                    }
                    for (CustomProgressBar bar : barList) {
                        bar.setString("");
                    }
                    zipFile.close();
                    // The archive is no longer needed once extracted.
                    archive.delete();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
    }).start();
}
From source file:com.spectralogic.ds3client.commands.GetObjectRequest.java
/**
 * Builds a partial-object GET request that writes the retrieved bytes,
 * starting at {@code offset}, to the supplied output stream.
 */
public GetObjectRequest(final String bucketName, final String objectName, final UUID job, final long offset,
        @Nonnull final OutputStream stream) {
    // Fail fast before touching any state.
    Preconditions.checkNotNull(stream, "Stream may not be null.");
    this.bucketName = bucketName;
    this.objectName = objectName;
    this.offset = offset;
    this.job = job.toString();
    // Expose the caller's stream as an NIO channel for the response body.
    this.channel = Channels.newChannel(stream);
    this.updateQueryParam("offset", offset);
    this.updateQueryParam("job", job);
}
From source file:io.druid.segment.data.CompressedVSizeIntsIndexedWriterTest.java
/**
 * Round-trip check: writes {@code vals} through the writer with the given
 * chunk size, verifies the serialized size matches the list-based supplier,
 * then deserializes from the written bytes and verifies every value.
 */
private void checkSerializedSizeAndData(int chunkSize) throws Exception {
    CompressedVSizeIntsIndexedWriter writer = new CompressedVSizeIntsIndexedWriter(ioPeon, "test",
            vals.length > 0 ? Ints.max(vals) : 0, chunkSize, byteOrder, compressionStrategy);
    // Reference supplier built directly from the value list; its serialized
    // size is the expected size for the writer's output.
    CompressedVSizeIntsIndexedSupplier supplierFromList = CompressedVSizeIntsIndexedSupplier.fromList(
            Ints.asList(vals), vals.length > 0 ? Ints.max(vals) : 0, chunkSize, byteOrder,
            compressionStrategy);
    // Writer lifecycle: open, add all values, close before sizing/serializing.
    writer.open();
    for (int val : vals) {
        writer.add(val);
    }
    writer.close();
    long writtenLength = writer.getSerializedSize();
    final WritableByteChannel outputChannel = Channels.newChannel(ioPeon.makeOutputStream("output"));
    writer.writeToChannel(outputChannel);
    outputChannel.close();
    assertEquals(writtenLength, supplierFromList.getSerializedSize());
    // read from ByteBuffer and check values
    CompressedVSizeIntsIndexedSupplier supplierFromByteBuffer = CompressedVSizeIntsIndexedSupplier
            .fromByteBuffer(ByteBuffer.wrap(IOUtils.toByteArray(ioPeon.makeInputStream("output"))), byteOrder);
    IndexedInts indexedInts = supplierFromByteBuffer.get();
    for (int i = 0; i < vals.length; ++i) {
        assertEquals(vals[i], indexedInts.get(i));
    }
    CloseQuietly.close(indexedInts);
}
From source file:com.quanticate.opensource.compressingcontentstore.DecompressingContentReader.java
@Override protected ReadableByteChannel getDirectReadableChannel() throws ContentIOException { // Get a Channel onto the real data ReadableByteChannel rawChannel = getRawChannel(); // Wrap this as an InputStream - Commons Compress is Stream not Channel based // Note that Commons Compress needs to mark/reset a bit to identify InputStream rawInp = new BufferedInputStream(Channels.newInputStream(rawChannel), 32); // Try to process it as a compressed stream try {// ww w.ja v a2 s .co m CompressorInputStream decompressed = new CompressorStreamFactory().createCompressorInputStream(rawInp); logger.debug("Detected compressed data as " + decompressed.getClass().getName()); return Channels.newChannel(decompressed); } catch (CompressorException e) { logger.info("Unable to decompress " + realContentReader, e); } // Tidy up that channel, and re-fetch the real one try { rawInp.close(); rawChannel.close(); } catch (IOException e) { logger.warn("Error tidying up", e); } logger.debug("Using raw form for " + getContentUrl()); return getRawChannel(); }