List of usage examples for org.apache.commons.io.IOUtils#copyLarge.
One overload is: public static long copyLarge(Reader input, Writer output) throws IOException
— it copies characters from a Reader to a Writer.
. From source file:org.apache.sentry.tests.e2e.solr.AbstractSolrSentryTestCase.java
/**
 * Makes a raw HTTP request to a specific cluster node. Node is of the format
 * host:port/context, e.g. "localhost:8983/solr".
 *
 * @param client             cloud client whose underlying HttpClient is reused
 * @param node               target node as host:port/context
 * @param httpMethod         one of GET, HEAD, POST, PUT
 * @param path               request path appended to the node URI
 * @param content            request body for POST/PUT
 * @param contentType        MIME type of the request body
 * @param expectedStatusCode status code the response is asserted to carry
 * @return the response body as a string (empty when the response has no entity)
 * @throws Exception on I/O failure or an unsupported HTTP method
 */
protected String makeHttpRequest(CloudSolrClient client, String node, String httpMethod, String path,
        byte[] content, String contentType, int expectedStatusCode) throws Exception {
    HttpClient httpClient = client.getLbClient().getHttpClient();
    URI uri = new URI("http://" + node + path);
    HttpRequestBase method = null;
    if ("GET".equals(httpMethod)) {
        method = new HttpGet(uri);
    } else if ("HEAD".equals(httpMethod)) {
        method = new HttpHead(uri);
    } else if ("POST".equals(httpMethod)) {
        method = new HttpPost(uri);
    } else if ("PUT".equals(httpMethod)) {
        method = new HttpPut(uri);
    } else {
        // BUG FIX: the original concatenated the still-null HttpRequestBase
        // ("Unsupported method: null") instead of the offending method name.
        throw new IOException("Unsupported method: " + httpMethod);
    }
    if (method instanceof HttpEntityEnclosingRequestBase) {
        HttpEntityEnclosingRequestBase entityEnclosing = (HttpEntityEnclosingRequestBase) method;
        ByteArrayEntity entityRequest = new ByteArrayEntity(content);
        entityRequest.setContentType(contentType);
        entityEnclosing.setEntity(entityRequest);
    }
    HttpEntity httpEntity = null;
    boolean success = false;
    String retValue = "";
    try {
        final HttpResponse response = httpClient.execute(method);
        httpEntity = response.getEntity();
        assertEquals(expectedStatusCode, response.getStatusLine().getStatusCode());
        if (httpEntity != null) {
            InputStream is = httpEntity.getContent();
            ByteArrayOutputStream os = new ByteArrayOutputStream();
            try {
                IOUtils.copyLarge(is, os);
                os.flush();
            } finally {
                IOUtils.closeQuietly(os);
                IOUtils.closeQuietly(is);
            }
            // NOTE(review): toString() decodes with the platform default charset —
            // confirm callers do not expect an explicit UTF-8 decode.
            retValue = os.toString();
        }
        success = true;
    } finally {
        if (!success) {
            // Release the connection on assertion failure or I/O error.
            EntityUtils.consumeQuietly(httpEntity);
            method.abort();
        }
    }
    return retValue;
}
From source file:org.apache.servicecomb.foundation.vertx.stream.PumpFromPart.java
private CompletableFuture<Void> toOutputStreamSync(OutputStream outputStream, boolean autoCloseOutputStream) { CompletableFuture<Void> future = new CompletableFuture<>(); try (InputStream inputStream = part.getInputStream()) { IOUtils.copyLarge(inputStream, outputStream); } catch (Throwable e) { future.completeExceptionally(e); }// w w w. j a va 2s .com if (autoCloseOutputStream) { try { outputStream.close(); } catch (Throwable e) { future.completeExceptionally(e); } } future.complete(null); return future; }
From source file:org.apache.servicecomb.foundation.vertx.stream.TestPumpFromPart.java
/**
 * Verifies that a failure while reading the part's input is propagated on both
 * pump paths: the async worker (InputStreamToReadStream) and the synchronous
 * copy (IOUtils.copyLarge) are each stubbed to fail with {@code error}.
 */
@Test
public void pump_read_error() throws IOException {
    new MockUp<InputStreamToReadStream>() {
        @Mock
        void readInWorker(Future<ReadResult> future) {
            // Simulate a read failure in the async worker.
            future.fail(error);
        }
    };
    new Expectations(IOUtils.class) {
        {
            // Make the synchronous copy path throw the same error.
            IOUtils.copyLarge((InputStream) any, (OutputStream) any);
            result = error;
        }
    };
    // Exercise both the context-less and the context-bound variants.
    pump_error(null);
    pump_error(context);
}
From source file:org.apache.servicecomb.foundation.vertx.stream.TestPumpFromPart.java
/**
 * Verifies that a failure while writing to the destination is propagated on
 * both pump paths: BufferOutputStream.write and the synchronous
 * IOUtils.copyLarge are each stubbed to throw {@code error}.
 */
@Test
public void pump_write_error() throws IOException {
    new MockUp<BufferOutputStream>() {
        @Mock
        void write(byte[] b) throws IOException {
            // Simulate a write failure in the async destination.
            throw error;
        }
    };
    new Expectations(IOUtils.class) {
        {
            // Make the synchronous copy path throw the same error.
            IOUtils.copyLarge((InputStream) any, (OutputStream) any);
            result = error;
        }
    };
    // Exercise both the context-less and the context-bound variants.
    pump_error(null);
    pump_error(context);
}
From source file:org.apache.sling.servlets.post.impl.helper.SlingFileUploadHandler.java
/**
 * Merges all previously uploaded chunk nodes with the last chunk's stream into
 * a temporary file and returns it.
 *
 * <p>Chunk nodes are named {@code <CHUNK_NODE_NAME>_<start>_<end>}; the loop
 * follows the chain by querying for the chunk whose start index is the previous
 * chunk's end index plus one, reassigning {@code nodeItr} each iteration.
 *
 * @param parentNode      node holding the chunk child nodes
 * @param lastChunkStream stream of the final chunk, appended after all stored chunks
 * @return a temp file containing the concatenated chunks
 * @throws PersistenceException if an I/O error occurs while writing the merge file
 * @throws RepositoryException  if the chunk numbering is ambiguous or JCR access fails
 */
private File mergeChunks(final Node parentNode, final InputStream lastChunkStream)
        throws PersistenceException, RepositoryException {
    OutputStream out = null;
    SequenceInputStream mergeStrm = null;
    File file = null;
    try {
        file = File.createTempFile("tmp-", "-mergechunk");
        out = new FileOutputStream(file);
        String startPattern = SlingPostConstants.CHUNK_NODE_NAME + "_" + "0_*";
        NodeIterator nodeItr = parentNode.getNodes(startPattern);
        // LinkedHashSet preserves chunk order for the SequenceInputStream.
        Set<InputStream> inpStrmSet = new LinkedHashSet<InputStream>();
        while (nodeItr.hasNext()) {
            if (nodeItr.getSize() > 1) {
                // Two chunks claim the same start index — refuse to merge.
                throw new RepositoryException("more than one node found for pattern: " + startPattern);
            }
            Node rangeNode = nodeItr.nextNode();
            inpStrmSet.add(rangeNode.getProperty(javax.jcr.Property.JCR_DATA).getBinary().getStream());
            log.debug("added chunk {} to merge stream", rangeNode.getName());
            // indexBounds = { startIndex, endIndex } parsed from the node name.
            String[] indexBounds = rangeNode.getName()
                    .substring((SlingPostConstants.CHUNK_NODE_NAME + "_").length()).split("_");
            // Next chunk starts at (this chunk's end index + 1).
            startPattern = SlingPostConstants.CHUNK_NODE_NAME + "_"
                    + String.valueOf(Long.valueOf(indexBounds[1]) + 1) + "_*";
            nodeItr = parentNode.getNodes(startPattern);
        }
        inpStrmSet.add(lastChunkStream);
        // NOTE(review): if an exception is thrown before this point, the chunk
        // streams already added to inpStrmSet are never closed — confirm whether
        // the JCR binary streams need explicit cleanup on the failure path.
        mergeStrm = new SequenceInputStream(Collections.enumeration(inpStrmSet));
        IOUtils.copyLarge(mergeStrm, out);
    } catch (IOException e) {
        throw new PersistenceException("excepiton occured", e);
    } finally {
        IOUtils.closeQuietly(out);
        IOUtils.closeQuietly(mergeStrm);
    }
    return file;
}
From source file:org.apache.sling.tracer.internal.JSONRecording.java
/**
 * Streams the recorded JSON (optionally in its compressed form) to {@code os}.
 *
 * @param os         destination stream
 * @param compressed whether to write the compressed representation
 * @return true if a recording existed and was written, false otherwise
 * @throws IOException if copying to the destination fails
 */
public boolean render(OutputStream os, boolean compressed) throws IOException {
    if (json == null) {
        return false;
    }
    IOUtils.copyLarge(getInputStream(compressed), os);
    return true;
}
From source file:org.apache.solr.servlet.HttpSolrCall.java
/**
 * Proxies the current servlet request to a remote Solr core and streams the
 * remote response back to the client.
 *
 * @param coreUrl base URL of the remote core; the original query string is appended
 * @param resp    servlet response to copy the remote status, headers and body into
 * @throws IOException never directly — I/O failures are reported via sendError
 */
private void remoteQuery(String coreUrl, HttpServletResponse resp) throws IOException {
    HttpRequestBase method = null;
    HttpEntity httpEntity = null;
    try {
        String urlstr = coreUrl + queryParams.toQueryString();
        boolean isPostOrPutRequest = "POST".equals(req.getMethod()) || "PUT".equals(req.getMethod());
        if ("GET".equals(req.getMethod())) {
            method = new HttpGet(urlstr);
        } else if ("HEAD".equals(req.getMethod())) {
            method = new HttpHead(urlstr);
        } else if (isPostOrPutRequest) {
            HttpEntityEnclosingRequestBase entityRequest = "POST".equals(req.getMethod()) ? new HttpPost(urlstr)
                    : new HttpPut(urlstr);
            // CloseShieldInputStream prevents HttpClient from closing the
            // container's request stream.
            InputStream in = new CloseShieldInputStream(req.getInputStream());
            HttpEntity entity = new InputStreamEntity(in, req.getContentLength());
            entityRequest.setEntity(entity);
            method = entityRequest;
        } else if ("DELETE".equals(req.getMethod())) {
            method = new HttpDelete(urlstr);
        } else {
            throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
                    "Unexpected method type: " + req.getMethod());
        }
        // Forward request headers, dropping host/authorization/accept.
        for (Enumeration<String> e = req.getHeaderNames(); e.hasMoreElements();) {
            String headerName = e.nextElement();
            if (!"host".equalsIgnoreCase(headerName) && !"authorization".equalsIgnoreCase(headerName)
                    && !"accept".equalsIgnoreCase(headerName)) {
                method.addHeader(headerName, req.getHeader(headerName));
            }
        }
        // These headers are not supported for HttpEntityEnclosingRequests;
        // HttpClient computes them itself.
        if (method instanceof HttpEntityEnclosingRequest) {
            method.removeHeaders(TRANSFER_ENCODING_HEADER);
            method.removeHeaders(CONTENT_LENGTH_HEADER);
        }
        final HttpResponse response = solrDispatchFilter.httpClient.execute(method,
                HttpClientUtil.createNewHttpClientRequestContext());
        int httpStatus = response.getStatusLine().getStatusCode();
        httpEntity = response.getEntity();
        resp.setStatus(httpStatus);
        // Copy response headers, dropping Transfer-Encoding and Connection
        // because they can cause chunked-encoding issues with Tomcat.
        for (HeaderIterator responseHeaders = response.headerIterator(); responseHeaders.hasNext();) {
            Header header = responseHeaders.nextHeader();
            if (header != null && !header.getName().equalsIgnoreCase(TRANSFER_ENCODING_HEADER)
                    && !header.getName().equalsIgnoreCase(CONNECTION_HEADER)) {
                resp.addHeader(header.getName(), header.getValue());
            }
        }
        if (httpEntity != null) {
            // NOTE(review): getContentEncoding() is the content coding (e.g.
            // gzip), not a charset — confirm setCharacterEncoding is intended.
            if (httpEntity.getContentEncoding() != null)
                resp.setCharacterEncoding(httpEntity.getContentEncoding().getValue());
            if (httpEntity.getContentType() != null)
                resp.setContentType(httpEntity.getContentType().getValue());
            InputStream is = httpEntity.getContent();
            OutputStream os = resp.getOutputStream();
            IOUtils.copyLarge(is, os);
        }
    } catch (IOException e) {
        sendError(new SolrException(SolrException.ErrorCode.SERVER_ERROR,
                "Error trying to proxy request for url: " + coreUrl, e));
    } finally {
        // Always drain/release the remote entity so the connection is reusable.
        Utils.consumeFully(httpEntity);
    }
}
From source file:org.apache.taverna.prov.Saver.java
private Path writeIfLocal(List<ExternalReferenceSPI> externalReferences, Path file, String mimeType) throws IOException { ValueCarryingExternalReference<?> valRef = null; for (ExternalReferenceSPI ref : externalReferences) { if (ref instanceof ValueCarryingExternalReference) { valRef = (ValueCarryingExternalReference<?>) ref; break; }// w w w.j a v a2 s . c om } if (valRef == null) { return null; } String fileExtension; try { fileExtension = MimeTypes.getDefaultMimeTypes().forName(mimeType).getExtension(); } catch (MimeTypeException e1) { fileExtension = ""; } Path targetFile = file.resolveSibling(file.getFileName() + fileExtension); MessageDigest sha = null; MessageDigest sha512 = null; OutputStream output = Files.newOutputStream(targetFile); try { try { sha = MessageDigest.getInstance("SHA"); output = new DigestOutputStream(output, sha); sha512 = MessageDigest.getInstance("SHA-512"); output = new DigestOutputStream(output, sha512); } catch (NoSuchAlgorithmException e) { logger.info("Could not find digest", e); } IOUtils.copyLarge(valRef.openStream(getContext()), output); } finally { output.close(); } if (sha != null) { getSha1sums().put(targetFile.toRealPath(), hexOfDigest(sha)); } if (sha512 != null) { sha512.digest(); getSha512sums().put(targetFile.toRealPath(), hexOfDigest(sha512)); } return targetFile; }
From source file:org.apache.usergrid.management.importer.S3ImportImpl.java
public File copyFileFromBucket(String blobFileName, String bucketName, String accessId, String secretKey) throws Exception { // setup to use JCloud BlobStore interface to AWS S3 Properties overrides = new Properties(); overrides.setProperty("s3" + ".identity", accessId); overrides.setProperty("s3" + ".credential", secretKey); final Iterable<? extends Module> MODULES = ImmutableSet.of(new JavaUrlHttpCommandExecutorServiceModule(), new Log4JLoggingModule(), new NettyPayloadModule()); BlobStoreContext context = ContextBuilder.newBuilder("s3").credentials(accessId, secretKey).modules(MODULES) .overrides(overrides).buildView(BlobStoreContext.class); BlobStore blobStore = context.getBlobStore(); // get file from configured bucket, copy it to local temp file Blob blob = blobStore.getBlob(bucketName, blobFileName); if (blob == null) { throw new RuntimeException("Blob file name " + blobFileName + " not found in bucket " + bucketName); }/*from w ww. j a v a 2 s.com*/ FileOutputStream fop = null; File tempFile; try { tempFile = File.createTempFile(bucketName, RandomStringUtils.randomAlphabetic(10)); tempFile.deleteOnExit(); fop = new FileOutputStream(tempFile); InputStream is = blob.getPayload().openStream(); IOUtils.copyLarge(is, fop); return tempFile; } finally { if (fop != null) { fop.close(); } } }
From source file:org.apache.xmlgraphics.ps.PSGenerator.java
/** * Embeds the Identity-H CMap file into the output stream, if that has not * already been done./* ww w. j av a 2 s.c om*/ * * @return true if embedding has actually been performed, false otherwise * (which means that a call to this method had already been made earlier) * @throws IOException in case of an I/O problem */ public boolean embedIdentityH() throws IOException { if (identityHEmbedded) { return false; } else { resTracker.registerNeededResource(getProcsetCIDInitResource()); writeDSCComment(DSCConstants.BEGIN_DOCUMENT, IDENTITY_H); InputStream cmap = PSGenerator.class.getResourceAsStream(IDENTITY_H); try { IOUtils.copyLarge(cmap, out); } finally { IOUtils.closeQuietly(cmap); } writeDSCComment(DSCConstants.END_DOCUMENT); resTracker.registerSuppliedResource(getIdentityHCMapResource()); identityHEmbedded = true; return true; } }