List of usage examples for com.google.common.io.ByteSource#size()
public long size() throws IOException
From source file:org.codice.alliance.nsili.endpoint.requests.OrderRequestImpl.java
/**
 * Writes {@code contents} to {@code destination} under {@code filename} plus the packaging
 * format's extension, then records the resulting file name in {@code sentFiles}.
 *
 * @param destination             location the file is written to
 * @param packagingSpecFormatType supplies the file extension and content type
 * @param filename                base file name (without extension)
 * @param sentFiles               accumulator of file names written so far; appended to on success
 * @param contents                source of the bytes to write
 * @throws IOException if the contents cannot be opened, sized, or written
 */
private void writeFile(FileLocation destination, PackagingSpecFormatType packagingSpecFormatType,
        String filename, List<String> sentFiles, ByteSource contents) throws IOException {
    String filenameWithExt = filename + packagingSpecFormatType.getExtension();
    // FIX: guarantee the opened stream is closed even if the delegate does not close it
    // (the original leaked the stream returned by contents.openStream()).
    try (java.io.InputStream stream = contents.openStream()) {
        writeFile(destination, stream, contents.size(), filenameWithExt,
                packagingSpecFormatType.getContentType());
    }
    sentFiles.add(filenameWithExt);
}
From source file:be.iminds.aiolos.cloud.jclouds.CloudManagerImplJClouds.java
private VMInstance provisionOSGiRuntime(String bndrun, List<String> resources, NodeMetadata node) throws Exception { int osgiPort = 9278; int httpPort = 8080; Activator.logger.log(LogService.LOG_INFO, "Waiting for node " + node.getHostname() + " to come online..."); SshClient sshClient = computeService.getContext().utils().sshForNode().apply(NodeMetadataBuilder .fromNodeMetadata(node).privateAddresses(Collections.<String>emptyList()).build()); Activator.logger.log(LogService.LOG_INFO, "Node " + node.getHostname() + " online, provision OSGi"); synchronized (this) { try {/*ww w . j av a 2 s . co m*/ sshClient.connect(); Activator.logger.log(LogService.LOG_INFO, "Connected to node " + node.getHostname() + " at ip " + sshClient.getHostAddress() + ", uploading necessary files..."); // set build.bnd and copy ext folder sshClient.exec("mkdir -p /tmp/AIOLOS/cnf/ext"); sshClient.exec("touch /tmp/AIOLOS/cnf/build.bnd"); File ext = new File("../cnf/ext"); for (String name : ext.list()) { uploadFile(sshClient, "../cnf/ext/" + name, "/tmp/AIOLOS/cnf/ext/" + name); } // upload bnd String bnd = null; File bndDir = new File("../cnf/plugins/biz.aQute.bnd"); for (String name : bndDir.list()) { bnd = name; break; } if (bnd == null) throw new Exception("No bnd present..."); uploadFile(sshClient, "../cnf/plugins/biz.aQute.bnd/" + bnd, "/tmp/AIOLOS/cnf/plugins/biz.aQute.bnd/" + bnd); // upload bnd repository plugin TODO avoid hard coded url? 
uploadFile(sshClient, "../cnf/plugins/biz.aQute.repository/biz.aQute.repository-2.1.0.jar", "/tmp/AIOLOS/cnf/plugins/biz.aQute.repository/biz.aQute.repository-2.1.0.jar"); // copy all bndrun files present (can be used for cloudmanagers on the new machine) sshClient.exec("mkdir -p /tmp/AIOLOS/tools"); for (File file : getFilteredBndRunFiles()) { uploadFile(sshClient, file.getAbsolutePath(), "/tmp/AIOLOS/tools/" + file.getName()); } // Set the rsa.ip property to the public ip of the VM try { Properties run = new Properties(); run.load(new FileInputStream(new File(bndrun))); String runproperties = run.getProperty("-runproperties"); // check for port properties try { Properties props = new Properties(); props.load(new ByteArrayInputStream(runproperties.replaceAll(",", "\n").getBytes("UTF-8"))); String osgiPortString = props.getProperty("rsa.port"); if (osgiPortString != null) { osgiPort = Integer.parseInt(osgiPortString); } String httpPortString = props.getProperty("org.osgi.service.http.port"); if (httpPortString != null) { httpPort = Integer.parseInt(httpPortString); } } catch (Exception e) { } String publicIP = node.getPublicAddresses().iterator().next(); String privateIP = node.getPrivateAddresses().iterator().next(); if (runproperties == null) runproperties = "rsa.ip=" + publicIP + ",private.ip=" + privateIP + ",public.ip=" + publicIP; else runproperties += ",rsa.ip=" + publicIP + ",private.ip=" + privateIP + ",public.ip=" + publicIP; run.put("-runproperties", runproperties); ByteArrayOutputStream bao = new ByteArrayOutputStream(); run.store(bao, "bnd run configuration"); bao.flush(); ByteSource byteSource = ByteSource.wrap(bao.toByteArray()); Payload payload = new ByteSourcePayload(byteSource); payload.getContentMetadata().setContentLength(byteSource.size()); bao.close(); sshClient.put("/tmp/AIOLOS/tools/" + bndrun, payload); } catch (Exception e) { throw new Exception("Invalid bndrun configuration provided " + bndrun); } // copy resources if (resources != null) 
{ sshClient.exec("mkdir -p /tmp/AIOLOS/tools/resources"); for (String r : resources) { File f = new File(r); ByteSource byteSource = Files.asByteSource(f); Payload payload = new ByteSourcePayload(byteSource); payload.getContentMetadata().setContentLength(byteSource.size()); sshClient.put("/tmp/AIOLOS/tools/resources/" + f.getName(), payload); } } if (publicKeyFile != null && publicKeyFile.exists()) { try { uploadFile(sshClient, publicKeyFile.getAbsolutePath(), "/tmp/AIOLOS/tools/resources/shared.pub"); } catch (IOException e) { } } // upload repositories // TODO don't hard code this? uploadDir(sshClient, "../cnf/localrepo", "/tmp/AIOLOS/cnf/localrepo"); uploadDir(sshClient, "../cnf/releaserepo", "/tmp/AIOLOS/cnf/releaserepo"); uploadDir(sshClient, "../tools/generated/workspacerepo", "/tmp/AIOLOS/tools/generated/workspacerepo"); // start ByteSource byteSource = ByteSource .wrap(buildBndRunScript(bnd, bndrun, node.getOperatingSystem()).getBytes()); Payload payload = new ByteSourcePayload(byteSource); payload.getContentMetadata().setContentLength(byteSource.size()); sshClient.put("/tmp/AIOLOS/init.sh", payload); sshClient.exec("chmod a+x /tmp/AIOLOS/init.sh"); Activator.logger.log(LogService.LOG_INFO, "Start OSGi on node " + node.getHostname() + " ..."); ExecResponse response = sshClient .exec("nohup /tmp/AIOLOS/init.sh >> /tmp/AIOLOS/init.log 2>&1 < /dev/null"); if (response.getExitStatus() != 0) { Activator.logger.log(LogService.LOG_ERROR, "Execution of script failed: " + response.getError()); } } finally { if (sshClient != null) sshClient.disconnect(); } } return new VMInstance(node.getId(), node.getUri(), node.getName(), node.getGroup(), node.getImageId(), node.getStatus().name(), node.getHostname(), node.getPrivateAddresses(), node.getPublicAddresses(), node.getHardware().getName(), osgiPort, httpPort); }
From source file:com.facebook.buck.artifact_cache.ThriftArtifactCache.java
@Override protected void storeImpl(final ArtifactInfo info, final Path file, final HttpArtifactCacheEvent.Finished.Builder eventBuilder) throws IOException { final ByteSource artifact = new ByteSource() { @Override//from w ww. j a v a 2 s . co m public InputStream openStream() throws IOException { return projectFilesystem.newFileInputStream(file); } }; BuckCacheStoreRequest storeRequest = new BuckCacheStoreRequest(); ArtifactMetadata artifactMetadata = infoToMetadata(info, artifact, repository, scheduleType, distributedBuildModeEnabled); storeRequest.setMetadata(artifactMetadata); PayloadInfo payloadInfo = new PayloadInfo(); long artifactSizeBytes = artifact.size(); payloadInfo.setSizeBytes(artifactSizeBytes); BuckCacheRequest cacheRequest = new BuckCacheRequest(); cacheRequest.addToPayloads(payloadInfo); cacheRequest.setType(BuckCacheRequestType.STORE); cacheRequest.setStoreRequest(storeRequest); if (LOG.isVerboseEnabled()) { LOG.verbose(String.format("Storing artifact with metadata: [%s].", ThriftUtil.thriftToDebugJson(artifactMetadata))); } final ThriftArtifactCacheProtocol.Request request = ThriftArtifactCacheProtocol.createRequest(PROTOCOL, cacheRequest, artifact); Request.Builder builder = toOkHttpRequest(request); eventBuilder.getStoreBuilder().setRequestSizeBytes(request.getRequestLengthBytes()); try (HttpResponse httpResponse = storeClient.makeRequest(hybridThriftEndpoint, builder)) { if (httpResponse.code() != 200) { throw new IOException(String.format( "Failed to store cache artifact with HTTP status code [%d] " + " to url [%s] for build target [%s] that has size [%d] bytes.", httpResponse.code(), httpResponse.requestUrl(), info.getBuildTarget().orElse(null), artifactSizeBytes)); } try (ThriftArtifactCacheProtocol.Response response = ThriftArtifactCacheProtocol.parseResponse(PROTOCOL, httpResponse.getBody())) { if (!response.getThriftData().isWasSuccessful()) { reportFailure( "Failed to store artifact with thriftErrorMessage=[%s] " + "url=[%s] 
artifactSizeBytes=[%d]", response.getThriftData().getErrorMessage(), httpResponse.requestUrl(), artifactSizeBytes); } eventBuilder.getStoreBuilder() .setArtifactContentHash(storeRequest.getMetadata().artifactPayloadMd5); eventBuilder.getStoreBuilder().setWasStoreSuccessful(response.getThriftData().isWasSuccessful()); } } }
From source file:com.tinspx.util.io.ByteUtils.java
/**
 * Verifies that {@code source} fits in a single Java byte array and returns its size as an int.
 *
 * @param source the byte source whose size is checked
 * @return the size of {@code source}, narrowed to {@code int}
 * @throws IOException      if the size of {@code source} cannot be determined
 * @throws OutOfMemoryError if the source exceeds {@link Integer#MAX_VALUE} bytes
 */
static int checkByteSourceSize(ByteSource source) throws IOException {
    final long byteCount = source.size();
    if (byteCount <= Integer.MAX_VALUE) {
        return (int) byteCount;
    }
    // Too big for any byte[]; mirror the JVM's failure mode for oversized allocations.
    throw new OutOfMemoryError(String.format("%s is too large (%d) for byte array", source, byteCount));
}
From source file:org.codice.alliance.nsili.endpoint.requests.OrderRequestImpl.java
/**
 * Packages the given resources according to {@code packagingSpecFormatType} and writes the
 * result(s) to {@code destination}.
 *
 * <p>With more than one file, the FILESUNC/FILESCOMPRESS/FILESGZIP formats write each file
 * separately with a numeric counter inserted into the name, while FILESZIP and the TAR*
 * formats aggregate all files into a single archive. With exactly one file the counter is
 * omitted.
 *
 * @param destination             location the packaged data is written to
 * @param packagingSpecFormatType requested packaging/compression format
 * @param files                   resources to deliver; an empty list writes nothing
 * @param filename                base name for the delivered file(s)
 * @return the names of all files actually written
 * @throws IOException if any resource cannot be read or written
 */
private List<String> writeData(FileLocation destination, PackagingSpecFormatType packagingSpecFormatType,
        List<ResourceContainer> files, String filename) throws IOException {
    List<String> sentFiles = new ArrayList<>();
    if (!files.isEmpty()) {
        if (files.size() > 1) {
            // NOTE(review): totalNum is files.size() + 1, so counters run 001..N out of N+1 —
            // confirm this off-by-one is intended by the NSILI packaging spec.
            int totalNum = files.size() + 1;
            String totalNumPortion = String.format(FILE_COUNT_FORMAT, totalNum);
            switch (packagingSpecFormatType) {
            case FILESUNC: {
                // Each file sent unchanged under its own numbered name.
                int currNum = 1;
                for (ResourceContainer file : files) {
                    String currNumPortion = String.format(FILE_COUNT_FORMAT, currNum);
                    String currFileName = filename + "." + currNumPortion + "." + totalNumPortion;
                    writeFile(destination, file.getInputStream(), file.getSize(), currFileName,
                            file.getMimeTypeValue());
                    currNum++;
                    sentFiles.add(currFileName);
                }
            }
                break;
            case FILESCOMPRESS: {
                // Each file zipped individually.
                int currNum = 1;
                for (ResourceContainer file : files) {
                    try (TemporaryFileBackedOutputStream fos = new TemporaryFileBackedOutputStream(
                            MAX_MEMORY_SIZE); ZipOutputStream zipOut = new ZipOutputStream(fos)) {
                        getZip(zipOut, file.getInputStream(), file.getName());
                        ByteSource contents = fos.asByteSource();
                        String currNumPortion = String.format(FILE_COUNT_FORMAT, currNum);
                        String currFileName = filename + "." + currNumPortion + "." + totalNumPortion
                                + packagingSpecFormatType.getExtension();
                        writeFile(destination, contents.openStream(), contents.size(), currFileName,
                                packagingSpecFormatType.getContentType());
                        sentFiles.add(currFileName);
                        currNum++;
                    }
                }
            }
                break;
            case FILESZIP: {
                // All files aggregated into one zip archive.
                try (TemporaryFileBackedOutputStream fos = new TemporaryFileBackedOutputStream(
                        MAX_MEMORY_SIZE); ZipOutputStream zipOut = new ZipOutputStream(fos)) {
                    getZip(zipOut, files);
                    ByteSource zip = fos.asByteSource();
                    writeFile(destination, packagingSpecFormatType, filename, sentFiles, zip);
                }
            }
                break;
            case FILESGZIP: {
                // Each file gzipped individually (gzip has no multi-entry container).
                int currNum = 1;
                for (ResourceContainer file : files) {
                    try (TemporaryFileBackedOutputStream fos = new TemporaryFileBackedOutputStream(
                            MAX_MEMORY_SIZE); GZIPOutputStream zipOut = new GZIPOutputStream(fos)) {
                        getGzip(zipOut, file.getInputStream());
                        ByteSource contents = fos.asByteSource();
                        String currNumPortion = String.format(FILE_COUNT_FORMAT, currNum);
                        String currFileName = filename + "." + currNumPortion + "." + totalNumPortion
                                + packagingSpecFormatType.getExtension();
                        writeFile(destination, contents.openStream(), contents.size(), currFileName,
                                packagingSpecFormatType.getContentType());
                        sentFiles.add(currFileName);
                        currNum++;
                    }
                }
            }
                break;
            case TARUNC: {
                // All files into one uncompressed tar archive.
                try (TemporaryFileBackedOutputStream fos = new TemporaryFileBackedOutputStream(
                        MAX_MEMORY_SIZE); TarOutputStream tarOut = new TarOutputStream(fos)) {
                    getTar(tarOut, files);
                    ByteSource tar = fos.asByteSource();
                    writeFile(destination, packagingSpecFormatType, filename, sentFiles, tar);
                }
            }
                break;
            case TARZIP: {
                // Tar all files, then zip the tar.
                try (TemporaryFileBackedOutputStream tarFos = new TemporaryFileBackedOutputStream(
                        MAX_MEMORY_SIZE); TarOutputStream tarOut = new TarOutputStream(tarFos)) {
                    getTar(tarOut, files);
                    try (TemporaryFileBackedOutputStream zipFos = new TemporaryFileBackedOutputStream(
                            MAX_MEMORY_SIZE); ZipOutputStream zipOut = new ZipOutputStream(zipFos)) {
                        getZip(zipOut, tarFos.asByteSource().openStream(), filename + ".tar");
                        ByteSource zip = zipFos.asByteSource();
                        writeFile(destination, packagingSpecFormatType, filename, sentFiles, zip);
                    }
                }
            }
                break;
            case TARGZIP: {
                // Tar all files, then gzip the tar.
                try (TemporaryFileBackedOutputStream tarFos = new TemporaryFileBackedOutputStream(
                        MAX_MEMORY_SIZE); TarOutputStream tarOut = new TarOutputStream(tarFos)) {
                    getTar(tarOut, files);
                    try (TemporaryFileBackedOutputStream gzipFos = new TemporaryFileBackedOutputStream(
                            MAX_MEMORY_SIZE); GZIPOutputStream zipOut = new GZIPOutputStream(gzipFos)) {
                        getGzip(zipOut, tarFos.asByteSource().openStream());
                        ByteSource zip = gzipFos.asByteSource();
                        writeFile(destination, packagingSpecFormatType, filename, sentFiles, zip);
                    }
                }
            }
                break;
            case TARCOMPRESS: {
                // Tar all files, then zip the tar.
                try (TemporaryFileBackedOutputStream tarFos = new TemporaryFileBackedOutputStream(
                        MAX_MEMORY_SIZE); TarOutputStream tarOut = new TarOutputStream(tarFos)) {
                    getTar(tarOut, files);
                    try (TemporaryFileBackedOutputStream zipFos = new TemporaryFileBackedOutputStream(
                            MAX_MEMORY_SIZE); ZipOutputStream zipOut = new ZipOutputStream(zipFos)) {
                        getZip(zipOut, tarFos.asByteSource().openStream(), filename + ".tar");
                        writeFile(destination, packagingSpecFormatType, filename, sentFiles,
                                zipFos.asByteSource());
                    }
                }
            }
                break;
            default:
                break;
            }
        } else {
            ResourceContainer file = files.iterator().next();
            switch (packagingSpecFormatType) {
            case FILESUNC: {
                writeFile(destination, file.getInputStream(), file.getSize(), filename,
                        file.getMimeTypeValue());
                sentFiles.add(filename);
            }
                break;
            case FILESCOMPRESS: {
                try (TemporaryFileBackedOutputStream fos = new TemporaryFileBackedOutputStream(
                        MAX_MEMORY_SIZE); ZipOutputStream zipOut = new ZipOutputStream(fos)) {
                    getZip(zipOut, file.getInputStream(), file.getName());
                    ByteSource contents = fos.asByteSource();
                    writeFile(destination, packagingSpecFormatType, filename, sentFiles, contents);
                }
            }
                break;
            case TARUNC:
                try (TemporaryFileBackedOutputStream fos = new TemporaryFileBackedOutputStream(
                        MAX_MEMORY_SIZE); TarOutputStream tarOut = new TarOutputStream(fos)) {
                    getTar(tarOut, file);
                    ByteSource contents = fos.asByteSource();
                    writeFile(destination, packagingSpecFormatType, filename, sentFiles, contents);
                }
                break;
            case TARZIP: {
                try (TemporaryFileBackedOutputStream tarFos = new TemporaryFileBackedOutputStream(
                        MAX_MEMORY_SIZE); TarOutputStream tarOut = new TarOutputStream(tarFos)) {
                    getTar(tarOut, file);
                    try (TemporaryFileBackedOutputStream zipFos = new TemporaryFileBackedOutputStream(
                            MAX_MEMORY_SIZE); ZipOutputStream zipOut = new ZipOutputStream(zipFos)) {
                        getZip(zipOut, tarFos.asByteSource().openStream(), filename + ".tar");
                        ByteSource contents = zipFos.asByteSource();
                        writeFile(destination, packagingSpecFormatType, filename, sentFiles, contents);
                    }
                }
            }
                break;
            case FILESZIP:
                // FIX: this branch previously gzipped the file (duplicating FILESGZIP below);
                // FILESZIP must produce a zip archive, matching the multi-file branch above.
                try (TemporaryFileBackedOutputStream fos = new TemporaryFileBackedOutputStream(
                        MAX_MEMORY_SIZE); ZipOutputStream zipOut = new ZipOutputStream(fos)) {
                    getZip(zipOut, file.getInputStream(), file.getName());
                    ByteSource contents = fos.asByteSource();
                    writeFile(destination, packagingSpecFormatType, filename, sentFiles, contents);
                }
                break;
            case TARGZIP: {
                try (TemporaryFileBackedOutputStream tarFos = new TemporaryFileBackedOutputStream(
                        MAX_MEMORY_SIZE); TarOutputStream tarOut = new TarOutputStream(tarFos)) {
                    getTar(tarOut, file);
                    try (TemporaryFileBackedOutputStream gzipFos = new TemporaryFileBackedOutputStream(
                            MAX_MEMORY_SIZE); GZIPOutputStream zipOut = new GZIPOutputStream(gzipFos)) {
                        getGzip(zipOut, tarFos.asByteSource().openStream());
                        ByteSource contents = gzipFos.asByteSource();
                        writeFile(destination, packagingSpecFormatType, filename, sentFiles, contents);
                    }
                }
            }
                break;
            case FILESGZIP:
                try (TemporaryFileBackedOutputStream fos = new TemporaryFileBackedOutputStream(
                        MAX_MEMORY_SIZE); GZIPOutputStream zipOut = new GZIPOutputStream(fos)) {
                    getGzip(zipOut, file.getInputStream());
                    ByteSource contents = fos.asByteSource();
                    writeFile(destination, packagingSpecFormatType, filename, sentFiles, contents);
                }
                break;
            case TARCOMPRESS: {
                try (TemporaryFileBackedOutputStream tarFos = new TemporaryFileBackedOutputStream(
                        MAX_MEMORY_SIZE); TarOutputStream tarOut = new TarOutputStream(tarFos)) {
                    getTar(tarOut, file);
                    try (TemporaryFileBackedOutputStream zipFos = new TemporaryFileBackedOutputStream(
                            MAX_MEMORY_SIZE); ZipOutputStream zipOut = new ZipOutputStream(zipFos)) {
                        getZip(zipOut, tarFos.asByteSource().openStream(), filename + ".tar");
                        ByteSource contents = zipFos.asByteSource();
                        writeFile(destination, packagingSpecFormatType, filename, sentFiles, contents);
                    }
                }
            }
                break;
            default:
                break;
            }
        }
    }
    return sentFiles;
}
From source file:org.gaul.s3proxy.S3ProxyHandler.java
private void handleInitiateMultipartUpload(HttpServletRequest request, HttpServletResponse response, BlobStore blobStore, String containerName, String blobName) throws IOException { ByteSource payload = ByteSource.empty(); BlobBuilder.PayloadBlobBuilder builder = blobStore.blobBuilder(blobName).payload(payload); addContentMetdataFromHttpRequest(builder, request); builder.contentLength(payload.size()); Blob blob = builder.build();/* ww w.j av a2 s. c o m*/ // S3 requires blob metadata during the initiate call while Azure and // Swift require it in the complete call. Store a stub blob which // allows reproducing this metadata later. blobStore.putBlob(containerName, blob); MultipartUpload mpu = blobStore.initiateMultipartUpload(containerName, blob.getMetadata()); try (Writer writer = response.getWriter()) { XMLStreamWriter xml = xmlOutputFactory.createXMLStreamWriter(writer); xml.writeStartDocument(); xml.writeStartElement("InitiateMultipartUploadResult"); xml.writeDefaultNamespace(AWS_XMLNS); writeSimpleElement(xml, "Bucket", containerName); writeSimpleElement(xml, "Key", blobName); writeSimpleElement(xml, "UploadId", mpu.id()); xml.writeEndElement(); xml.flush(); } catch (XMLStreamException xse) { throw new IOException(xse); } }
From source file:org.jclouds.kinetic.strategy.internal.KineticStorageStrategyImpl.java
/**
 * Builds a {@code Blob} for the given container/key whose payload is {@code byteSource},
 * populating content metadata and user metadata from the backing file's extended attributes
 * when the filesystem supports them; otherwise length and MD5 are derived from the source.
 *
 * @param container  name of the container holding the blob
 * @param key        blob key (also the backing file name)
 * @param byteSource payload bytes of the blob
 * @return the assembled blob with container, last-modified, size and (if known) ETag set
 */
private Blob createBlobFromByteSource(final String container, final String key,
        final ByteSource byteSource) {
    BlobBuilder blobBuilder = blobBuilders.get();
    blobBuilder.name(key);
    File backingFile = getFileForBlobKey(container, key);
    try {
        UserDefinedFileAttributeView attrView = getUserDefinedFileAttributeView(backingFile.toPath());
        if (attrView == null) {
            // No extended-attribute support: derive length and MD5 from the source itself.
            blobBuilder.payload(byteSource).contentLength(byteSource.size())
                    .contentMD5(byteSource.hash(Hashing.md5()).asBytes());
        } else {
            Set<String> attributes = ImmutableSet.copyOf(attrView.list());
            String cacheControl = readStringAttributeIfPresent(attrView, attributes, XATTR_CACHE_CONTROL);
            String contentDisposition =
                    readStringAttributeIfPresent(attrView, attributes, XATTR_CONTENT_DISPOSITION);
            String contentEncoding =
                    readStringAttributeIfPresent(attrView, attributes, XATTR_CONTENT_ENCODING);
            String contentLanguage =
                    readStringAttributeIfPresent(attrView, attributes, XATTR_CONTENT_LANGUAGE);
            String contentType = readStringAttributeIfPresent(attrView, attributes, XATTR_CONTENT_TYPE);
            if (contentType == null && autoDetectContentType) {
                contentType = probeContentType(backingFile.toPath());
            }
            HashCode md5 = null;
            if (attributes.contains(XATTR_CONTENT_MD5)) {
                ByteBuffer md5Buf = ByteBuffer.allocate(attrView.size(XATTR_CONTENT_MD5));
                attrView.read(XATTR_CONTENT_MD5, md5Buf);
                md5 = HashCode.fromBytes(md5Buf.array());
            }
            Date expires = null;
            if (attributes.contains(XATTR_EXPIRES)) {
                ByteBuffer expiresBuf = ByteBuffer.allocate(attrView.size(XATTR_EXPIRES));
                attrView.read(XATTR_EXPIRES, expiresBuf);
                expiresBuf.flip();
                expires = new Date(expiresBuf.asLongBuffer().get());
            }
            ImmutableMap.Builder<String, String> userMeta = ImmutableMap.builder();
            for (String attribute : attributes) {
                if (attribute.startsWith(XATTR_USER_METADATA_PREFIX)) {
                    String value = readStringAttributeIfPresent(attrView, attributes, attribute);
                    userMeta.put(attribute.substring(XATTR_USER_METADATA_PREFIX.length()), value);
                }
            }
            blobBuilder.payload(byteSource).cacheControl(cacheControl)
                    .contentDisposition(contentDisposition).contentEncoding(contentEncoding)
                    .contentLanguage(contentLanguage).contentLength(byteSource.size()).contentMD5(md5)
                    .contentType(contentType).expires(expires).userMetadata(userMeta.build());
        }
    } catch (IOException e) {
        throw Throwables.propagate(e);
    }
    Blob blob = blobBuilder.build();
    blob.getMetadata().setContainer(container);
    blob.getMetadata().setLastModified(new Date(backingFile.lastModified()));
    blob.getMetadata().setSize(backingFile.length());
    if (blob.getPayload().getContentMetadata().getContentMD5() != null) {
        blob.getMetadata().setETag(
                base16().lowerCase().encode(blob.getPayload().getContentMetadata().getContentMD5()));
    }
    return blob;
}