List of usage examples for com.google.common.io.ByteSource
protected ByteSource()
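ByteSource is abstract, so the protected constructor above is only reachable from subclasses; the one method a subclass must supply is openStream(), which has to return a fresh, independent stream on every call. Every example below uses the same anonymous-subclass pattern. A minimal self-contained sketch (the class name and payload are illustrative, not taken from any project below):

import com.google.common.io.ByteSource;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

public class ByteSourceSketch {
  public static void main(String[] args) throws IOException {
    // The protected constructor is invoked implicitly by subclassing;
    // an anonymous class overriding openStream() is the usual pattern.
    ByteSource source = new ByteSource() {
      @Override
      public InputStream openStream() {
        // Each call must return a new, independent stream.
        return new ByteArrayInputStream("hello".getBytes(StandardCharsets.UTF_8));
      }
    };
    byte[] contents = source.read(); // reads one full stream
    long size = source.size();       // may open another stream to count bytes
    System.out.println(contents.length + " bytes, size() = " + size);
  }
}

Convenience methods such as read(), size(), hash(), and copyTo() are all built on top of openStream(), which is why the examples below only ever override that one method.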
From source file: com.facebook.buck.io.file.MorePaths.java

public static ByteSource asByteSource(Path path) {
  return new ByteSource() {
    @Override
    public InputStream openStream() throws IOException {
      return Files.newInputStream(path);
    }
  };
}
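Because the returned ByteSource re-opens the file on every openStream() call, it can be handed to Guava utilities that read it more than once. A hypothetical caller (the path and hash choice are illustrative, not from Buck; assumes Buck's MorePaths is on the classpath):

import com.facebook.buck.io.file.MorePaths;
import com.google.common.hash.Hashing;
import com.google.common.io.ByteSource;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;

class AsByteSourceDemo {
  public static void main(String[] args) throws IOException {
    Path path = Paths.get("build.log"); // illustrative path
    ByteSource source = MorePaths.asByteSource(path);
    // hash() and size() each open their own stream via openStream().
    System.out.println("sha256 = " + source.hash(Hashing.sha256()));
    System.out.println("bytes  = " + source.size());
  }
}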
From source file: com.facebook.buck.artifact_cache.HttpArtifactCache.java

@Override
protected void storeImpl(ArtifactInfo info, final Path file, final Finished.Builder eventBuilder)
    throws IOException {
  // Build the request, hitting the multi-key endpoint.
  Request.Builder builder = new Request.Builder();

  final HttpArtifactCacheBinaryProtocol.StoreRequest storeRequest =
      new HttpArtifactCacheBinaryProtocol.StoreRequest(
          info,
          new ByteSource() {
            @Override
            public InputStream openStream() throws IOException {
              return projectFilesystem.newFileInputStream(file);
            }
          });

  eventBuilder.getStoreBuilder().setRequestSizeBytes(storeRequest.getContentLength());

  // Wrap the file into a `RequestBody` which uses `ProjectFilesystem`.
  builder.put(
      new RequestBody() {
        @Override
        public MediaType contentType() {
          return OCTET_STREAM_CONTENT_TYPE;
        }

        @Override
        public long contentLength() throws IOException {
          return storeRequest.getContentLength();
        }

        @Override
        public void writeTo(BufferedSink bufferedSink) throws IOException {
          StoreWriteResult writeResult = storeRequest.write(bufferedSink.outputStream());
          eventBuilder.getStoreBuilder()
              .setArtifactContentHash(writeResult.getArtifactContentHashCode().toString());
        }
      });

  // Dispatch the store operation and verify it succeeded.
  try (HttpResponse response = storeClient.makeRequest("/artifacts/key", builder)) {
    final boolean requestFailed = response.code() != HttpURLConnection.HTTP_ACCEPTED;
    if (requestFailed) {
      reportFailure("store(%s, %s): unexpected response: %d", response.requestUrl(),
          info.getRuleKeys(), response.code());
    }
    eventBuilder.getStoreBuilder().setWasStoreSuccessful(!requestFailed);
  }
}
From source file: io.druid.storage.s3.S3DataSegmentPuller.java

public FileUtils.FileCopyResult getSegmentFiles(final S3Coords s3Coords, final File outDir)
    throws SegmentLoadingException {
  log.info("Pulling index at path[%s] to outDir[%s]", s3Coords, outDir);

  if (!isObjectInBucket(s3Coords)) {
    throw new SegmentLoadingException("IndexFile[%s] does not exist.", s3Coords);
  }
  if (!outDir.exists()) {
    outDir.mkdirs();
  }
  if (!outDir.isDirectory()) {
    throw new ISE("outDir[%s] must be a directory.", outDir);
  }

  try {
    final URI uri = URI.create(String.format("s3://%s/%s", s3Coords.bucket, s3Coords.path));
    final ByteSource byteSource = new ByteSource() {
      @Override
      public InputStream openStream() throws IOException {
        try {
          return buildFileObject(uri, s3Client).openInputStream();
        } catch (ServiceException e) {
          if (e.getCause() != null) {
            if (S3Utils.S3RETRY.apply(e)) {
              throw new IOException("Recoverable exception", e);
            }
          }
          throw Throwables.propagate(e);
        }
      }
    };
    if (CompressionUtils.isZip(s3Coords.path)) {
      final FileUtils.FileCopyResult result =
          CompressionUtils.unzip(byteSource, outDir, S3Utils.S3RETRY, true);
      log.info("Loaded %d bytes from [%s] to [%s]", result.size(), s3Coords.toString(),
          outDir.getAbsolutePath());
      return result;
    }
    if (CompressionUtils.isGz(s3Coords.path)) {
      final String fname = Files.getNameWithoutExtension(uri.getPath());
      final File outFile = new File(outDir, fname);
      final FileUtils.FileCopyResult result = CompressionUtils.gunzip(byteSource, outFile);
      log.info("Loaded %d bytes from [%s] to [%s]", result.size(), s3Coords.toString(),
          outFile.getAbsolutePath());
      return result;
    }
    throw new IAE("Do not know how to load file type at [%s]", uri.toString());
  } catch (Exception e) {
    try {
      org.apache.commons.io.FileUtils.deleteDirectory(outDir);
    } catch (IOException ioe) {
      log.warn(ioe, "Failed to remove output directory [%s] for segment pulled from [%s]",
          outDir.getAbsolutePath(), s3Coords.toString());
    }
    throw new SegmentLoadingException(e, e.getMessage());
  }
}
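The catch block inside openStream() is the interesting part: ServiceExceptions that S3Utils.S3RETRY classifies as recoverable are rethrown as IOException, because that is the signal the retry machinery downstream reacts to. A hypothetical standalone helper (not from Druid) showing the same convention, where retrying is as simple as calling openStream() again:

import com.google.common.io.ByteSource;
import java.io.IOException;
import java.io.InputStream;

class RetryOpen {
  // Wrappers rethrow recoverable failures as IOException, so a retry loop
  // only has to catch IOException and re-open the source.
  static InputStream openWithRetry(ByteSource source, int maxTries) throws IOException {
    if (maxTries <= 0) {
      throw new IllegalArgumentException("maxTries must be positive");
    }
    IOException last = null;
    for (int i = 0; i < maxTries; i++) {
      try {
        return source.openStream();
      } catch (IOException e) {
        last = e; // recoverable by the convention above; try again
      }
    }
    throw last;
  }
}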
From source file: io.druid.server.namespace.URIExtractionNamespaceFunctionFactory.java

@Override
public Callable<String> getCachePopulator(final URIExtractionNamespace extractionNamespace,
    final String lastVersion, final Map<String, String> cache) {
  final long lastCached = lastVersion == null ? JodaUtils.MIN_INSTANT : Long.parseLong(lastVersion);
  return new Callable<String>() {
    @Override
    public String call() {
      final URI originalUri = extractionNamespace.getUri();
      final SearchableVersionedDataFinder<URI> pullerRaw = pullers.get(originalUri.getScheme());
      if (pullerRaw == null) {
        throw new IAE("Unknown loader type[%s]. Known types are %s", originalUri.getScheme(),
            pullers.keySet());
      }
      if (!(pullerRaw instanceof URIDataPuller)) {
        throw new IAE("Cannot load data from location [%s]. Data pulling from [%s] not supported",
            originalUri.toString(), originalUri.getScheme());
      }
      final URIDataPuller puller = (URIDataPuller) pullerRaw;
      final String versionRegex = extractionNamespace.getVersionRegex();
      final URI uri = pullerRaw.getLatestVersion(originalUri,
          versionRegex == null ? null : Pattern.compile(versionRegex));
      if (uri == null) {
        throw new RuntimeException(new FileNotFoundException(
            String.format("Could not find match for pattern `%s` in [%s] for %s", versionRegex,
                originalUri, extractionNamespace)));
      }
      final String uriPath = uri.getPath();
      try {
        return RetryUtils.retry(new Callable<String>() {
          @Override
          public String call() throws Exception {
            final String version = puller.getVersion(uri);
            try {
              long lastModified = Long.parseLong(version);
              if (lastModified <= lastCached) {
                final DateTimeFormatter fmt = ISODateTimeFormat.dateTime();
                log.debug(
                    "URI [%s] for namespace [%s] was last modified [%s] but was last cached [%s]. Skipping",
                    uri.toString(), extractionNamespace.getNamespace(), fmt.print(lastModified),
                    fmt.print(lastCached));
                return version;
              }
            } catch (NumberFormatException ex) {
              log.debug(ex, "Failed to get last modified timestamp. Assuming no timestamp");
            }
            final ByteSource source;
            if (CompressionUtils.isGz(uriPath)) {
              // Simple gzip stream
              log.debug("Loading gz");
              source = new ByteSource() {
                @Override
                public InputStream openStream() throws IOException {
                  return CompressionUtils.gzipInputStream(puller.getInputStream(uri));
                }
              };
            } else {
              source = new ByteSource() {
                @Override
                public InputStream openStream() throws IOException {
                  return puller.getInputStream(uri);
                }
              };
            }
            final long lineCount = new MapPopulator<>(
                extractionNamespace.getNamespaceParseSpec().getParser()).populate(source, cache);
            log.info("Finished loading %d lines for namespace [%s]", lineCount,
                extractionNamespace.getNamespace());
            return version;
          }
        }, puller.shouldRetryPredicate(), DEFAULT_NUM_RETRIES);
      } catch (Exception e) {
        throw Throwables.propagate(e);
      }
    }
  };
}
From source file: com.facebook.buck.io.MorePaths.java

public static ByteSource asByteSource(final Path path) {
  return new ByteSource() {
    @Override
    public InputStream openStream() throws IOException {
      return Files.newInputStream(path);
    }
  };
}
From source file: com.facebook.buck.java.JarFattener.java

/** @return a {@link Step} that generates the fat jar info resource. */
private Step writeFatJarInfo(Path destination, final ImmutableMap<String, String> nativeLibraries) {
  ByteSource source = new ByteSource() {
    @Override
    public InputStream openStream() throws IOException {
      FatJar fatJar = new FatJar(FAT_JAR_INNER_JAR, nativeLibraries);
      ByteArrayOutputStream bytes = new ByteArrayOutputStream();
      try {
        fatJar.store(bytes);
      } catch (Exception e) {
        throw Throwables.propagate(e);
      }
      return new ByteArrayInputStream(bytes.toByteArray());
    }
  };
  return new WriteFileStep(getProjectFilesystem(), source, destination, /* executable */ false);
}
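The anonymous ByteSource here defers serialization: the FatJar object is only built and written out when the step actually runs and the source is opened. If eager serialization were acceptable, the same bytes could instead be handed over with ByteSource.wrap(bytes.toByteArray()) at construction time.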
From source file: com.metamx.common.CompressionUtils.java

/**
 * Unzip the pulled file to an output directory. This is only expected to work on zips with lone
 * files, and is not intended for zips with directory structures.
 *
 * @param pulledFile The file to unzip
 * @param outDir     The directory to store the contents of the file.
 *
 * @return a FileCopyResult of the files which were written to disk
 *
 * @throws IOException
 */
public static FileUtils.FileCopyResult unzip(final File pulledFile, final File outDir)
    throws IOException {
  if (!(outDir.exists() && outDir.isDirectory())) {
    throw new ISE("outDir[%s] must exist and be a directory", outDir);
  }
  log.info("Unzipping file[%s] to [%s]", pulledFile, outDir);
  final FileUtils.FileCopyResult result = new FileUtils.FileCopyResult();
  try (final ZipFile zipFile = new ZipFile(pulledFile)) {
    final Enumeration<? extends ZipEntry> enumeration = zipFile.entries();
    while (enumeration.hasMoreElements()) {
      final ZipEntry entry = enumeration.nextElement();
      result.addFiles(FileUtils.retryCopy(new ByteSource() {
        @Override
        public InputStream openStream() throws IOException {
          return new BufferedInputStream(zipFile.getInputStream(entry));
        }
      }, new File(outDir, entry.getName()), FileUtils.IS_EXCEPTION, DEFAULT_RETRY_COUNT).getFiles());
    }
  }
  return result;
}
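retryCopy relies on the defining property of ByteSource: a failed copy can simply call openStream() again on a fresh stream. Without the metamx retry wrapper, the per-entry copy could be written with plain Guava; a sketch under that assumption (names are illustrative):

import com.google.common.io.ByteSource;
import com.google.common.io.Files;
import java.io.File;
import java.io.IOException;

class ZipEntryCopy {
  // Non-retrying equivalent of the FileUtils.retryCopy call above:
  // stream one ByteSource into a destination file via Guava's ByteSink.
  static long copyEntry(ByteSource entrySource, File outFile) throws IOException {
    return entrySource.copyTo(Files.asByteSink(outFile));
  }
}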
From source file: org.ambraproject.wombat.controller.PowerPointController.java

private ByteSource getImageFile(AssetPointer assetId) throws IOException {
  Map<String, ?> files = articleService.getItemFiles(assetId);
  Map<String, ?> file = (Map<String, ?>) files.get(IMAGE_SIZE);
  ContentKey key = ContentKey.createForUuid((String) file.get("crepoKey"),
      UUID.fromString((String) file.get("crepoUuid")));
  return new ByteSource() {
    @Override
    public InputStream openStream() throws IOException {
      return corpusContentApi.request(key, ImmutableList.of()).getEntity().getContent();
    }
  };
}
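Here each openStream() call issues a fresh request to the corpus content API. That makes the source cheap to construct but potentially expensive to probe: Guava's default size() falls back to opening and counting a stream when no cheaper size is known, so every extra inspection costs another HTTP round trip.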
From source file: io.druid.segment.writeout.ByteBufferWriteOutBytes.java

@Override
public InputStream asInputStream() throws IOException {
  checkOpen();
  Function<ByteBuffer, ByteSource> byteBufferToByteSource = buf -> new ByteSource() {
    @Override
    public InputStream openStream() {
      ByteBuffer inputBuf = buf.duplicate();
      inputBuf.flip();
      return new ByteBufferInputStream(inputBuf);
    }
  };
  return ByteSource.concat(buffers.stream().map(byteBufferToByteSource).collect(Collectors.toList()))
      .openStream();
}
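ByteSource.concat produces a view that streams each source in turn, so the buffers are never copied into a single array. A minimal standalone illustration of the same call with in-memory sources (the byte values are made up):

import com.google.common.io.ByteSource;
import java.io.IOException;
import java.util.Arrays;

class ConcatDemo {
  public static void main(String[] args) throws IOException {
    ByteSource a = ByteSource.wrap(new byte[] {1, 2});
    ByteSource b = ByteSource.wrap(new byte[] {3, 4});
    // concat() lazily chains the sources; nothing is read until openStream().
    ByteSource joined = ByteSource.concat(Arrays.asList(a, b));
    System.out.println(Arrays.toString(joined.read())); // [1, 2, 3, 4]
  }
}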
From source file: org.apache.druid.storage.hdfs.HdfsDataSegmentPuller.java

FileUtils.FileCopyResult getSegmentFiles(final Path path, final File outDir)
    throws SegmentLoadingException {
  try {
    org.apache.commons.io.FileUtils.forceMkdir(outDir);
  } catch (IOException e) {
    throw new SegmentLoadingException(e, "");
  }
  try {
    final FileSystem fs = path.getFileSystem(config);
    if (fs.isDirectory(path)) {
      // -------- directory ---------
      try {
        return RetryUtils.retry(() -> {
          if (!fs.exists(path)) {
            throw new SegmentLoadingException("No files found at [%s]", path.toString());
          }
          final RemoteIterator<LocatedFileStatus> children = fs.listFiles(path, false);
          final FileUtils.FileCopyResult result = new FileUtils.FileCopyResult();
          while (children.hasNext()) {
            final LocatedFileStatus child = children.next();
            final Path childPath = child.getPath();
            final String fname = childPath.getName();
            if (fs.isDirectory(childPath)) {
              log.warn("[%s] is a child directory, skipping", childPath.toString());
            } else {
              final File outFile = new File(outDir, fname);
              try (final FSDataInputStream in = fs.open(childPath)) {
                NativeIO.chunkedCopy(in, outFile);
              }
              result.addFile(outFile);
            }
          }
          log.info("Copied %d bytes from [%s] to [%s]", result.size(), path.toString(),
              outDir.getAbsolutePath());
          return result;
        }, shouldRetryPredicate(), DEFAULT_RETRY_COUNT);
      } catch (Exception e) {
        throw Throwables.propagate(e);
      }
    } else if (CompressionUtils.isZip(path.getName())) {
      // -------- zip ---------
      final FileUtils.FileCopyResult result = CompressionUtils.unzip(new ByteSource() {
        @Override
        public InputStream openStream() throws IOException {
          return getInputStream(path);
        }
      }, outDir, shouldRetryPredicate(), false);
      log.info("Unzipped %d bytes from [%s] to [%s]", result.size(), path.toString(),
          outDir.getAbsolutePath());
      return result;
    } else if (CompressionUtils.isGz(path.getName())) {
      // -------- gzip ---------
      final String fname = path.getName();
      final File outFile = new File(outDir, CompressionUtils.getGzBaseName(fname));
      final FileUtils.FileCopyResult result = CompressionUtils.gunzip(new ByteSource() {
        @Override
        public InputStream openStream() throws IOException {
          return getInputStream(path);
        }
      }, outFile);
      log.info("Gunzipped %d bytes from [%s] to [%s]", result.size(), path.toString(),
          outFile.getAbsolutePath());
      return result;
    } else {
      throw new SegmentLoadingException("Do not know how to handle file type at [%s]",
          path.toString());
    }
  } catch (IOException e) {
    throw new SegmentLoadingException(e, "Error loading [%s]", path.toString());
  }
}