Example usage for com.google.common.io ByteSource ByteSource

List of usage examples for com.google.common.io ByteSource ByteSource

Introduction

On this page you can find example usages of the com.google.common.io ByteSource constructor.

Prototype

protected ByteSource() 

Source Link

Document

Constructor for use by subclasses.

Usage

From source file:org.opendaylight.mdsal.binding.generator.impl.ModuleInfoBackedContext.java

/**
 * Resolves the YANG text schema source for {@code sourceIdentifier} from the
 * modules registered in {@code sourceIdentifierToModuleInfo}.
 *
 * @param sourceIdentifier identifier of the requested schema source
 * @return an immediate future holding the schema source, or an immediately-failed
 *         future with a {@link SchemaSourceException} when the identifier is unknown
 */
@Override
public CheckedFuture<? extends YangTextSchemaSource, SchemaSourceException> getSource(
        final SourceIdentifier sourceIdentifier) {
    final YangModuleInfo yangModuleInfo = sourceIdentifierToModuleInfo.get(sourceIdentifier);

    if (yangModuleInfo == null) {
        LOG.debug("Unknown schema source requested: {}, available sources: {}", sourceIdentifier,
                sourceIdentifierToModuleInfo.keySet());
        // Report the miss through the future rather than throwing, per the async contract.
        return Futures.immediateFailedCheckedFuture(
                new SchemaSourceException("Unknown schema source: " + sourceIdentifier));
    }

    // Wrap the module's source in a lazy ByteSource: the underlying stream is opened
    // only when the caller actually reads the schema text.
    return Futures.immediateCheckedFuture(
            YangTextSchemaSource.delegateForByteSource(sourceIdentifier, new ByteSource() {
                @Override
                public InputStream openStream() throws IOException {
                    return yangModuleInfo.getModuleSourceStream();
                }
            }));
}

From source file:io.druid.storage.hdfs.HdfsDataSegmentPuller.java

/**
 * Pulls the segment data at {@code path} down into the local {@code outDir}.
 *
 * <p>Three layouts are supported, chosen by inspecting {@code path}:
 * a directory (each regular child file is copied; child directories are skipped with a
 * warning), a zip archive (unzipped into {@code outDir}), or a gzip file (gunzipped into
 * {@code outDir} under the gz base name).
 *
 * @param path   remote path of the segment data
 * @param outDir local directory to receive the files
 * @return the accumulated copy result describing everything written
 * @throws SegmentLoadingException on I/O failure or an unrecognized file type
 */
public FileUtils.FileCopyResult getSegmentFiles(final Path path, final File outDir)
        throws SegmentLoadingException {
    // NOTE(review): removed an unused `new LocalFileSystem()` local that was allocated
    // and never referenced anywhere in this method.
    try {
        final FileSystem fs = path.getFileSystem(config);
        if (fs.isDirectory(path)) {

            // --------    directory     ---------

            try {
                return RetryUtils.retry(new Callable<FileUtils.FileCopyResult>() {
                    @Override
                    public FileUtils.FileCopyResult call() throws Exception {
                        if (!fs.exists(path)) {
                            throw new SegmentLoadingException("No files found at [%s]", path.toString());
                        }

                        final RemoteIterator<LocatedFileStatus> children = fs.listFiles(path, false);
                        final ArrayList<FileUtils.FileCopyResult> localChildren = new ArrayList<>();
                        final FileUtils.FileCopyResult result = new FileUtils.FileCopyResult();
                        while (children.hasNext()) {
                            final LocatedFileStatus child = children.next();
                            final Path childPath = child.getPath();
                            final String fname = childPath.getName();
                            if (fs.isDirectory(childPath)) {
                                // Only flat directories are supported; nested dirs are ignored.
                                log.warn("[%s] is a child directory, skipping", childPath.toString());
                            } else {
                                final File outFile = new File(outDir, fname);

                                // Actual copy
                                fs.copyToLocalFile(childPath, new Path(outFile.toURI()));
                                result.addFile(outFile);
                            }
                        }
                        log.info("Copied %d bytes from [%s] to [%s]", result.size(), path.toString(),
                                outDir.getAbsolutePath());
                        return result;
                    }

                }, shouldRetryPredicate(), DEFAULT_RETRY_COUNT);
            } catch (Exception e) {
                throw Throwables.propagate(e);
            }
        } else if (CompressionUtils.isZip(path.getName())) {

            // --------    zip     ---------

            // Lazy ByteSource lets unzip re-open the remote stream on each retry.
            final FileUtils.FileCopyResult result = CompressionUtils.unzip(new ByteSource() {
                @Override
                public InputStream openStream() throws IOException {
                    return getInputStream(path);
                }
            }, outDir, shouldRetryPredicate(), false);

            log.info("Unzipped %d bytes from [%s] to [%s]", result.size(), path.toString(),
                    outDir.getAbsolutePath());

            return result;
        } else if (CompressionUtils.isGz(path.getName())) {

            // --------    gzip     ---------

            final String fname = path.getName();
            final File outFile = new File(outDir, CompressionUtils.getGzBaseName(fname));
            final FileUtils.FileCopyResult result = CompressionUtils.gunzip(new ByteSource() {
                @Override
                public InputStream openStream() throws IOException {
                    return getInputStream(path);
                }
            }, outFile);

            log.info("Gunzipped %d bytes from [%s] to [%s]", result.size(), path.toString(),
                    outFile.getAbsolutePath());
            return result;
        } else {
            throw new SegmentLoadingException("Do not know how to handle file type at [%s]", path.toString());
        }
    } catch (IOException e) {
        throw new SegmentLoadingException(e, "Error loading [%s]", path.toString());
    }
}

From source file:com.facebook.buck.jvm.java.JarFattener.java

/**
 * Builds the step that serializes the fat-jar bootstrap metadata and writes it to
 * {@code destination}.
 *
 * @param destination     path in the project filesystem for the info resource
 * @param nativeLibraries native library names recorded in the metadata
 * @return a {@link Step} that generates the fat jar info resource
 */
private Step writeFatJarInfo(Path destination, final ImmutableMap<String, String> nativeLibraries) {

    // Serialize the FatJar descriptor lazily, each time the source is opened.
    ByteSource infoSource = new ByteSource() {
        @Override
        public InputStream openStream() throws IOException {
            ByteArrayOutputStream buffer = new ByteArrayOutputStream();
            try {
                new FatJar(FAT_JAR_INNER_JAR, nativeLibraries).store(buffer);
            } catch (JAXBException e) {
                // A marshalling failure here is a programming error, not an I/O condition.
                throw new RuntimeException(e);
            }
            return new ByteArrayInputStream(buffer.toByteArray());
        }
    };

    return new WriteFileStep(getProjectFilesystem(), infoSource, destination, /* executable */ false);
}

From source file:org.haiku.haikudepotserver.job.controller.JobController.java

/**
 * <p>This URL can be used to supply data that can be used with a job to be run as an input to the
 * job.  A GUID is returned in the header {@link #HEADER_DATAGUID} that can be later used to refer
 * to this uploaded data.</p>/*from w ww  . j a  v a 2 s.com*/
 */

@RequestMapping(value = "/" + SEGMENT_JOBDATA, method = RequestMethod.POST)
@ResponseBody
public void supplyData(final HttpServletRequest request, final HttpServletResponse response,
        @RequestHeader(value = HttpHeaders.CONTENT_TYPE, required = false) String contentType,
        @RequestParam(value = KEY_USECODE, required = false) String useCode) throws IOException {

    Preconditions.checkArgument(null != request, "the request must be provided");

    int length = request.getContentLength();

    if (-1 != length && length > MAX_SUPPLY_DATA_LENGTH) {
        response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
    }

    ObjectContext context = serverRuntime.newContext();

    tryObtainAuthenticatedUser(context).orElseThrow(() -> {
        LOGGER.warn("attempt to supply job data with no authenticated user");
        return new JobDataAuthorizationFailure();
    });

    JobData data = jobService.storeSuppliedData(useCode,
            !Strings.isNullOrEmpty(contentType) ? contentType : MediaType.OCTET_STREAM.toString(),
            new ByteSource() {
                @Override
                public InputStream openStream() throws IOException {
                    return request.getInputStream();
                }
            });

    response.setStatus(HttpServletResponse.SC_OK);
    response.setHeader(HEADER_DATAGUID, data.getGuid());
}

From source file:com.facebook.buck.artifact_cache.ThriftArtifactCache.java

/**
 * Stores {@code file} in the thrift-based artifact cache under the metadata derived
 * from {@code info}, recording request/response details on {@code eventBuilder}.
 *
 * @param file         path (within {@code projectFilesystem}) of the artifact payload
 * @param info         identifying info used to build the artifact metadata
 * @param eventBuilder event builder updated with request size and store outcome
 * @throws IOException when the HTTP store call fails with a non-200 status
 */
@Override
protected void storeImpl(final ArtifactInfo info, final Path file,
        final HttpArtifactCacheEvent.Finished.Builder eventBuilder) throws IOException {

    // Lazy ByteSource so the payload stream can be (re)opened when the request is sent.
    final ByteSource artifact = new ByteSource() {
        @Override
        public InputStream openStream() throws IOException {
            return projectFilesystem.newFileInputStream(file);
        }
    };

    // Assemble the thrift store request: metadata plus a single payload descriptor.
    BuckCacheStoreRequest storeRequest = new BuckCacheStoreRequest();
    ArtifactMetadata artifactMetadata = infoToMetadata(info, artifact, repository, scheduleType,
            distributedBuildModeEnabled);
    storeRequest.setMetadata(artifactMetadata);
    PayloadInfo payloadInfo = new PayloadInfo();
    long artifactSizeBytes = artifact.size();
    payloadInfo.setSizeBytes(artifactSizeBytes);
    BuckCacheRequest cacheRequest = new BuckCacheRequest();
    cacheRequest.addToPayloads(payloadInfo);
    cacheRequest.setType(BuckCacheRequestType.STORE);
    cacheRequest.setStoreRequest(storeRequest);

    if (LOG.isVerboseEnabled()) {
        LOG.verbose(String.format("Storing artifact with metadata: [%s].",
                ThriftUtil.thriftToDebugJson(artifactMetadata)));
    }

    final ThriftArtifactCacheProtocol.Request request = ThriftArtifactCacheProtocol.createRequest(PROTOCOL,
            cacheRequest, artifact);
    Request.Builder builder = toOkHttpRequest(request);
    eventBuilder.getStoreBuilder().setRequestSizeBytes(request.getRequestLengthBytes());
    try (HttpResponse httpResponse = storeClient.makeRequest(hybridThriftEndpoint, builder)) {
        // A non-200 transport status is fatal; thrift-level failures below are only reported.
        if (httpResponse.code() != 200) {
            throw new IOException(String.format(
                    "Failed to store cache artifact with HTTP status code [%d] "
                            + " to url [%s] for build target [%s] that has size [%d] bytes.",
                    httpResponse.code(), httpResponse.requestUrl(), info.getBuildTarget().orElse(null),
                    artifactSizeBytes));
        }

        try (ThriftArtifactCacheProtocol.Response response = ThriftArtifactCacheProtocol.parseResponse(PROTOCOL,
                httpResponse.getBody())) {
            if (!response.getThriftData().isWasSuccessful()) {
                reportFailure(
                        "Failed to store artifact with thriftErrorMessage=[%s] "
                                + "url=[%s] artifactSizeBytes=[%d]",
                        response.getThriftData().getErrorMessage(), httpResponse.requestUrl(),
                        artifactSizeBytes);
            }

            eventBuilder.getStoreBuilder()
                    .setArtifactContentHash(storeRequest.getMetadata().artifactPayloadMd5);
            eventBuilder.getStoreBuilder().setWasStoreSuccessful(response.getThriftData().isWasSuccessful());
        }
    }
}

From source file:org.apache.druid.java.util.common.CompressionUtils.java

/**
 * Unzip the pulled file to an output directory. This is only expected to work on zips with lone files, and is not intended for zips with directory structures.
 *
 * @param pulledFile The file to unzip
 * @param outDir     The directory to store the contents of the file.
 *
 * @return a FileCopyResult of the files which were written to disk
 *
 * @throws IOException
 */
public static FileUtils.FileCopyResult unzip(final File pulledFile, final File outDir) throws IOException {
    if (!(outDir.exists() && outDir.isDirectory())) {
        throw new ISE("outDir[%s] must exist and be a directory", outDir);
    }
    log.info("Unzipping file[%s] to [%s]", pulledFile, outDir);
    final FileUtils.FileCopyResult result = new FileUtils.FileCopyResult();
    try (final ZipFile zipFile = new ZipFile(pulledFile)) {
        final Enumeration<? extends ZipEntry> enumeration = zipFile.entries();
        while (enumeration.hasMoreElements()) {
            final ZipEntry entry = enumeration.nextElement();
            final File outFile = new File(outDir, entry.getName());

            // Guard against zip-slip: the resolved output file must stay inside outDir.
            validateZipOutputFile(pulledFile.getCanonicalPath(), outFile, outDir);

            // Lazy ByteSource so retryCopy can re-open the entry's stream on each retry.
            result.addFiles(FileUtils.retryCopy(new ByteSource() {
                @Override
                public InputStream openStream() throws IOException {
                    return new BufferedInputStream(zipFile.getInputStream(entry));
                }
            }, outFile, FileUtils.IS_EXCEPTION, DEFAULT_RETRY_COUNT).getFiles());
        }
    }
    return result;
}

From source file:com.google.devtools.build.lib.rules.objc.ObjcActionsBuilder.java

/**
 * Creates a lazy byte source whose content is the serialized xcodegen control proto
 * for the given project.
 *
 * @param pbxproj   artifact whose exec path becomes the control file's pbxproj entry
 * @param project   provider of the xcode targets to include
 * @param minimumOs value for the IPHONEOS_DEPLOYMENT_TARGET build setting
 * @return a ByteSource that serializes the control message on each open
 */
private static ByteSource xcodegenControlFileBytes(final Artifact pbxproj, final XcodeProvider.Project project,
        final String minimumOs) {
    return new ByteSource() {
        @Override
        public InputStream openStream() {
            XcodeGenProtos.XcodeprojBuildSetting deploymentTarget = XcodeGenProtos.XcodeprojBuildSetting
                    .newBuilder()
                    .setName("IPHONEOS_DEPLOYMENT_TARGET")
                    .setValue(minimumOs)
                    .build();
            XcodeGenProtos.Control control = XcodeGenProtos.Control.newBuilder()
                    .setPbxproj(pbxproj.getExecPathString())
                    .addAllTarget(project.targets())
                    .addBuildSetting(deploymentTarget)
                    .build();
            return control.toByteString().newInput();
        }
    };
}

From source file:com.metamx.common.CompressionUtils.java

/**
 * A gunzip function to store locally/*from   w  w  w . j  a  v a2s.c  om*/
 *
 * @param in          The factory to produce input streams
 * @param outFile     The file to store the result into
 * @param shouldRetry A predicate to indicate if the Throwable is recoverable
 *
 * @return The count of bytes written to outFile
 */
public static FileUtils.FileCopyResult gunzip(final ByteSource in, final File outFile,
        Predicate<Throwable> shouldRetry) {
    return FileUtils.retryCopy(new ByteSource() {
        @Override
        public InputStream openStream() throws IOException {
            return gzipInputStream(in.openStream());
        }
    }, outFile, shouldRetry, DEFAULT_RETRY_COUNT);
}

From source file:org.onosproject.drivers.microsemi.yang.impl.AbstractYangServiceImpl.java

/**
 * Encodes a YANG model object into an XML string, typically used as a NETCONF filter.
 *
 * @param yangObjectOpParamFilter model object to serialize
 * @param annotations             optional annotated-node infos attached to the
 *                                composite data; may be null
 * @return the XML serialization of the model object, decoded as UTF-8
 * @throws NetconfException if reading the serialized stream fails
 */
protected final String encodeMoToXmlStr(ModelObjectData yangObjectOpParamFilter,
        List<AnnotatedNodeInfo> annotations) throws NetconfException {
    //Convert the param to XML to use as a filter
    ResourceData rd = ((ModelConverter) yangModelRegistry).createDataNode(yangObjectOpParamFilter);

    DefaultCompositeData.Builder cdBuilder = DefaultCompositeData.builder().resourceData(rd);
    if (annotations != null) {
        for (AnnotatedNodeInfo ani : annotations) {
            cdBuilder.addAnnotatedNodeInfo(ani);
        }
    }
    CompositeStream cs = xSer.encode(cdBuilder.build(), yCtx);

    try {
        // Read the serializer's output stream fully into a UTF-8 string via a lazy ByteSource.
        ByteSource byteSource = new ByteSource() {
            @Override
            public InputStream openStream() throws IOException {
                return cs.resourceData();
            }
        };

        return byteSource.asCharSource(Charsets.UTF_8).read();
    } catch (IOException e) {
        throw new NetconfException("Error decoding CompositeStream to String", e);
    }
}

From source file:org.opendaylight.yangtools.yang.parser.impl.YangParserImpl.java

/**
 * Parses the supplied YANG files and maps each file to the module parsed from it.
 *
 * @param yangFiles files to parse; null or empty yields an empty map
 * @return map from each input file to its parsed module
 */
@Override
public Map<File, Module> parseYangModelsMapped(final Collection<File> yangFiles) {
    if (yangFiles == null || yangFiles.isEmpty()) {
        return Collections.emptyMap();
    }

    // Wrap each file as a lazy ByteSource; the map lets us recover the file afterwards.
    final Map<ByteSource, File> sourceToFile = new HashMap<>();
    for (final File yangFile : yangFiles) {
        sourceToFile.put(new ByteSource() {
            @Override
            public InputStream openStream() throws IOException {
                return new NamedFileInputStream(yangFile, yangFile.getPath());
            }
        }, yangFile);
    }

    final Map<ByteSource, Module> sourceToModule;
    try {
        sourceToModule = parseYangModelSources(sourceToFile.keySet(), null);
    } catch (IOException | YangSyntaxErrorException e) {
        throw new YangParseException("Failed to parse yang data", e);
    }

    final Map<File, Module> modulesByFile = new LinkedHashMap<>();
    for (Map.Entry<ByteSource, Module> entry : sourceToModule.entrySet()) {
        modulesByFile.put(sourceToFile.get(entry.getKey()), entry.getValue());
    }
    return modulesByFile;
}