Example usage for com.google.common.io Resources asByteSource

Introduction

On this page you can find example usage of com.google.common.io Resources.asByteSource, collected from real-world open-source projects.

Prototype

public static ByteSource asByteSource(URL url) 

Document

Returns a ByteSource that reads from the given URL.
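
Before the real-world snippets below, a minimal self-contained sketch of the method; the resource name "config.properties" is a placeholder:

import com.google.common.base.Charsets;
import com.google.common.io.ByteSource;
import com.google.common.io.Resources;

import java.io.IOException;
import java.net.URL;

public class AsByteSourceDemo {
    public static void main(String[] args) throws IOException {
        // Locate a classpath resource; "config.properties" is a placeholder name.
        URL url = Resources.getResource("config.properties");

        // The returned ByteSource is lazy: no bytes are read until a terminal
        // operation such as read(), hash(), copyTo() or openStream() runs.
        ByteSource source = Resources.asByteSource(url);

        byte[] bytes = source.read();                              // whole content as bytes
        String text = source.asCharSource(Charsets.UTF_8).read(); // or decoded as text
        System.out.println(bytes.length + " bytes read: " + text);
    }
}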

Usage

From source file:org.graylog2.web.resources.WebInterfaceAssetsResource.java

private Response getResponse(Request request, String filename, URL resourceUrl, boolean fromPlugin)
        throws IOException, URISyntaxException {
    final Date lastModified;
    final InputStream stream;
    final HashCode hashCode;

    switch (resourceUrl.getProtocol()) {
    case "file": {
        final File file = new File(resourceUrl.toURI());
        lastModified = new Date(file.lastModified());
        stream = new FileInputStream(file);
        hashCode = Files.hash(file, Hashing.sha256());
        break;
    }
    case "jar": {
        final URI uri = resourceUrl.toURI();
        final FileSystem fileSystem = fileSystemCache.getUnchecked(uri);
        final java.nio.file.Path path = fileSystem.getPath(pluginPrefixFilename(fromPlugin, filename));
        final FileTime lastModifiedTime = java.nio.file.Files.getLastModifiedTime(path);
        lastModified = new Date(lastModifiedTime.toMillis());
        stream = resourceUrl.openStream();
        hashCode = Resources.asByteSource(resourceUrl).hash(Hashing.sha256());
        break;
    }
    default:
        throw new IllegalArgumentException("Not a jar or file");
    }

    final EntityTag entityTag = new EntityTag(hashCode.toString());

    final Response.ResponseBuilder response = request.evaluatePreconditions(lastModified, entityTag);
    if (response != null) {
        return response.build();
    }

    final String contentType = firstNonNull(mimeTypes.getContentType(filename),
            MediaType.APPLICATION_OCTET_STREAM);
    final CacheControl cacheControl = new CacheControl();
    cacheControl.setMaxAge((int) TimeUnit.DAYS.toSeconds(365));
    cacheControl.setNoCache(false);
    cacheControl.setPrivate(false);
    return Response.ok(stream).header(HttpHeaders.CONTENT_TYPE, contentType).tag(entityTag)
            .cacheControl(cacheControl).lastModified(lastModified).build();
}
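
The core pattern above, hashing a resource to derive a strong HTTP validator, also works in isolation. A minimal sketch, assuming the URL points at a readable resource; the returned hex string is what the Graylog code feeds into EntityTag:

import com.google.common.hash.HashCode;
import com.google.common.hash.Hashing;
import com.google.common.io.Resources;

import java.io.IOException;
import java.net.URL;

public class ResourceETag {
    /** Returns a strong validator for the resource content, e.g. for an ETag header. */
    public static String entityTagFor(URL resourceUrl) throws IOException {
        // hash() streams the bytes through the hasher; the content is never
        // buffered in memory as a whole.
        HashCode hash = Resources.asByteSource(resourceUrl).hash(Hashing.sha256());
        return hash.toString();
    }
}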

From source file:org.lenskit.util.io.LKFileUtils.java

/**
 * Create a URL-backed byte source.
 * @param url The URL of the byte source.
 * @param compression The compression mode.
 * @return The byte source, possibly decompressing.
 */
public static ByteSource byteSource(URL url, CompressionMode compression) {
    CompressionMode effectiveMode = compression.getEffectiveCompressionMode(url.getPath());
    ByteSource source = Resources.asByteSource(url);
    if (!effectiveMode.equals(CompressionMode.NONE)) {
        source = new CompressedByteSource(source, effectiveMode.getCompressorName());
    }
    return source;
}
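
CompressedByteSource is a LensKit class, not part of Guava. The same decoration can be sketched with plain Guava and java.util.zip, under the assumption that the compressed resources are gzip streams:

import com.google.common.io.ByteSource;
import com.google.common.io.Resources;

import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.zip.GZIPInputStream;

public class GzipByteSource extends ByteSource {
    private final ByteSource delegate;

    public GzipByteSource(ByteSource delegate) {
        this.delegate = delegate;
    }

    @Override
    public InputStream openStream() throws IOException {
        // Decompress transparently every time the source is opened.
        return new GZIPInputStream(delegate.openStream());
    }

    /** Usage sketch: wrap URL-backed sources whose path ends in ".gz". */
    public static ByteSource byteSource(URL url) {
        ByteSource source = Resources.asByteSource(url);
        return url.getPath().endsWith(".gz") ? new GzipByteSource(source) : source;
    }
}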

From source file:io.spikex.filter.Grok.java

@Override
protected void startFilter() {

    // Create grok directory if it doesn't exist
    File grokDir = new File(dataPath().toFile(), "grok");
    boolean exists = grokDir.exists();
    if (!exists) {
        if (!grokDir.mkdirs()) {
            throw new IllegalStateException("Failed to create grok directory in: " + dataPath());
        }
    }

    // input and output fields
    m_inputField = config().getString(CONF_KEY_INPUT_FIELD, DEF_INPUT_FIELD);
    m_outputField = config().getString(CONF_KEY_OUTPUT_FIELD, DEF_OUTPUT_FIELD);
    m_multiLine.setOutputField(m_outputField);

    // Group fields
    JsonObject group = config().getObject(CONF_KEY_GROUP, new JsonObject());
    m_groupField = group.getString(CONF_KEY_GROUP, DEF_GROUP_FIELD);
    JsonArray groupFields = group.getArray(CONF_KEY_FIELDS, new JsonArray());
    for (int i = 0; i < groupFields.size(); i++) {
        m_groupFields.add((String) groupFields.get(i));
    }
    m_multiLine.setGroupField(m_groupField);

    // Pre-build matchers for match-lines
    if (config().containsField(CONF_KEY_MATCH_LINES)) {
        JsonArray matchLines = config().getArray(CONF_KEY_MATCH_LINES, new JsonArray());
        for (int i = 0; i < matchLines.size(); i++) {
            MatchLine matcher = new MatchLine();
            m_matchLines.add(matcher);
        }
    }

    // grok-urls
    JsonArray defGrokUrls = new JsonArray(DEF_PATTERNS);
    JsonArray grokUrls = config().getArray(CONF_KEY_PATTERNS, defGrokUrls);

    for (int i = 0; i < grokUrls.size(); i++) {

        String url = grokUrls.get(i);
        URI uri = URI.create(resolveUrl(url));

        try {
            Path uriPath = Paths.get(uri); // Grok file URI
            String filename = uriPath.getFileName().toString();
            File grokFile = new File(grokDir, filename);
            if (!java.nio.file.Files.exists(grokFile.toPath())) {
                // Copy grok file to local directory
                logger().info("Copying \"{}\" to \"{}\"", filename, grokFile.getAbsolutePath());
                Resources.asByteSource(uri.toURL()).copyTo(Files.asByteSink(grokFile));
            }
            for (MatchLine matcher : m_matchLines) {
                matcher.addPatternFromFile(grokFile.getAbsolutePath());
            }
            m_grokMulti.addPatternFromFile(grokFile.getAbsolutePath());
        } catch (GrokException | IOException e) {
            throw new IllegalStateException("Failed to add grok pattern: " + uri.toString(), e);
        }
    }

    String pattern = null;
    try {
        // match-lines
        if (config().containsField(CONF_KEY_MATCH_LINES)) {

            JsonArray matchLines = config().getArray(CONF_KEY_MATCH_LINES, new JsonArray());
            for (int i = 0; i < matchLines.size(); i++) {

                JsonObject matchLine = (JsonObject) matchLines.get(i);
                MatchLine matcher = m_matchLines.get(i);

                pattern = matchLine.getString(CONF_KEY_PATTERN);
                matcher.compile(pattern);

                // tags
                JsonArray tags = matchLine.getArray(CONF_KEY_TAGS);
                Preconditions.checkArgument(tags.size() <= MAX_TAG_COUNT,
                        "You can define only " + MAX_TAG_COUNT + " tags for " + CONF_KEY_MATCH_LINES);

                for (int j = 0; j < tags.size() && j < MAX_TAG_COUNT; j++) {
                    matcher.addTag((String) tags.get(j), j);
                }

                // ignore
                JsonArray ignore = matchLine.getArray(CONF_KEY_IGNORE, new JsonArray());
                for (int j = 0; j < ignore.size(); j++) {
                    matcher.addIgnore((String) ignore.get(j));
                }
            }
        }
    } catch (GrokException e) {
        throw new IllegalStateException("Failed to compile pattern: " + pattern, e);
    }

    pattern = null;
    try {
        // multi-line
        if (config().containsField(CONF_KEY_MULTI_LINE)) {
            JsonObject multiLine = config().getObject(CONF_KEY_MULTI_LINE);
            pattern = multiLine.getString(CONF_KEY_PATTERN);
            m_grokMulti.compile(pattern);
            // segment-field
            String field = multiLine.getString(CONF_KEY_SEGMENT_FIELD, "");
            m_multiLine.setSegmentField(field);
            // tags
            JsonArray tags = multiLine.getArray(CONF_KEY_TAGS);
            Preconditions.checkArgument(tags.size() <= MAX_TAG_COUNT,
                    "You can define only " + MAX_TAG_COUNT + " tags for " + CONF_KEY_MULTI_LINE);
            for (int i = 0; i < tags.size() && i < MAX_TAG_COUNT; i++) {
                m_multiTags[i] = tags.get(i);
            }
        }
    } catch (GrokException e) {
        throw new IllegalStateException("Failed to compile pattern: " + pattern, e);
    }
}
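
The resource-to-file copy buried in the loop above reduces to a single call. A minimal sketch; both paths are placeholders:

import com.google.common.io.Files;
import com.google.common.io.Resources;

import java.io.File;
import java.io.IOException;
import java.net.URL;

public class CopyResourceToFile {
    public static void main(String[] args) throws IOException {
        URL patternUrl = new URL("file:///tmp/patterns/grok-patterns"); // placeholder
        File target = new File("/tmp/grok/grok-patterns");              // placeholder
        target.getParentFile().mkdirs();

        // copyTo() opens the source and the sink, streams the bytes across,
        // and closes both, even on failure.
        Resources.asByteSource(patternUrl).copyTo(Files.asByteSink(target));
    }
}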

From source file:ratpack.server.internal.DefaultServerConfigBuilder.java

@Override
public ServerConfig.Builder props(URL url) {
    return props(Resources.asByteSource(url));
}

From source file:org.apache.druid.cli.validate.DruidJsonValidator.java

@Override
public void run() {
    File file = new File(jsonFile);
    if (!file.exists()) {
        LOG.info("File[%s] does not exist.%n", file);
    }

    final Injector injector = makeInjector();
    final ObjectMapper jsonMapper = injector.getInstance(ObjectMapper.class);

    registerModules(jsonMapper,
            Iterables.concat(
                    Initialization.getFromExtensions(injector.getInstance(ExtensionsConfig.class),
                            DruidModule.class),
                    Arrays.asList(new FirehoseModule(), new IndexingHadoopModule(),
                            new IndexingServiceFirehoseModule(), new LocalDataStorageDruidModule())));

    final ClassLoader loader;
    if (Thread.currentThread().getContextClassLoader() != null) {
        loader = Thread.currentThread().getContextClassLoader();
    } else {
        loader = DruidJsonValidator.class.getClassLoader();
    }

    if (toLogger) {
        logWriter = new NullWriter() {
            private final Logger logger = new Logger(DruidJsonValidator.class);

            @Override
            public void write(char[] cbuf, int off, int len) {
                logger.info(new String(cbuf, off, len));
            }
        };
    }

    try {
        if ("query".equalsIgnoreCase(type)) {
            jsonMapper.readValue(file, Query.class);
        } else if ("hadoopConfig".equalsIgnoreCase(type)) {
            jsonMapper.readValue(file, HadoopDruidIndexerConfig.class);
        } else if ("task".equalsIgnoreCase(type)) {
            jsonMapper.readValue(file, Task.class);
        } else if ("parse".equalsIgnoreCase(type)) {
            final StringInputRowParser parser;
            if (file.isFile()) {
                logWriter.write("loading parse spec from file '" + file + "'");
                parser = jsonMapper.readValue(file, StringInputRowParser.class);
            } else if (loader.getResource(jsonFile) != null) {
                logWriter.write("loading parse spec from resource '" + jsonFile + "'");
                parser = jsonMapper.readValue(loader.getResource(jsonFile), StringInputRowParser.class);
            } else {
                logWriter.write("cannot find proper spec from 'file'.. regarding it as a json spec");
                parser = jsonMapper.readValue(jsonFile, StringInputRowParser.class);
            }
            parser.initializeParser();
            if (resource != null) {
                final CharSource source;
                if (new File(resource).isFile()) {
                    logWriter.write("loading data from file '" + resource + "'");
                    source = Resources.asByteSource(new File(resource).toURI().toURL())
                            .asCharSource(Charset.forName(parser.getEncoding()));
                } else if (loader.getResource(resource) != null) {
                    logWriter.write("loading data from resource '" + resource + "'");
                    source = Resources.asByteSource(loader.getResource(resource))
                            .asCharSource(Charset.forName(parser.getEncoding()));
                } else {
                    logWriter.write("cannot find proper data from 'resource'.. regarding it as data string");
                    source = CharSource.wrap(resource);
                }
                readData(parser, source);
            }
        } else {
            throw new UOE("Unknown type[%s]", type);
        }
    } catch (Exception e) {
        LOG.error(e, "INVALID JSON!");
        throw Throwables.propagate(e);
    }
}
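
The decoding pattern used twice above, a ByteSource viewed as a CharSource with an explicit charset, in isolation; the file name is a placeholder:

import com.google.common.io.CharSource;
import com.google.common.io.Resources;

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class ReadResourceAsText {
    public static void main(String[] args) throws IOException {
        File dataFile = new File("data.json"); // placeholder

        // asCharSource() layers charset decoding over the raw bytes.
        CharSource source = Resources.asByteSource(dataFile.toURI().toURL())
                .asCharSource(StandardCharsets.UTF_8);
        System.out.println(source.read());
    }
}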

From source file:com.facebook.buck.java.JarFattener.java

/**
 * @return a {@link Step} that writes the file at {@code destination} from the resource named {@code name}.
 */
private Step writeFromResource(Path destination, final String name) {
    return new WriteFileStep(getProjectFilesystem(), Resources.asByteSource(Resources.getResource(name)),
            destination, /* executable */ false);
}

From source file:co.paralleluniverse.actors.ActorModule.java

private static boolean equalContent(URL url1, URL url2) {
    try {
        return Resources.asByteSource(url1).contentEquals(Resources.asByteSource(url2));
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}

From source file:eu.lp0.cursus.publish.html.XSLTHTMLGenerator.java

public Map<String, ByteSource> getCodePages() {
    return Maps.toMap(CODE_PAGES, new Function<String, ByteSource>() {
        @Override
        @Nullable
        public ByteSource apply(@Nullable String input) {
            return Resources.asByteSource(Resources.getResource(PackageConstants.RESOURCE_PATH + "/" + input)); //$NON-NLS-1$
        }
    });
}

From source file:com.machak.idea.plugins.actions.CopyHippoSharedFiles.java

public static String readText(final File file) {
    try {
        final ByteSource source = Resources.asByteSource(file.toURI().toURL());
        return source.asCharSource(Charsets.UTF_8).read();
    } catch (IOException e) {
        // Swallow the exception and fall through to the null return below.
    }
    return null;
}
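
Since the input here is already a File, the detour through a URL is avoidable; Guava's Files view reads it directly. A sketch, not a drop-in replacement for the class above:

import com.google.common.base.Charsets;
import com.google.common.io.Files;

import java.io.File;
import java.io.IOException;

public class ReadTextDirect {
    public static String readText(File file) throws IOException {
        // Files.asCharSource skips the File-to-URL conversion entirely.
        return Files.asCharSource(file, Charsets.UTF_8).read();
    }
}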

From source file:google.registry.xml.XmlTransformer.java

/** Creates a single {@link Schema} from multiple {@code .xsd} files. */
public static Schema loadXmlSchemas(List<String> schemaFilenames) {
    try (Closer closer = Closer.create()) {
        StreamSource[] sources = new StreamSource[schemaFilenames.size()];
        for (int i = 0; i < schemaFilenames.size(); ++i) {
            sources[i] = new StreamSource(closer.register(Resources
                    .asByteSource(Resources.getResource(XmlTransformer.class, "xsd/" + schemaFilenames.get(i)))
                    .openStream()));
        }
        return SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI).newSchema(sources);
    } catch (IOException | SAXException e) {
        throw new RuntimeException(e);
    }
}
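
openStream() hands back a plain InputStream, so outside of the Closer idiom above a try-with-resources works just as well. A sketch; the schema name is a placeholder:

import com.google.common.io.Resources;

import java.io.IOException;
import java.io.InputStream;

public class OpenResourceStream {
    public static void main(String[] args) throws IOException {
        // try-with-resources closes the stream even if reading fails.
        try (InputStream in = Resources
                .asByteSource(Resources.getResource("xsd/example.xsd")) // placeholder
                .openStream()) {
            System.out.println("first byte: " + in.read());
        }
    }
}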