Example usage for com.google.common.io Resources getResource

List of usage examples for com.google.common.io Resources getResource

Introduction

In this page you can find the example usage for com.google.common.io Resources getResource.

Prototype

public static URL getResource(String resourceName) 

Source Link

Document

Returns a URL pointing to resourceName if the resource is found using the context class loader returned by Thread#getContextClassLoader().

Usage

From source file:org.etourdot.xincproc.samples.FilterUsageSamples.java

/**
 * Simple sample where two SAX filters are chained.
 * <p>The first filter is a {@link org.etourdot.xincproc.xinclude.sax.XIncProcXIncludeFilter} resolving
 * xinclude.</p>
 * <p>The second filter extracts only 'p' elements.</p>
 *
 * @throws Exception if the reader cannot be created or parsing fails
 */
public void newFilterFromUri() throws Exception {
    // Locate the sample document on the classpath.
    final URI sourceUri = Resources.getResource("include.xml").toURI();
    final XMLReader reader = XMLReaderFactory.createXMLReader();
    // Filter 1: XInclude resolution, wired as lexical and declaration handler.
    final XMLFilter xincludeFilter = XIncProcEngine.newXIncludeFilter(sourceUri);
    reader.setProperty("http://xml.org/sax/properties/declaration-handler", xincludeFilter);
    reader.setProperty("http://xml.org/sax/properties/lexical-handler", xincludeFilter);
    xincludeFilter.setParent(reader);
    // Filter 2: keeps only 'p' elements, chained on top of the XInclude filter.
    final PElementFilter pElementFilter = new PElementFilter(xincludeFilter);
    pElementFilter.parse(new InputSource(sourceUri.toASCIIString()));
    LOG.info(pElementFilter.getResult());
}

From source file:com.cloudera.cdk.examples.data.CreateUserDatasetGeneric.java

@Override
public int run(String[] args) throws IOException {

    // Construct a local filesystem dataset repository rooted at /tmp/data
    FileSystem fs = FileSystem.getLocal(new Configuration());
    Path root = new Path("/tmp/data");
    DatasetRepository repo = new FileSystemDatasetRepository(fs, root);

    // Read an Avro schema from the user.avsc file on the classpath.
    // Close the stream in a finally block: the original leaked it.
    Schema schema;
    InputStream schemaStream = Resources.getResource("user.avsc").openStream();
    try {
        schema = new Schema.Parser().parse(schemaStream);
    } finally {
        schemaStream.close();
    }

    // Create a dataset of users with the Avro schema in the repository
    DatasetDescriptor descriptor = new DatasetDescriptor.Builder().schema(schema).get();
    Dataset users = repo.create("users", descriptor);

    // Get a writer for the dataset and write 100 random users to it;
    // the finally block guarantees the writer is closed even on failure.
    DatasetWriter<GenericRecord> writer = users.getWriter();
    try {
        writer.open();
        String[] colors = { "green", "blue", "pink", "brown", "yellow" };
        Random rand = new Random();
        GenericRecordBuilder builder = new GenericRecordBuilder(schema);
        for (int i = 0; i < 100; i++) {
            GenericRecord record = builder.set("username", "user-" + i)
                    .set("creationDate", System.currentTimeMillis())
                    .set("favoriteColor", colors[rand.nextInt(colors.length)]).build();
            writer.write(record);
        }
    } finally {
        writer.close();
    }

    return 0;
}

From source file:com.streamsets.datacollector.hdfs.standalone.HdfsDestinationPipelineOperationsIT.java

/**
 * Loads the HDFS destination pipeline definition from the classpath and
 * substitutes the placeholder {@code /uri} with the mini-DFS cluster URI.
 *
 * @return the pipeline JSON with the cluster URI substituted in
 * @throws Exception if the resource cannot be located or read
 */
private static String getPipelineJson() throws Exception {
    URI uri = Resources.getResource("hdfs_destination_pipeline_operations.json").toURI();
    String pipelineJson = new String(Files.readAllBytes(Paths.get(uri)), StandardCharsets.UTF_8);
    // Use literal replace(): replaceAll() treats the pattern as a regex and
    // the replacement as a template where '$' and '\' are special, which
    // would corrupt the output if the cluster URI contained such characters.
    pipelineJson = pipelineJson.replace("/uri", miniDFS.getURI().toString());
    return pipelineJson;
}

From source file:com.mapr.franz.server.Server.java

/**
 * Loads configuration properties: first the {@code base.properties} defaults
 * from the classpath, then (if present) overrides from {@code PROPERTIES_FILE}
 * on disk. I/O failures are logged and an empty or partial Properties object
 * is returned rather than propagating the error.
 *
 * @return the merged properties (possibly empty if loading failed)
 */
public static Properties loadProperties() {
    Properties props = new Properties();
    try {
        // try-with-resources guarantees the streams are closed even when
        // load() throws; the original leaked them on failure.
        try (InputStream base = Resources.getResource("base.properties").openStream()) {
            props.load(base);
        }

        File propFile = new File(PROPERTIES_FILE);
        if (propFile.exists()) {
            log.debug("Adding additional properties from {}", propFile.getCanonicalPath());

            try (FileInputStream in = new FileInputStream(PROPERTIES_FILE)) {
                props.load(in);
            }
        }
    } catch (IOException e) {
        // Surface the failure in the application log instead of stderr.
        log.warn("Unable to load properties", e);
    }
    return props;
}

From source file:org.cloudifysource.cosmo.orchestrator.workflow.config.RuoteRuntimeConfig.java

/**
 * Reads the named classpath resource fully into a UTF-8 string.
 *
 * @param resource the classpath resource name
 * @return the decoded resource contents
 * @throws IOException if the resource cannot be read
 */
private static String getContent(String resource) throws IOException {
    return Resources.toString(Resources.getResource(resource), Charsets.UTF_8);
}

From source file:eu.redzoo.article.planetcassandra.reactive.service.callback.ClassicHotelService.java

public ClassicHotelService(Session session) throws IOException {
    this.session = session;
    defaultPicture = Resources.toByteArray(Resources.getResource("error.jpg"));
    preparedSelectStmt = session.prepare(
            "select id, name, description, classification, picture_uri, room_ids from hotels where id = ?");
    preparedSelectStmt.setConsistencyLevel(ConsistencyLevel.QUORUM);
}

From source file:net.minecraftforge.fml.common.asm.transformers.MarkerTransformer.java

/**
 * Reads marker rules from {@code rulesFile} — a file on disk if it exists,
 * otherwise a classpath resource of the same name — and records each rule
 * in {@code markers}.
 *
 * <p>Line format: {@code <class> <iface1>,<iface2>,...}; anything after a
 * '#' is treated as a comment, and blank lines are skipped.</p>
 *
 * @param rulesFile path (or classpath resource name) of the rules file
 * @throws IOException if the rules cannot be read
 */
private void readMapFile(String rulesFile) throws IOException {
    File file = new File(rulesFile);
    URL rulesResource; // prefer the on-disk file; fall back to the classpath
    if (file.exists()) {
        rulesResource = file.toURI().toURL();
    } else {
        rulesResource = Resources.getResource(rulesFile);
    }
    Resources.readLines(rulesResource, Charsets.UTF_8, new LineProcessor<Void>() {
        @Override
        public Void getResult() {
            // All output happens via the markers side effect; nothing to return.
            return null;
        }

        @Override
        public boolean processLine(String input) throws IOException {
            // Strip a trailing '#' comment (limit(2) keeps only the text
            // before the first '#') and surrounding whitespace.
            String line = Iterables.getFirst(Splitter.on('#').limit(2).split(input), "").trim();
            if (line.length() == 0) {
                // Blank or comment-only line: keep reading.
                return true;
            }
            List<String> parts = Lists.newArrayList(Splitter.on(" ").trimResults().split(line));
            if (parts.size() != 2) {
                throw new RuntimeException("Invalid config file line " + input);
            }
            // Second token is a comma-separated list of marker interfaces,
            // each recorded against the class named by the first token.
            List<String> markerInterfaces = Lists
                    .newArrayList(Splitter.on(",").trimResults().split(parts.get(1)));
            for (String marker : markerInterfaces) {
                markers.put(parts.get(0), marker);
            }
            return true;
        }
    });
}

From source file:com.streamsets.datacollector.kafka.standalone.KafkaOriginMultiPartitionPipelineRunIT.java

/**
 * Loads the standalone Kafka origin pipeline definition from the classpath
 * and substitutes the topic, broker, and ZooKeeper placeholders with the
 * test cluster's actual values.
 *
 * @return the pipeline JSON with all placeholders substituted
 * @throws Exception if the resource cannot be located or read
 */
@Override
protected String getPipelineJson() throws Exception {
    URI uri = Resources.getResource("kafka_origin_pipeline_standalone.json").toURI();
    String pipelineJson = new String(Files.readAllBytes(Paths.get(uri)), StandardCharsets.UTF_8);
    // Use literal replace() throughout: the original mixed replace() and
    // replaceAll(), and replaceAll() treats its arguments as a regex plus a
    // replacement template ('$'/'\' are special), which could corrupt the
    // output for broker/ZooKeeper addresses containing such characters.
    pipelineJson = pipelineJson.replace("topicName", TOPIC);
    pipelineJson = pipelineJson.replace("localhost:9092", KafkaTestUtil.getMetadataBrokerURI());
    pipelineJson = pipelineJson.replace("localhost:2181", KafkaTestUtil.getZkConnect());
    return pipelineJson;
}

From source file:com.madvay.tools.android.perf.apat.Main.java

/**
 * Prints the bundled LICENSE classpath resource line by line; a read
 * failure is reported via err() rather than propagated.
 */
private static void printLicense() {
    try {
        for (String line : Resources.readLines(Resources.getResource("LICENSE"), Charsets.UTF_8)) {
            outln(line);
        }
    } catch (IOException err) {
        err(err);
    }
}

From source file:org.jmxtrans.embedded.samples.graphite.GraphiteDataInjector.java

/**
 * Builds a GraphiteDataInjector configured for hostedgraphite.com, reading
 * the account key from the {@code hosted-graphite.credentials} classpath
 * resource and prefixing all metrics with it.
 *
 * @return the configured injector
 * @throws RuntimeException (via Throwables.propagate) if the credentials
 *         resource cannot be read
 */
public static GraphiteDataInjector newHostedGraphiteDataInjector() {
    GraphiteDataInjector graphiteDataInjector = new GraphiteDataInjector();
    // TODO DEFINE YOUR_HOSTED_GRAPHITE_KEY
    String hostedGraphiteKey = null;
    try {
        // Decode as UTF-8 explicitly: Charset.defaultCharset() is
        // platform-dependent and could mis-decode the credentials file.
        hostedGraphiteKey = Resources.toString(Resources.getResource("hosted-graphite.credentials"),
                Charset.forName("UTF-8"));
    } catch (IOException e) {
        throw Throwables.propagate(e);
    }
    graphiteDataInjector.graphiteMetricPrefix = hostedGraphiteKey + ".edu.servers.";
    graphiteDataInjector.graphiteHost = "carbon.hostedgraphite.com";
    graphiteDataInjector.setMaxGraphiteDataPointsPerSecond(100);
    graphiteDataInjector.batchSize = 50;
    return graphiteDataInjector;
}