Example usage for org.apache.commons.io Charsets UTF_8

List of usage examples for org.apache.commons.io Charsets UTF_8

Introduction

In this page you can find the example usage for org.apache.commons.io Charsets UTF_8.

Prototype

Charset UTF_8

To view the source code for org.apache.commons.io Charsets UTF_8, click the Source Link below.

Click Source Link

Document

Eight-bit Unicode Transformation Format.

Usage

From source file:org.apache.solr.security.TestAuthorizationFramework.java

public void distribSetUp() throws Exception {
    super.distribSetUp();
    // Seed ZooKeeper with a security.json that installs the mock authorization
    // plugin before the test cluster reads its security configuration.
    String securityJson = "{\"authorization\":{\"class\":\"org.apache.solr.security.MockAuthorizationPlugin\"}}";
    try (ZkStateReader reader = new ZkStateReader(zkServer.getZkAddress(), TIMEOUT, TIMEOUT)) {
        reader.getZkClient().create(ZkStateReader.SOLR_SECURITY_CONF_PATH,
                securityJson.getBytes(Charsets.UTF_8), CreateMode.PERSISTENT, true);
    }
}

From source file:org.apache.sqoop.tools.tool.RepositoryLoadTool.java

@SuppressWarnings("static-access")
@Override
public boolean runToolWithConfiguration(String[] arguments) {

    Options options = new Options();
    options.addOption(
            OptionBuilder.isRequired().hasArg().withArgName("filename").withLongOpt("input").create('i'));

    CommandLineParser parser = new GnuParser();

    try {
        CommandLine line = parser.parse(options, arguments);
        String inputFileName = line.getOptionValue('i');

        LOG.info("Reading JSON from file " + inputFileName);
        // try-with-resources closes the stream on every path; the original
        // leaked it whenever toString()/parse threw before input.close().
        try (InputStream input = new FileInputStream(inputFileName)) {
            String jsonTxt = IOUtils.toString(input, Charsets.UTF_8);
            JSONObject json = (JSONObject) JSONValue.parse(jsonTxt);
            return load(json);
        }

    } catch (FileNotFoundException e) {
        LOG.error("Repository dump file not found:", e);
        System.out.println("Input file not found. Please check Server logs for details.");
        return false;
    } catch (IOException e) {
        LOG.error("Unable to read repository dump file:", e);
        System.out.println("Unable to read input file. Please check Server logs for details.");
        return false;
    } catch (ParseException e) {
        LOG.error("Error parsing command line arguments:", e);
        System.out.println("Error parsing command line arguments. Please check Server logs for details.");
        return false;
    }
}

From source file:org.apache.streams.data.data.util.ActivitySerDeTest.java

/**
 * Tests that all example activities can be loaded into Activity beans
 * @throws Exception/*  w  w w .j a va  2 s  . c  om*/
 */
@Test
public void testActivitySerDe() throws Exception {

    InputStream testActivityFolderStream = ActivitySerDeTest.class.getClassLoader()
            .getResourceAsStream("activities");
    List<String> files = IOUtils.readLines(testActivityFolderStream, Charsets.UTF_8);

    for (String file : files) {
        LOGGER.info("File: " + file);
        LOGGER.info("Serializing: activities/" + file);
        InputStream testActivityFileStream = ActivitySerDeTest.class.getClassLoader()
                .getResourceAsStream("activities/" + file);
        Activity activity = MAPPER.readValue(testActivityFileStream, Activity.class);
        String activityString = MAPPER.writeValueAsString(activity);
        LOGGER.info("Deserialized: " + activityString);
    }
}

From source file:org.apache.streams.example.elasticsearch.test.ElasticsearchHdfsIT.java

/**
 * Seeds the "source" index with every example activity from the "activities"
 * resource folder before each test, then flushes so the data is searchable.
 */
@Before
public void prepareTest() throws Exception {

    testConfiguration = new ElasticsearchConfiguration();
    testConfiguration.setHosts(Lists.newArrayList("localhost"));
    testConfiguration.setClusterName(cluster().getClusterName());

    ElasticsearchWriterConfiguration setupWriterConfiguration = MAPPER.convertValue(testConfiguration,
            ElasticsearchWriterConfiguration.class);
    setupWriterConfiguration.setIndex("source");
    setupWriterConfiguration.setType("activity");
    setupWriterConfiguration.setBatchSize(5L); // uppercase L: '5l' reads as '51'

    ElasticsearchPersistWriter setupWriter = new ElasticsearchPersistWriter(setupWriterConfiguration);
    setupWriter.prepare(null);

    // try-with-resources: the original leaked every resource stream it opened.
    try (InputStream testActivityFolderStream = ElasticsearchHdfsIT.class.getClassLoader()
            .getResourceAsStream("activities")) {
        List<String> files = IOUtils.readLines(testActivityFolderStream, Charsets.UTF_8);

        for (String file : files) {
            LOGGER.info("File: " + file);
            try (InputStream testActivityFileStream = ElasticsearchHdfsIT.class.getClassLoader()
                    .getResourceAsStream("activities/" + file)) {
                Activity activity = MAPPER.readValue(testActivityFileStream, Activity.class);
                StreamsDatum datum = new StreamsDatum(activity, activity.getVerb());
                setupWriter.write(datum);
                LOGGER.info("Wrote: " + activity.getVerb());
            }
        }
    }

    setupWriter.cleanUp();

    flushAndRefresh();

}

From source file:org.apache.streams.example.elasticsearch.test.ElasticsearchReindexIT.java

/**
 * Seeds the "source" index with every example activity from the "activities"
 * resource folder before each test, then flushes so the data is searchable.
 */
@Before
public void prepareTest() throws Exception {

    testConfiguration = new ElasticsearchConfiguration();
    testConfiguration.setHosts(Lists.newArrayList("localhost"));
    testConfiguration.setClusterName(cluster().getClusterName());

    ElasticsearchWriterConfiguration setupWriterConfiguration = MAPPER.convertValue(testConfiguration,
            ElasticsearchWriterConfiguration.class);
    setupWriterConfiguration.setIndex("source");
    setupWriterConfiguration.setType("activity");
    setupWriterConfiguration.setBatchSize(5L); // uppercase L: '5l' reads as '51'

    ElasticsearchPersistWriter setupWriter = new ElasticsearchPersistWriter(setupWriterConfiguration);
    setupWriter.prepare(null);

    // try-with-resources: the original leaked every resource stream it opened.
    try (InputStream testActivityFolderStream = ElasticsearchReindexIT.class.getClassLoader()
            .getResourceAsStream("activities")) {
        List<String> files = IOUtils.readLines(testActivityFolderStream, Charsets.UTF_8);

        for (String file : files) {
            LOGGER.info("File: " + file);
            try (InputStream testActivityFileStream = ElasticsearchReindexIT.class.getClassLoader()
                    .getResourceAsStream("activities/" + file)) {
                Activity activity = MAPPER.readValue(testActivityFileStream, Activity.class);
                StreamsDatum datum = new StreamsDatum(activity, activity.getVerb());
                setupWriter.write(datum);
                LOGGER.info("Wrote: " + activity.getVerb());
            }
        }
    }

    setupWriter.cleanUp();

    flushAndRefresh();

}

From source file:org.apache.streams.example.elasticsearch.test.ElasticsearchReindexParentChildIT.java

/**
 * Seeds the "source" index before each test: installs the parent/child mapping
 * template, writes one vertex per known {@code ActivityObject} subtype, then
 * writes every example activity (child) pointing at its object's type (parent).
 */
@Before
public void prepareTest() throws Exception {

    testConfiguration = new ElasticsearchConfiguration();
    testConfiguration.setHosts(Lists.newArrayList("localhost"));
    testConfiguration.setClusterName(cluster().getClusterName());

    // Install the parent/child mapping template before any documents are indexed.
    PutIndexTemplateRequestBuilder putTemplateRequestBuilder = client().admin().indices()
            .preparePutTemplate("mappings");
    URL templateURL = ElasticsearchReindexParentChildIT.class.getResource("/ActivityChildObjectParent.json");
    ObjectNode template = MAPPER.readValue(templateURL, ObjectNode.class);
    String templateSource = MAPPER.writeValueAsString(template);
    putTemplateRequestBuilder.setSource(templateSource);

    client().admin().indices().putTemplate(putTemplateRequestBuilder.request()).actionGet();

    // Discover every ActivityObject subtype on the classpath so each gets a
    // parent document below.
    Reflections reflections = new Reflections(
            new ConfigurationBuilder().setUrls(ClasspathHelper.forPackage("org.apache.streams.pojo.json"))
                    .setScanners(new SubTypesScanner()));
    Set<Class<? extends ActivityObject>> objectTypes = reflections.getSubTypesOf(ActivityObject.class);

    ElasticsearchWriterConfiguration setupWriterConfiguration = MAPPER.convertValue(testConfiguration,
            ElasticsearchWriterConfiguration.class);
    setupWriterConfiguration.setIndex("source");
    setupWriterConfiguration.setBatchSize(5L); // uppercase L: '5l' reads as '51'

    ElasticsearchPersistWriter setupWriter = new ElasticsearchPersistWriter(setupWriterConfiguration);
    setupWriter.prepare(null);

    // Typed loop variable instead of the raw 'Class' the original used.
    for (Class<? extends ActivityObject> objectType : objectTypes) {
        Object object = objectType.newInstance();
        ActivityObject activityObject = MAPPER.convertValue(object, ActivityObject.class);
        StreamsDatum datum = new StreamsDatum(activityObject, activityObject.getObjectType());
        datum.getMetadata().put("type", "object");
        setupWriter.write(datum);
    }

    // try-with-resources: the original leaked every resource stream it opened.
    // (Original anchored these lookups on ElasticsearchReindexIT.class — a
    // copy-paste; both classes share a classloader, so behavior is unchanged.)
    try (InputStream testActivityFolderStream = ElasticsearchReindexParentChildIT.class.getClassLoader()
            .getResourceAsStream("activities")) {
        List<String> files = IOUtils.readLines(testActivityFolderStream, Charsets.UTF_8);

        for (String file : files) {
            LOGGER.info("File: " + file);
            try (InputStream testActivityFileStream = ElasticsearchReindexParentChildIT.class.getClassLoader()
                    .getResourceAsStream("activities/" + file)) {
                Activity activity = MAPPER.readValue(testActivityFileStream, Activity.class);
                StreamsDatum datum = new StreamsDatum(activity, activity.getVerb());
                datum.getMetadata().put("parent", activity.getObject().getObjectType());
                datum.getMetadata().put("type", "activity");
                setupWriter.write(datum);
                LOGGER.info("Wrote: " + activity.getVerb());
            }
        }
    }

    setupWriter.cleanUp();

    flushAndRefresh();

}

From source file:org.apache.streams.example.elasticsearch.test.MongoElasticsearchSyncIT.java

/**
 * Seeds the Mongo source collection with every example activity from the
 * "activities" resource folder before each test, counting writes in srcCount.
 */
@Before
public void prepareTest() throws Exception {

    syncConfiguration = MAPPER.readValue(MongoElasticsearchSyncIT.class.getResourceAsStream("/testSync.json"),
            MongoElasticsearchSyncConfiguration.class);

    syncConfiguration.getDestination().setClusterName(cluster().getClusterName());

    MongoPersistWriter setupWriter = new MongoPersistWriter(syncConfiguration.getSource());

    setupWriter.prepare(null);

    // try-with-resources: the original leaked every resource stream it opened.
    try (InputStream testActivityFolderStream = MongoElasticsearchSyncIT.class.getClassLoader()
            .getResourceAsStream("activities")) {
        List<String> files = IOUtils.readLines(testActivityFolderStream, Charsets.UTF_8);

        for (String file : files) {
            LOGGER.info("File: " + file);
            try (InputStream testActivityFileStream = MongoElasticsearchSyncIT.class.getClassLoader()
                    .getResourceAsStream("activities/" + file)) {
                Activity activity = MAPPER.readValue(testActivityFileStream, Activity.class);
                // "$"-prefixed keys are illegal in Mongo documents; strip the
                // license extension before writing.
                activity.getAdditionalProperties().remove("$license");
                StreamsDatum datum = new StreamsDatum(activity, activity.getVerb());
                setupWriter.write(datum);
                LOGGER.info("Wrote: " + activity.getVerb());
                srcCount++;
            }
        }
    }

    setupWriter.cleanUp();

}

From source file:org.apache.streams.graph.test.Neo4jHttpPersistWriterIT.java

/**
 * Writes every example activity to Neo4j over HTTP, using each participant's
 * object type as its node id.
 *
 * @throws IOException if a resource cannot be read or a write fails
 */
@Test
public void testNeo4jHttpPersistWriter() throws IOException {

    // try-with-resources: the original leaked every resource stream it opened.
    try (InputStream testActivityFolderStream = Neo4jHttpPersistWriterIT.class.getClassLoader()
            .getResourceAsStream("activities")) {
        List<String> files = IOUtils.readLines(testActivityFolderStream, Charsets.UTF_8);

        for (String file : files) {
            LOGGER.info("File: " + file);
            try (InputStream testActivityFileStream = Neo4jHttpPersistWriterIT.class.getClassLoader()
                    .getResourceAsStream("activities/" + file)) {
                Activity activity = mapper.readValue(testActivityFileStream, Activity.class);
                activity.getActor().setId(activity.getActor().getObjectType());
                activity.getObject().setId(activity.getObject().getObjectType());
                StreamsDatum datum = new StreamsDatum(activity, activity.getVerb());
                graphPersistWriter.write(datum);
                LOGGER.info("Wrote: " + activity.getVerb());
            }
        }
    }

    graphPersistWriter.cleanUp();

    // hit neo with http and check vertex/edge counts

}

From source file:org.apache.streams.graph.test.TestNeo4jBinaryPersistWriter.java

@Test
public void testNeo4jBinaryPersistWriter() throws Exception {

    // List the example activity files bundled under the "activities" resource folder.
    InputStream testActivityFolderStream = TestNeo4jBinaryPersistWriter.class.getClassLoader()
            .getResourceAsStream("activities");
    List<String> files = IOUtils.readLines(testActivityFolderStream, Charsets.UTF_8);

    for (String file : files) {
        LOGGER.info("File: " + file);
        InputStream testActivityFileStream = TestNeo4jBinaryPersistWriter.class.getClassLoader()
                .getResourceAsStream("activities/" + file);
        Activity activity = mapper.readValue(testActivityFileStream, Activity.class);
        // Use each participant's object type as its node id so the lookups
        // below ("person", "organization") can find the written nodes.
        activity.getActor().setId(activity.getActor().getObjectType());
        activity.getObject().setId(activity.getObject().getObjectType());
        // Some fixtures carry a "verb" extension on the object; promote it to
        // the object's type (and id) before writing.
        if (!Strings.isNullOrEmpty((String) activity.getObject().getAdditionalProperties().get("verb"))) {
            activity.getObject()
                    .setObjectType((String) activity.getObject().getAdditionalProperties().get("verb"));
            activity.getObject().setId(activity.getObject().getObjectType());
        }
        // Write actor and object as vertices first, then the activity itself
        // (the edge) only when the verb and both endpoint ids are present.
        if (!Strings.isNullOrEmpty(activity.getActor().getId())) {
            StreamsDatum actorDatum = new StreamsDatum(activity.getActor(), activity.getActor().getId());
            graphPersistWriter.write(actorDatum);
        }
        if (!Strings.isNullOrEmpty(activity.getObject().getId())) {
            StreamsDatum objectDatum = new StreamsDatum(activity.getObject(), activity.getObject().getId());
            graphPersistWriter.write(objectDatum);
        }
        if (!Strings.isNullOrEmpty(activity.getVerb()) && !Strings.isNullOrEmpty(activity.getActor().getId())
                && !Strings.isNullOrEmpty(activity.getObject().getId())) {
            StreamsDatum activityDatum = new StreamsDatum(activity, activity.getVerb());
            graphPersistWriter.write(activityDatum);
        }
        LOGGER.info("Wrote: " + activity.getVerb());
    }

    graphPersistWriter.cleanUp();

    // Verify directly against the embedded graph: the fixtures are expected to
    // produce "person" and "organization" nodes connected by outgoing "join"
    // and "leave" relationships from person to organization.
    graphPersistWriter.graph.beginTx();
    Node organization = graphPersistWriter.graph.findNodes(DynamicLabel.label("streams"), "id", "organization")
            .next();
    Node person = graphPersistWriter.graph.findNodes(DynamicLabel.label("streams"), "id", "person").next();
    Assert.assertNotNull(organization);
    Assert.assertTrue(organization.hasLabel(DynamicLabel.label("streams")));
    Assert.assertTrue(organization.hasLabel(DynamicLabel.label("organization")));
    Assert.assertNotNull(person);
    Assert.assertTrue(person.hasLabel(DynamicLabel.label("streams")));
    Assert.assertTrue(person.hasLabel(DynamicLabel.label("person")));
    Assert.assertTrue(person.hasRelationship());
    Assert.assertTrue(person.hasRelationship(Direction.OUTGOING));
    Assert.assertTrue(person.hasRelationship(DynamicRelationshipType.withName("join"), Direction.OUTGOING));
    Assert.assertTrue(person.hasRelationship(DynamicRelationshipType.withName("leave"), Direction.OUTGOING));
    //        Iterable < Relationship > relationships = person.getRelationships(Direction.OUTGOING);
    //        List<Relationship> relationshipList = Lists.newArrayList(relationships);
    //        Assert.assertEquals(relationshipList.size(), 2);
    Relationship joinRelationship = person.getSingleRelationship(DynamicRelationshipType.withName("join"),
            Direction.OUTGOING);
    Assert.assertNotNull(joinRelationship);
    Node joinRelationshipStart = joinRelationship.getStartNode();
    Node joinRelationshipEnd = joinRelationship.getEndNode();
    Assert.assertEquals(joinRelationshipStart, person);
    Assert.assertEquals(joinRelationshipEnd, organization);
    Relationship leaveRelationship = person.getSingleRelationship(DynamicRelationshipType.withName("leave"),
            Direction.OUTGOING);
    Assert.assertNotNull(leaveRelationship);
    Node leaveRelationshipStart = leaveRelationship.getStartNode();
    Node leaveRelationshipEnd = leaveRelationship.getEndNode();
    Assert.assertEquals(leaveRelationshipStart, person);
    Assert.assertEquals(leaveRelationshipEnd, organization);

}

From source file:org.apache.streams.mongo.test.TestMongoPersist.java

/**
 * Round-trips every example activity through Mongo: writes each fixture to the
 * "activities" collection, then reads the collection back and checks the count.
 */
@Test
public void testMongoPersist() throws Exception {

    MongoConfiguration mongoConfiguration = new MongoConfiguration().withHost("localhost").withDb("test")
            .withPort(37017L).withCollection("activities"); // uppercase L: '37017l' reads as '370171'

    MongoPersistWriter writer = new MongoPersistWriter(mongoConfiguration);

    writer.prepare(null);

    // try-with-resources: the original leaked every resource stream it opened.
    try (InputStream testActivityFolderStream = TestMongoPersist.class.getClassLoader()
            .getResourceAsStream("activities")) {
        List<String> files = IOUtils.readLines(testActivityFolderStream, Charsets.UTF_8);

        for (String file : files) {
            LOGGER.info("File: " + file);
            try (InputStream testActivityFileStream = TestMongoPersist.class.getClassLoader()
                    .getResourceAsStream("activities/" + file)) {
                Activity activity = MAPPER.readValue(testActivityFileStream, Activity.class);
                // "$"-prefixed keys are illegal in Mongo documents; strip the
                // license extension before writing.
                activity.getAdditionalProperties().remove("$license");
                StreamsDatum datum = new StreamsDatum(activity, activity.getVerb());
                writer.write(datum);
                LOGGER.info("Wrote: " + activity.getVerb());
                count++;
            }
        }
    }

    writer.cleanUp();

    MongoPersistReader reader = new MongoPersistReader(mongoConfiguration);

    reader.prepare(null);

    StreamsResultSet resultSet = reader.readAll();

    // NOTE(review): bare 'assert' is a no-op unless the JVM runs with -ea;
    // consider Assert.assertEquals so the check always fires.
    assert (resultSet.size() == count);

}