Example usage for org.apache.commons.io IOUtils toInputStream

Introduction

This page lists usage examples for org.apache.commons.io.IOUtils.toInputStream.

Prototype

public static InputStream toInputStream(String input) 

Document

Converts the specified string to an input stream, encoded as bytes using the default character encoding of the platform.
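
Note that this overload depends on the JVM's default charset. Later Commons IO releases also provide toInputStream(String input, Charset charset) and deprecate the single-argument form, so the encoding can be made explicit. The snippet below is a minimal, self-contained sketch (not drawn from the examples on this page) showing both forms:

import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import org.apache.commons.io.IOUtils;

public class ToInputStreamExample {
    public static void main(String[] args) throws Exception {
        // Uses the platform default charset (the overload documented above).
        InputStream platformDefault = IOUtils.toInputStream("hello world");

        // Charset-explicit overload; avoids relying on the platform default.
        InputStream explicitUtf8 = IOUtils.toInputStream("hello world", StandardCharsets.UTF_8);

        // Round-trip the explicit stream back to a String and print it.
        System.out.println(IOUtils.toString(explicitUtf8, StandardCharsets.UTF_8));

        platformDefault.close();
        explicitUtf8.close();
    }
}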

Usage

From source file:com.amediamanager.config.S3ConfigurationProvider.java

@Override
public void persistNewProperty(String key, String value) {
    if (this.properties != null) {
        this.properties.put(key, value);
        AmazonS3 s3Client = new AmazonS3Client();
        try {
            s3Client.putObject(this.bucket, this.key,
                    IOUtils.toInputStream(this.propsToString(this.properties)), null);
        } catch (AmazonS3Exception ase) {
            LOG.error("Error persisting config from s3://{}/{}", new Object[] { this.bucket, this.key, ase });
        }
    } else {
        LOG.error("Could not persist new property because this.properties is null.");
    }
}

From source file:com.temenos.useragent.generic.mediatype.JsonPayloadHandler.java

private EntityWrapper createEntityWrapper(JSONObject jsonObject) {
    EntityWrapper entityWrapper = new DefaultEntityWrapper();
    JsonEntityHandler jsonEntity = new JsonEntityHandler();
    jsonEntity.setContent(IOUtils.toInputStream(jsonObject.toString()));
    entityWrapper.setHandler(jsonEntity);
    return entityWrapper;
}

From source file:ddf.content.endpoint.rest.ContentEndpointCreateTest.java

@Test
public void testParseAttachmentContentTypeSpecified() throws Exception {
    InputStream is = IOUtils.toInputStream(TEST_JSON);
    MetadataMap<String, String> headers = new MetadataMap<String, String>();
    headers.add(CONTENT_DISPOSITION, "form-data; name=file; filename=C:\\DDF\\geojson_valid.json");
    headers.add(CONTENT_TYPE, "application/json;id=geojson");
    Attachment attachment = new Attachment(is, headers);

    ContentFramework framework = mock(ContentFramework.class);
    ContentEndpoint endpoint = new ContentEndpoint(framework, getMockMimeTypeMapper());
    CreateInfo createInfo = endpoint.parseAttachment(attachment);
    Assert.assertNotNull(createInfo);
    Assert.assertEquals("application/json;id=geojson", createInfo.getContentType());
    Assert.assertEquals("geojson_valid.json", createInfo.getFilename());
}

From source file:edu.northwestern.bioinformatics.studycalendar.restlets.TemplateResource.java

@Override
public Representation put(Representation entity, Variant variant) throws ResourceException {
    Study out;
    try {
        String in = entity.getText();
        String error = getTemplateSchemaValidator().validate(IOUtils.toInputStream(in));
        if (isNotBlank(error)) {
            throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST, error);
        }
        Study imported = templateImportService.readAndSaveTemplate(getRequestedObject(),
                IOUtils.toInputStream(in));
        out = studyService.getCompleteTemplateHistory(imported);
    } catch (IOException e) {
        log.warn("PUT failed with IOException", e);
        throw new ResourceException(e);
    }
    getResponse().setEntity(createXmlRepresentation(out));
    if (getRequestedObject() == null) {
        getResponse().setStatus(Status.SUCCESS_CREATED);
    } else {
        getResponse().setStatus(Status.SUCCESS_OK);
    }

    return null;
}

From source file:com.vectorcast.plugins.vectorcastexecution.job.ManageProject.java

/**
 * Parse the project file.
 * @throws IOException exception
 * @throws InvalidProjectFileException exception
 */
public void parse() throws IOException, InvalidProjectFileException {
    Integer version = 14;
    try {
        DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
        DocumentBuilder dbBuilder = dbFactory.newDocumentBuilder();
        InputStream is = IOUtils.toInputStream(manageFile);
        Document doc = dbBuilder.parse(is);

        NodeList nList = doc.getElementsByTagName("project");
        Node projectNode = nList.item(0);
        String verStr = ((Element) projectNode).getAttribute("version");
        version = Integer.valueOf(verStr);
        for (int pos = 0; pos < nList.getLength(); pos++) {
            Node node = nList.item(pos);
            NodeList innerList = node.getChildNodes();
            for (int inner = 0; inner < innerList.getLength(); inner++) {
                Node innerNode = innerList.item(inner);
                if (innerNode.getNodeName().equals("group") && node.getNodeType() == Node.ELEMENT_NODE) {
                    Element element = (Element) innerNode;
                    String name = element.getAttribute("name");
                    Group group = new Group(name);
                    groups.add(group);
                    group.parse(innerNode);
                } else if (version < 17 && innerNode.getNodeName().equals("source-collection")
                        && node.getNodeType() == Node.ELEMENT_NODE) {
                    Element element = (Element) innerNode;
                    String name = element.getAttribute("name");
                    Source source = new Source(name);
                    sources.add(source);
                    source.parse(innerNode);
                } else if (version >= 17 && innerNode.getNodeName().equals("compiler")
                        && node.getNodeType() == Node.ELEMENT_NODE) {
                    Element element = (Element) innerNode;
                    Compiler compiler = new Compiler();
                    compilers.add(compiler);
                    compiler.parse(innerNode);
                }
            }
        }
    } catch (ParserConfigurationException ex) {
        Logger.getLogger(NewSingleJob.class.getName()).log(Level.SEVERE, null, ex);
        throw new InvalidProjectFileException();
    } catch (SAXException ex) {
        //            Logger.getLogger(NewSingleJob.class.getName()).log(Level.SEVERE, null, ex);
        throw new InvalidProjectFileException();
    }
    if (version < 17) {
        for (Source source : sources) {
            for (Platform platform : source.platforms) {
                for (Compiler compiler : platform.compilers) {
                    for (TestSuite testSuite : compiler.testsuites) {
                        for (Group group : testSuite.groups) {
                            for (Environment env : group.getEnvs()) {
                                MultiJobDetail job = new MultiJobDetail(source.getName(), platform.getName(),
                                        compiler.getName(), testSuite.getName(), env.getName());
                                jobs.add(job);
                            }
                        }
                    }
                }
            }
        }
    } else if (version >= 17) {
        for (Compiler compiler : compilers) {
            for (TestSuite testSuite : compiler.testsuites) {
                for (Group group : testSuite.groups) {
                    for (Environment env : group.getEnvs()) {
                        MultiJobDetail job = new MultiJobDetail(/*source*/null, /*platform*/null,
                                compiler.getName(), testSuite.getName(), env.getName());
                        jobs.add(job);
                    }
                }
            }
        }
    }
}

From source file:com.norconex.committer.elasticsearch.ElasticsearchCommitterTest.java

@Test
public void testCommitAdd() throws Exception {
    String content = "hello world!";
    InputStream is = IOUtils.toInputStream(content);

    // Add new doc to ES
    String id = "1";
    committer.add(id, is, new Properties());
    committer.commit();

    IOUtils.closeQuietly(is);

    // Check that it's in ES
    GetResponse response = client.prepareGet(indexName, typeName, id).execute().actionGet();
    assertTrue(response.isExists());
    // Check content

    Map<String, Object> responseMap = response.getSource();
    assertEquals(content, responseMap.get(ElasticsearchCommitter.DEFAULT_ES_CONTENT_FIELD));
}

From source file:com.msopentech.odatajclient.engine.communication.request.cud.ODataValueUpdateRequest.java

/**
 * {@inheritDoc }
 */
@Override
protected InputStream getPayload() {
    return IOUtils.toInputStream(value.toString());
}

From source file:com.github.thesmartenergy.mdq.entities.Ontology.java

@PostConstruct
public void initialize() {
    try {
        String dir = context.getRealPath("/WEB-INF/classes/");
        LOG.info(dir);
        LOG.info(context.getRealPath("/WEB-INF/classes"));
        LOG.info(context.getClassLoader().getResource("/").toString());
        LOG.info(context.getClassLoader().getResource("/").getPath());
        LOG.info(context.getClassLoader().getResource("/").toURI().toString());

        // initialize file manager
        fileManager = FileManager.makeGlobal();
        Locator loc = new LocatorFile(dir);
        Model conf = RDFDataMgr.loadModel(dir + "/configuration.ttl");
        LocationMapper mapper = new LocationMapper(conf);
        fileManager.addLocator(loc);
        fileManager.setLocationMapper(mapper);
        FileManager.setGlobalFileManager(fileManager);

        turtle = IOUtils.toString(fileManager.open(uri));
        Model model = ModelFactory.createDefaultModel().read(IOUtils.toInputStream(turtle), base, "TTL");
        model.listSubjects().forEachRemaining(new Consumer<Resource>() {
            @Override
            public void accept(Resource t) {
                if (!t.isURIResource()) {
                    return;
                }
                documents.put(t.getURI(), Ontology.this);
            }
        });
        documents.put(uri, this);

        // parse the SPARQL-Generate query and create plan
        String queryString = IOUtils.toString(fileManager.open(base + "query/Vocabulary"));
        PlanFactory factory = new PlanFactory(fileManager);
        RootPlan plan = factory.create(queryString);

        // parse each vocabulary
        File vocabDir = new File(Ontology.class.getResource("/vocab").toURI());
        for (File vocabFile : vocabDir.listFiles()) {
            // create the initial model
            Model vocabModel = ModelFactory.createDefaultModel();

            // create the initial binding
            String variable = "msg";
            String uri = "urn:iana:mime:application/json";
            String message = IOUtils.toString(new FileInputStream(vocabFile));
            QuerySolutionMap initialBinding = new QuerySolutionMap();
            TypeMapper typeMapper = TypeMapper.getInstance();
            RDFDatatype dt = typeMapper.getSafeTypeByName(uri);
            Node arqLiteral = NodeFactory.createLiteral(message, dt);
            RDFNode jenaLiteral = vocabModel.asRDFNode(arqLiteral);
            initialBinding.add(variable, jenaLiteral);

            // execute the plan
            plan.exec(initialBinding, vocabModel);

            //                File f = new File("C:/temp/mdq/out.ttl");
            //                OutputStream out = new FileOutputStream(f);
            //                vocabModel.write(out, "TTL"); 
            //                out.close();

            // get the uri of the vocabulary                
            Resource vocabResource = vocabModel
                    .listSubjectsWithProperty(
                            vocabModel.createProperty("http://www.w3.org/1999/02/22-rdf-syntax-ns#type"),
                            vocabModel.createResource("http://purl.org/vocommons/voaf#Vocabulary"))
                    .nextResource();

            // set the vocabulary
            final Vocabulary vocabularyDocument = new Vocabulary(this, vocabModel);
            documents.put(vocabResource.getURI(), vocabularyDocument);

            // get the uri of each of the resources defined by the vocabulary
            vocabModel.listSubjectsWithProperty(
                    vocabModel.createProperty("http://www.w3.org/2000/01/rdf-schema#isDefinedBy"),
                    vocabResource).forEachRemaining(new Consumer<Resource>() {
                        @Override
                        public void accept(Resource t) {
                            if (!t.isURIResource()) {
                                return;
                            }
                            documents.put(t.getURI(), vocabularyDocument);
                        }
                    });
        }
    } catch (Exception ex) {
        LOG.log(Level.SEVERE, "error while initializing app ", ex);
        throw new RuntimeException("error while initializing app ", ex);
    }
}

From source file:gaffer.serialisation.json.hyperloglogplus.HyperLogLogPlusJsonSerialisationTest.java

@Test
public void testNullHyperLogLogSketchDeserialisedAsEmptySketch() throws IOException {
    // Given
    final String sketchAsString = "{}";

    // When / Then
    try {
        // TODO - See 'Can't easily create HyperLogLogPlus sketches in JSON'
        mapper.readValue(IOUtils.toInputStream(sketchAsString), HyperLogLogPlus.class);
        fail("Exception expected");
    } catch (IllegalArgumentException e) {
        assertNotNull(e);
    }
}

From source file:com.temenos.useragent.generic.mediatype.HalJsonPayloadHandler.java

@Override
public void setPayload(String payload) {
    if (payload == null) {
        throw new IllegalArgumentException("Payload is null");
    }
    ReadableRepresentation jsonRepresentation = representationFactory.readRepresentation(
            RepresentationFactory.HAL_JSON, new InputStreamReader(IOUtils.toInputStream(payload)));
    representation = jsonRepresentation;
}