Example usage for java.io.InputStream.toString()

List of usage examples for java.io.InputStream.toString()

Introduction

This page collects example usages of java.io.InputStream.toString().

Prototype

public String toString() 

Document

Returns a string representation of the object.
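
Note that InputStream does not override toString(): the inherited Object.toString() returns the class name plus an identity hash code, never the stream's contents. A minimal demonstration (readAllBytes requires Java 9+):

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

public class ToStringDemo {
    public static void main(String[] args) throws Exception {
        InputStream in = new ByteArrayInputStream("hello".getBytes(StandardCharsets.UTF_8));

        // Prints something like "java.io.ByteArrayInputStream@1b6d3586" --
        // the class name and identity hash, not the stream contents.
        System.out.println(in.toString());

        // To obtain the contents, read the stream instead:
        String contents = new String(in.readAllBytes(), StandardCharsets.UTF_8);
        System.out.println(contents); // prints "hello"
    }
}

Most of the examples below call toString() for logging, where an identity string is acceptable; a few use it where the stream's contents or size were apparently intended, which is flagged where it occurs.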

Usage

From source file:ch.cyberduck.core.Path.java

/**
 * Will copy from in to out. Does not attempt to skip any bytes from the streams.
 *
 * @param in       The stream to read from
 * @param out      The stream to write to
 * @param throttle The bandwidth limit
 * @param l        The stream listener to notify about bytes received and sent
 * @param status   Transfer status
 * @throws IOException                 Write not completed due to an I/O problem
 * @throws ConnectionCanceledException When transfer is interrupted by user setting the
 *                                     status flag to cancel.
 */
protected void download(final InputStream in, final OutputStream out, final BandwidthThrottle throttle,
        final StreamListener l, final TransferStatus status) throws IOException {
    if (log.isDebugEnabled()) {
        log.debug("download(" + in.toString() + ", " + out.toString());
    }
    this.getSession()
            .message(MessageFormat.format(Locale.localizedString("Downloading {0}", "Status"), this.getName()));

    this.transfer(new ThrottledInputStream(in, throttle), out, l, -1, status);
}

From source file:ch.cyberduck.core.Path.java

/**
 * Will copy from in to out. Will attempt to skip Status#getCurrent
 * bytes from the input stream but not from the output stream. The output stream
 * is assumed to append to an already existing file if
 * Status#getCurrent > 0.
 *
 * @param out      The stream to write to
 * @param in       The stream to read from
 * @param throttle The bandwidth limit
 * @param l        The stream listener to notify about bytes received and sent
 * @param offset   Start reading at offset in file
 * @param limit    Transfer only up to this length
 * @param status   Transfer status
 * @throws IOResumeException           If the input stream fails to skip the appropriate
 *                                     number of bytes
 * @throws IOException                 Write not completed due to an I/O problem
 * @throws ConnectionCanceledException When transfer is interrupted by user setting the
 *                                     status flag to cancel.
 */
protected void upload(final OutputStream out, final InputStream in, final BandwidthThrottle throttle,
        final StreamListener l, long offset, final long limit, final TransferStatus status) throws IOException {
    if (log.isDebugEnabled()) {
        log.debug("upload(" + out.toString() + ", " + in.toString());
    }
    this.getSession()
            .message(MessageFormat.format(Locale.localizedString("Uploading {0}", "Status"), this.getName()));

    if (offset > 0) {
        long skipped = in.skip(offset);
        if (log.isInfoEnabled()) {
            log.info(String.format("Skipping %d bytes", skipped));
        }
        if (skipped < status.getCurrent()) {
            throw new IOResumeException(
                    String.format("Skipped %d bytes instead of %d", skipped, status.getCurrent()));
        }
    }
    this.transfer(in, new ThrottledOutputStream(out, throttle), l, limit, status);
}
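
InputStream.skip() may legitimately skip fewer bytes than requested even before the end of the stream, which is why the method above verifies the returned count. A hedged sketch of a loop that insists on the full offset (the skipFully helper is our own, not part of the library above):

import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;

/** Skips exactly n bytes from in, or throws if the stream ends first. */
static void skipFully(InputStream in, long n) throws IOException {
    while (n > 0) {
        long skipped = in.skip(n);
        if (skipped > 0) {
            n -= skipped;
        } else if (in.read() >= 0) {
            n--; // skip() may return 0; fall back to a single-byte read
        } else {
            throw new EOFException(n + " bytes left to skip at end of stream");
        }
    }
}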

From source file:info.rmapproject.api.responsemgr.DiscoResponseManager.java

/**
 * Updates RMap:DiSCO.  Does this by inactivating the previous version of the DiSCO and 
 * creating a new version using valid client-provided RDF.
 *
 * @param origDiscoUri the DiSCO URI to update
 * @param discoRdf the DiSCO as RDF
 * @param contentType the request content type
 * @return HTTP Response
 * @throws RMapApiException the RMap API exception
 */
public Response updateRMapDiSCO(String origDiscoUri, InputStream discoRdf, RDFType contentType)
        throws RMapApiException {
    boolean reqSuccessful = false;
    Response response = null;
    try {
        if (origDiscoUri == null || origDiscoUri.length() == 0) {
            throw new RMapApiException(ErrorCode.ER_NO_OBJECT_URI_PROVIDED);
        }
        if (discoRdf == null || discoRdf.toString().length() == 0) {
            throw new RMapApiException(ErrorCode.ER_NO_DISCO_RDF_PROVIDED);
        }
        if (contentType == null) {
            throw new RMapApiException(ErrorCode.ER_NO_CONTENT_TYPE_PROVIDED);
        }

        URI uriOrigDiscoUri = null;
        try {
            origDiscoUri = URLDecoder.decode(origDiscoUri, "UTF-8");
            uriOrigDiscoUri = new URI(origDiscoUri);
        } catch (Exception ex) {
            throw RMapApiException.wrap(ex, ErrorCode.ER_PARAM_WONT_CONVERT_TO_URI);
        }

        RMapDiSCO newRmapDisco = rdfHandler.rdf2RMapDiSCO(discoRdf, contentType, Constants.BASE_URL);
        if (newRmapDisco == null) {
            throw new RMapApiException(ErrorCode.ER_CORE_RDF_TO_DISCO_FAILED);
        }

        //Get the current user to associate with the DiSCO update event
        RMapRequestAgent reqAgent = apiUserService.getCurrentRequestAgent();
        RMapEvent discoEvent = rmapService.updateDiSCO(uriOrigDiscoUri, newRmapDisco, reqAgent);

        if (discoEvent == null) {
            throw new RMapApiException(ErrorCode.ER_CORE_UPDATEDISCO_NOT_COMPLETED);
        }

        URI uDiscoURI = newRmapDisco.getId().getIri();
        if (uDiscoURI == null) {
            throw new RMapApiException(ErrorCode.ER_CORE_GET_DISCOID_RETURNED_NULL);
        }
        String sDiscoURI = uDiscoURI.toString();
        if (sDiscoURI.length() == 0) {
            throw new RMapApiException(ErrorCode.ER_CORE_DISCOURI_STRING_EMPTY);
        }

        URI uEventURI = discoEvent.getId().getIri();
        if (uEventURI == null) {
            throw new RMapApiException(ErrorCode.ER_CORE_GET_EVENTID_RETURNED_NULL);
        }
        String sEventURI = uEventURI.toString();
        if (sEventURI.length() == 0) {
            throw new RMapApiException(ErrorCode.ER_CORE_EVENTURI_STRING_EMPTY);
        }

        String newEventURL = Utils.makeEventUrl(sEventURI);
        String prevDiscoUrl = Utils.makeDiscoUrl(origDiscoUri);
        String newDiscoUrl = Utils.makeDiscoUrl(sDiscoURI);

        String linkRel = "<" + newEventURL + ">" + ";rel=\"" + PROV.WASGENERATEDBY + "\"";
        linkRel = linkRel.concat(",<" + prevDiscoUrl + ">" + ";rel=\"predecessor-version\"");

        response = Response.status(Response.Status.CREATED).entity(sDiscoURI).location(new URI(newDiscoUrl))
                .header("Link", linkRel) //switch this to link()
                .build();

        reqSuccessful = true;

    } catch (RMapApiException ex) {
        throw RMapApiException.wrap(ex);
    } catch (RMapDefectiveArgumentException ex) {
        throw RMapApiException.wrap(ex, ErrorCode.ER_GET_DISCO_BAD_ARGUMENT);
    } catch (RMapDiSCONotFoundException ex) {
        throw RMapApiException.wrap(ex, ErrorCode.ER_DISCO_OBJECT_NOT_FOUND);
    } catch (RMapInactiveVersionException ex) {
        throw RMapApiException.wrap(ex, ErrorCode.ER_CORE_UPDATING_INACTIVE_DISCO);
    } catch (RMapNotLatestVersionException ex) {
        throw RMapApiException.wrap(ex, ErrorCode.ER_NOT_LATEST_DISCOVERS);
    } catch (RMapException ex) {
        if (ex.getCause() instanceof RMapDeletedObjectException) {
            throw RMapApiException.wrap(ex, ErrorCode.ER_OBJECT_DELETED);
        } else if (ex.getCause() instanceof RMapTombstonedObjectException) {
            throw RMapApiException.wrap(ex, ErrorCode.ER_OBJECT_TOMBSTONED);
        } else if (ex.getCause() instanceof RMapObjectNotFoundException) {
            throw RMapApiException.wrap(ex, ErrorCode.ER_OBJECT_NOT_FOUND);
        } else {
            throw RMapApiException.wrap(ex, ErrorCode.ER_CORE_GENERIC_RMAP_EXCEPTION);
        }
    } catch (Exception ex) {
        throw RMapApiException.wrap(ex, ErrorCode.ER_UNKNOWN_SYSTEM_ERROR);
    } finally {
        if (rmapService != null)
            rmapService.closeConnection();
        if (!reqSuccessful && response != null)
            response.close();
    }
    return response;
}
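
Note that the discoRdf.toString().length() == 0 guard above can never fire: the default toString() returns "ClassName@hashcode", which is never empty. A hedged sketch of an actual emptiness probe using PushbackInputStream (the helper name is our own):

import java.io.IOException;
import java.io.InputStream;
import java.io.PushbackInputStream;

static InputStream requireNonEmpty(InputStream in) throws IOException {
    PushbackInputStream pushback = new PushbackInputStream(in, 1);
    int first = pushback.read();
    if (first == -1) {
        throw new IllegalArgumentException("no content in stream");
    }
    pushback.unread(first); // put the probed byte back for downstream readers
    return pushback;
}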

From source file:org.egov.android.view.activity.CreateComplaintActivity.java

/**
 * Function used to copy the file to complaint folder from gallery
 *
 * @param path
 *            => image file path from gallery
 */
@SuppressWarnings("resource")
private void _createImageFile(String path) {
    try {
        int photoNo = file_upload_limit + 1;
        String url = assetPath + File.separator + "current" + File.separator + "photo_" + photoNo + ".jpg";
        InputStream in = new FileInputStream(path);
        StorageManager sm = new StorageManager();
        Object[] obj = sm.getStorageInfo(CreateComplaintActivity.this);
        long totalSize = (Long) obj[2];
        if (totalSize < in.toString().length()) { // note: compares against the identity string's length, not the image size
            showMessage(getMessage(R.string.sdcard_space_not_sufficient));
            return;
        }
        FileOutputStream out = new FileOutputStream(url);
        byte[] data = new byte[in.available()];
        in.read(data);
        out.write(data);
        in.close();
        out.close();

        compressImage(url, url);

        _validateImageUrl(url);

    } catch (Exception e) {
        e.printStackTrace();
    }
}
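
Two caveats in the snippet above: in.toString().length() measures the stream's identity string (a few dozen characters), not the image size, so the free-space check is effectively inert; and available() plus a single read() is not a reliable way to copy a file. A hedged sketch of what was presumably intended, reusing the same path and url variables:

long imageSize = new File(path).length(); // actual size of the source image on disk
if (totalSize < imageSize) {
    showMessage(getMessage(R.string.sdcard_space_not_sufficient));
    return;
}
try (InputStream in = new FileInputStream(path);
     OutputStream out = new FileOutputStream(url)) {
    byte[] buffer = new byte[8192];
    int n;
    while ((n = in.read(buffer)) != -1) { // read() may return fewer bytes than requested
        out.write(buffer, 0, n);
    }
}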

From source file:org.dspace.content.packager.METSManifest.java

/**
 * Create a new manifest object from a serialized METS XML document.
 * Parse document read from the input stream, optionally validating.
 * @param is input stream containing serialized XML
 * @param validate if true, enable XML validation using schemas
 *   in document.  Also validates any sub-documents.
 * @throws MetadataValidationException if there is any error parsing
 *          or validating the METS.
 * @return new METSManifest object.
 */
public static METSManifest create(InputStream is, boolean validate, String configName)
        throws IOException, MetadataValidationException {
    SAXBuilder builder = new SAXBuilder(validate);

    builder.setIgnoringElementContentWhitespace(true);

    // Set validation feature
    if (validate) {
        builder.setFeature("http://apache.org/xml/features/validation/schema", true);
    }

    // Tell the parser where local copies of schemas are, to speed up
    // validation.  Local XSDs are identified in the configuration file.
    if (localSchemas.length() > 0) {
        builder.setProperty("http://apache.org/xml/properties/schema/external-schemaLocation", localSchemas);
    }

    // Parse the METS file
    Document metsDocument;

    try {
        metsDocument = builder.build(is);

        /*** XXX leave commented out except if needed for
         *** viewing the METS document that actually gets read.
         *
         * XMLOutputter outputPretty = new XMLOutputter(Format.getPrettyFormat());
         * log.debug("Got METS DOCUMENT:");
         * log.debug(outputPretty.outputString(metsDocument));
         ****/
    } catch (JDOMException je) {
        throw new MetadataValidationException("Error validating METS in " + is.toString(), je);
    }

    return new METSManifest(builder, metsDocument.getRootElement(), configName);
}
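
A hypothetical invocation of this factory (the file name and configuration name are placeholders of our own):

// assumes the caller declares throws IOException, MetadataValidationException
try (InputStream is = new FileInputStream("mets.xml")) {
    METSManifest manifest = METSManifest.create(is, true, "dspace-aip"); // validate = true
}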

From source file:org.signserver.client.cli.defaultimpl.TimeStampCommand.java

/**
 * Reads a certificate in PEM-format from an InputStream.
 *
 * The stream may contain other things, the first certificate in the
 * stream is read.
 *
 * @param certstream the input stream containing the certificate in
 * PEM-format
 * @return Ordered List of X509Certificate, first certificate first,
 * or empty List
 * @exception IOException if the stream cannot be read.
 * @exception CertificateException if the stream does not contain a
 * correct certificate.
 */
private List<X509Certificate> getCertsFromPEM(final InputStream certstream)
        throws IOException, CertificateException {
    final ArrayList<X509Certificate> ret = new ArrayList<X509Certificate>();

    final BufferedReader bufRdr = new BufferedReader(new InputStreamReader(certstream));

    while (bufRdr.ready()) {
        final ByteArrayOutputStream ostr = new ByteArrayOutputStream();
        final PrintStream opstr = new PrintStream(ostr);
        String temp;
        while ((temp = bufRdr.readLine()) != null && !temp.equals(PEM_BEGIN)) {
            // skip any material before the BEGIN boundary
        }
        if (temp == null) {
            throw new IOException("Error in " + certstream.toString() + ", missing " + PEM_BEGIN + " boundary");
        }
        while ((temp = bufRdr.readLine()) != null && !temp.equals(PEM_END)) {
            opstr.print(temp);
        }
        if (temp == null) {
            throw new IOException("Error in " + certstream.toString() + ", missing " + PEM_END + " boundary");
        }
        opstr.close();

        final byte[] certbuf = Base64.decode(ostr.toByteArray());
        ostr.close();
        // Phew, we're done; now decode the cert from the buffer back to an
        // X509Certificate object
        final CertificateFactory cf = getCertificateFactory();
        final X509Certificate x509cert = (X509Certificate) cf
                .generateCertificate(new ByteArrayInputStream(certbuf));
        ret.add(x509cert);
    }
    return ret;
}
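
The JDK's CertificateFactory can consume a stream of PEM- or DER-encoded certificates directly, which makes the manual boundary scanning above optional; a hedged alternative sketch:

import java.io.InputStream;
import java.security.cert.Certificate;
import java.security.cert.CertificateException;
import java.security.cert.CertificateFactory;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.List;

static List<X509Certificate> readPemCerts(InputStream certstream) throws CertificateException {
    CertificateFactory cf = CertificateFactory.getInstance("X.509");
    List<X509Certificate> certs = new ArrayList<>();
    // generateCertificates reads every certificate in the stream, in order
    for (Certificate c : cf.generateCertificates(certstream)) {
        certs.add((X509Certificate) c);
    }
    return certs;
}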

From source file:fr.esiea.esieaddress.controllers.importation.CSVImportCtrl.java

@RequestMapping(method = RequestMethod.POST)
@ResponseBody
@Secured("ROLE_USER")
public void upload(MultipartHttpServletRequest files, final HttpServletRequest request)
        throws DaoException, ServiceException, FileNotFoundException {
    LOGGER.info("[IMPORT] Start to import contact");

    //TODO Make this less verbose, and consider buffering to make it safer
    Map<String, MultipartFile> multipartFileMap = files.getMultiFileMap().toSingleValueMap();
    Set<String> fileNames = multipartFileMap.keySet();

    for (String fileName : fileNames) {

        MultipartFile multipartFile = multipartFileMap.get(fileName);
        String originalFilename = multipartFile.getOriginalFilename();

        if (checkFileName(originalFilename) && multipartFile.getSize() < FILE_SIZE_MAX) {

            InputStream inputStream = null;

            try {
                inputStream = multipartFile.getInputStream();
            } catch (IOException e) {
                throw new FileNotFoundException(e.toString());
            }

            try (Reader contactsFile = new InputStreamReader(inputStream)) {
                Map<String, Object> modelErrors = new HashMap<>();
                LOGGER.debug("[IMPORT] File is reading");
                Collection<Contact> contacts = csvService.ReadContactCSV(contactsFile);
                for (Contact contact : contacts) {
                    try {
                        contactCrudService.insert(contact);
                    } catch (ValidationException e) {
                        Object modelError = e.getModel();
                        LOGGER.warn("found an error in contact " + modelError);
                        modelErrors.put(contact.getId(), (Map) modelError);
                    }
                }

                if (!modelErrors.isEmpty())
                    throw new ValidationException(modelErrors);
            } catch (IOException e) {
                throw new FileNotFoundException(e.toString());
            } finally {
                // Defensive close: the try-with-resources reader above already closes this stream.
                if (inputStream != null)
                    try {
                        inputStream.close();
                    } catch (IOException e) {
                        LOGGER.error("[IMPORT] Unable to close the stream " + inputStream.toString());
                    }
            }
            }
        }
    }
}
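
Since closing an InputStreamReader closes the underlying stream, the separate inputStream variable and finally block above can be folded into the try-with-resources; a hedged condensation of the stream handling:

try (Reader contactsFile = new InputStreamReader(multipartFile.getInputStream())) {
    // ... same CSV parsing and contact insertion as above ...
} catch (IOException e) {
    throw new FileNotFoundException(e.toString());
}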

From source file:de.micromata.genome.test.web.SimHttpServletRequest.java

protected void setInputStream(final InputStream servletIs) {
    this.servletIs = new ServletInputStream() {

        public int available() throws IOException {
            return servletIs.available();
        }

        public void close() throws IOException {
            servletIs.close();
        }

        public boolean equals(Object obj) {
            return servletIs.equals(obj);
        }

        public int hashCode() {
            return servletIs.hashCode();
        }

        public void mark(int readlimit) {
            servletIs.mark(readlimit);
        }

        public boolean markSupported() {
            return servletIs.markSupported();
        }

        public int read(byte[] b, int off, int len) throws IOException {
            return servletIs.read(b, off, len);
        }

        public int read(byte[] b) throws IOException {
            return servletIs.read(b);
        }

        public void reset() throws IOException {
            servletIs.reset();
        }

        public long skip(long n) throws IOException {
            return servletIs.skip(n);
        }

        public String toString() {
            return servletIs.toString();
        }

        @Override
        public int read() throws IOException {
            throw new UnsupportedOperationException();
        }
    };
}
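
One caveat in the wrapper above: the single-byte read() throws instead of delegating, so any consumer that falls back to InputStream.read() will fail even though the array variants work. If that is not intentional, the override can simply forward:

@Override
public int read() throws IOException {
    return servletIs.read(); // delegate the one abstract method as well
}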

From source file:org.kepler.objectmanager.ActorMetadata.java

/**
 * Constructor. Takes in xml metadata. This should be a moml entity with the
 * kepler additional metadata properties. The entity is parsed and an
 * ActorMetadata object is created with appropriate fields.
 *
 * @param moml
 *            the xml metadata
 */
public ActorMetadata(InputStream moml) throws InvalidMetadataException {
    if (isDebugging && moml != null)
        log.debug("ActorMetadata(" + moml.toString() + ")");

    initialize();

    try {
        String momlStr = FileUtil.convertStreamToString(moml);

        if (isDebugging) {
            // log.debug("\n********************************");
            // log.debug(momlStr);
            // log.debug("********************************\n");
            log.debug("**** MoMLParser ****");
        }

        /**
         * The MoMLParser cannot be the first thing to be called on Kepler
         * generated MoML files. Because TypedIOPorts are converted to
         * PortAttributes, an error is thrown when trying to parse Kepler
         * generated MoMLs that have any added ports. TODO: How to fix this?
         */
        MoMLParser parser = new MoMLParser(new Workspace());
        parser.reset();

        if (isDebugging)
            log.debug("got moml parser outputing moml");

        NamedObj obj = null;
        try {
            obj = parser.parse(null, momlStr);
        } catch (Exception e) {
            log.error(e.getMessage());
        }
        if (obj == null)
            return;

        if (isDebugging) {
            String filename = "parsed-actor_" + obj.getName() + ".moml";
            writeDebugMomlFor(obj, filename);
        }

        if (obj instanceof TypedCompositeActor) {
            _links = ((TypedCompositeActor) obj).exportLinks(1, null);
        }
        _actorName = obj.getName();

        StringAttribute classAttribute = (StringAttribute) obj.getAttribute("class");

        if (classAttribute == null) {
            throw new InvalidMetadataException("Missing 'class' attribute for " + obj.getFullName());
        }

        Attribute idAtt = classAttribute.getAttribute("id");

        if (idAtt == null) {
            throw new InvalidMetadataException("Missing 'id' attribute for " + obj.getFullName());
        }

        _classId = ((StringAttribute) idAtt).getExpression();

        _className = classAttribute.getExpression();
        _internalClassName = obj.getClassName();

        Attribute actIdAtt = obj.getAttribute(NamedObjId.NAME);

        if (actIdAtt == null) {
            throw new InvalidMetadataException(
                    "Missing '" + NamedObjId.NAME + "' attribute for " + obj.getFullName());
        }

        _actorId = ((NamedObjId) actIdAtt).getExpression();

        NamedObj actor = getActorClass(_className, _actorName, obj);

        // Handle class definitions
        if (actor instanceof InstantiableNamedObj) {
            InstantiableNamedObj ino = (InstantiableNamedObj) actor;
            if (ino.isClassDefinition()) {
                _isClass = true;
                //_internalClassName = _className;
            }
        }
        this.setActor(actor);

        if (isDebugging) {
            String filename = "instantiated-actor-before_" + getActor().getName() + ".moml";
            writeDebugMomlFor(obj, filename);
        }

        // get the semantic type and dependency lsids and any general
        // properties
        for (Object o : obj.attributeList()) {

            Attribute a = null;
            if (o instanceof Attribute) {
                a = (Attribute) o;
            } else {
                log.error("Object is not an Attribute");
                continue;
            }

            getSemanticTypesAndDependencyLsids(a);

        }

        // get the ports

        // NOTE: we parse obj instead of actor since actor does not have
        // the PortAttributes
        parseNamedObj(obj);
        addAllRelations();

        if (isDebugging) {
            String filename = "instantiated-actor-after_" + getActor().getName() + ".moml";
            writeDebugMomlFor(obj, filename);
        }

    } catch (IOException ioe) {
        throw new InvalidMetadataException("Error reading data from lsid " + _actorId + ": " + ioe.getMessage(),
                ioe);
    } catch (Exception e) {
        if (isDebugging)
            log.debug(e.getStackTrace());
        throw new InvalidMetadataException("Error in parsing actor metadata: " + e.getMessage(), e);
    }
}
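
A hypothetical call site for this constructor (the file name is our own):

// assumes the caller declares throws IOException, InvalidMetadataException
try (InputStream moml = new FileInputStream("MyActor.moml")) {
    ActorMetadata metadata = new ActorMetadata(moml);
    // the parsed actor name, class id, ports, and semantic types are now available
}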

From source file:com.icesoft.faces.webapp.parser.TagToComponentMap.java

/**
 * Same as addTags but this one has more info such as attributes/descriptions
 *
 * @param tldInput The TLD to process
 * @throws IOException If digester barfs.
 */
public void addTagAttrib(InputStream tldInput) throws IOException {

    Digester digester = new Digester();
    digester.setNamespaceAware(true);
    digester.setValidating(false);
    digester.setEntityResolver(ParserUtils.entityResolver);
    digester.setUseContextClassLoader(false);

    /* Need to set the class loader to work.  Not sure why.
       May need to change when we move behind servlet container or Tomcat */
    digester.setClassLoader(loader);

    // This rule creates an element we can use to populate the map;
    digester.addObjectCreate("taglib/tag", "com.icesoft.faces.webapp.parser.TagToTagClassElement");
    digester.addObjectCreate("taglib/uri", "java.lang.StringBuffer");

    // This rule pushes everything into the hash table;
    NameRule nRule = new NameRule(tagToComponentMap, faceletsTaglibXmlWriter);
    digester.addRule("taglib/tag", nRule);
    digester.addRule("taglib/uri", nRule);

    // These rules scoop the values from <name> and <tag-class> elements;
    digester.addCallMethod("taglib/tag/name", "setTagName", 0);
    digester.addCallMethod("taglib/tag/tag-class", "setTagClass", 0);
    digester.addCallMethod("taglib/tag/description", "setDescription", 0);
    digester.addCallMethod("taglib/uri", "append", 0);

    digester.addObjectCreate("taglib/tag/attribute", "com.icesoft.faces.webapp.parser.AttributeElement");

    digester.addCallMethod("taglib/tag/attribute/name", "setName", 0);
    digester.addCallMethod("taglib/tag/attribute/required", "setRequired", 0);
    digester.addCallMethod("taglib/tag/attribute/description", "setDescription", 0);
    digester.addSetNext("taglib/tag/attribute", "addAttribute");

    try {
        digester.parse(tldInput);
    } catch (Throwable e) {
        IOException ioe = new IOException("Can't parse tld " + tldInput.toString());
        ioe.initCause(e);
        throw ioe;
    } finally {
        tldInput.close();
    }
}
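
A hedged usage sketch, assuming map is an already configured TagToComponentMap instance and the TLD ships on the classpath (the resource path is our own):

InputStream tld = TagToComponentMap.class.getResourceAsStream("/META-INF/ice-tags.tld");
if (tld != null) {
    map.addTagAttrib(tld); // addTagAttrib closes the stream itself
}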