Example usage for java.io BufferedInputStream reset

Introduction

This page collects examples of how open-source projects call java.io.BufferedInputStream.reset().

Prototype

public synchronized void reset() throws IOException 

Document

See the general contract of the reset method of InputStream.

Usage
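Before the project examples, here is a minimal self-contained sketch of the mark/read/reset pattern they all rely on (the class and helper names are illustrative, not taken from any project below): mark the stream with a read limit at least as large as the probe, read the header bytes, then reset so later consumers still see the stream from its beginning. If more bytes are read than the mark's read limit allows, reset() may throw an IOException.

import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

public class PeekHeaderExample {

    // Reads up to `size` leading bytes, then resets the stream so callers can
    // still consume it from the very beginning.
    static byte[] peekHeader(BufferedInputStream in, int size) throws IOException {
        in.mark(size); // the read limit must cover every byte read before reset()
        byte[] header = new byte[size];
        int n = in.read(header, 0, size); // may return fewer bytes, or -1 at EOF
        in.reset(); // rewind to the marked position
        byte[] result = new byte[Math.max(n, 0)];
        System.arraycopy(header, 0, result, 0, result.length);
        return result;
    }

    public static void main(String[] args) throws IOException {
        InputStream raw = new ByteArrayInputStream("$FL2 ...rest of file".getBytes(StandardCharsets.US_ASCII));
        BufferedInputStream in = new BufferedInputStream(raw);
        System.out.println(new String(peekHeader(in, 4), StandardCharsets.US_ASCII)); // prints "$FL2"
        // The stream was reset, so a subsequent read still starts at the header.
    }
}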

From source file:edu.harvard.iq.dvn.ingest.statdataio.impl.plugins.sav.SAVFileReaderSpi.java

@Override
public boolean canDecodeInput(BufferedInputStream stream) throws IOException {
    if (stream == null) {
        throw new IllegalArgumentException("stream == null!");
    }

    dbgLog.fine("\napplying the sav test: inputstream case\n");

    byte[] b = new byte[SAV_HEADER_SIZE];

    if (stream.markSupported()) {
        stream.mark(0);
    }
    int nbytes = stream.read(b, 0, SAV_HEADER_SIZE);

    if (nbytes == 0) {
        throw new IOException();
    }
    //printHexDump(b, "hex dump of the byte-array");
    dbgLog.info(
            "hex dump of the 1st 4 bytes[$FL2 == 24 46 4C 32]=" + (new String(Hex.encodeHex(b))).toUpperCase());

    if (stream.markSupported()) {
        stream.reset();
    }

    boolean DEBUG = false;

    String hdr4sav = new String(b);
    dbgLog.fine("from string[$FL2 == 24 46 4C 32]=" + new String(Hex.encodeHex(b)).toUpperCase());

    if (hdr4sav.equals(SAV_FILE_SIGNATURE)) {
        dbgLog.fine("this file is spss-sav type");
        return true;
    } else {
        dbgLog.fine("this file is NOT spss-sav type");
        return false;
    }
}

From source file:edu.harvard.iq.dataverse.ingest.tabulardata.impl.plugins.sav.SAVFileReaderSpi.java

@Override
public boolean canDecodeInput(BufferedInputStream stream) throws IOException {
    if (stream == null) {
        throw new IllegalArgumentException("stream == null!");
    }

    dbgLog.fine("\napplying the sav test: inputstream case\n");

    byte[] b = new byte[SAV_HEADER_SIZE];

    if (stream.markSupported()) {
        stream.mark(0);
    }
    int nbytes = stream.read(b, 0, SAV_HEADER_SIZE);

    if (nbytes == 0) {
        throw new IOException();
    }
    //printHexDump(b, "hex dump of the byte-array");
    dbgLog.fine(
            "hex dump of the 1st 4 bytes[$FL2 == 24 46 4C 32]=" + (new String(Hex.encodeHex(b))).toUpperCase());

    if (stream.markSupported()) {
        stream.reset();
    }

    boolean DEBUG = false;

    String hdr4sav = new String(b);
    dbgLog.fine("from string[$FL2 == 24 46 4C 32]=" + new String(Hex.encodeHex(b)).toUpperCase());

    if (hdr4sav.equals(SAV_FILE_SIGNATURE)) {
        dbgLog.fine("this file is spss-sav type");
        return true;
    } else {
        dbgLog.fine("this file is NOT spss-sav type");
        return false;
    }
}

From source file:de.tudarmstadt.ukp.dkpro.core.io.bincas.BinaryCasReader.java

@Override
public void getNext(CAS aCAS) throws IOException, CollectionException {
    Resource res = nextFile();
    InputStream is = null;
    try {
        is = CompressionUtils.getInputStream(res.getLocation(), res.getInputStream());
        BufferedInputStream bis = new BufferedInputStream(is);

        TypeSystemImpl ts = null;

        // Check if this is original UIMA CAS format or DKPro Core format
        bis.mark(10);
        DataInputStream dis = new DataInputStream(bis);
        byte[] dkproHeader = new byte[] { 'D', 'K', 'P', 'r', 'o', '1' };
        byte[] header = new byte[dkproHeader.length];
        dis.read(header);

        // If it is DKPro Core format, read the type system
        if (Arrays.equals(header, dkproHeader)) {
            ObjectInputStream ois = new ObjectInputStream(bis);
            CASMgrSerializer casMgrSerializer = (CASMgrSerializer) ois.readObject();
            ts = casMgrSerializer.getTypeSystem();
            ts.commit();
        } else {
            bis.reset();
        }

        if (ts == null) {
            // Check if this is a UIMA binary CAS stream
            byte[] uimaHeader = new byte[] { 'U', 'I', 'M', 'A' };

            byte[] header4 = new byte[uimaHeader.length];
            System.arraycopy(header, 0, header4, 0, header4.length);

            if (header4[0] != 'U') {
                ArrayUtils.reverse(header4);
            }

            // If it is not a UIMA binary CAS stream, assume it is output from
            // SerializedCasWriter
            if (!Arrays.equals(header4, uimaHeader)) {
                ObjectInputStream ois = new ObjectInputStream(bis);
                CASCompleteSerializer serializer = (CASCompleteSerializer) ois.readObject();
                deserializeCASComplete(serializer, (CASImpl) aCAS);
            } else {
                // Since there was no type system, it must be type 0 or 4
                deserializeCAS(aCAS, bis);
            }
        } else {
            // Only format 6 can have type system information
            deserializeCAS(aCAS, bis, ts, null);
        }
    } catch (ResourceInitializationException e) {
        throw new IOException(e);
    } catch (ClassNotFoundException e) {
        throw new IOException(e);
    } finally {
        closeQuietly(is);
    }
}

From source file:com.google.acre.script.AcreFetch.java

@SuppressWarnings("boxing")
public void fetch(boolean system, String response_encoding, boolean log_to_user, boolean no_redirect) {

    if (request_url.length() > 2047) {
        throw new AcreURLFetchException("fetching URL failed - url is too long");
    }

    DefaultHttpClient client = new DefaultHttpClient(_connectionManager, null);

    HttpParams params = client.getParams();

    // pass the deadline down to the invoked service.
    // this will be ignored unless we are fetching from another
    // acre server.
    // note that we may send a deadline that is already passed:
    // it's not our job to throw here since we don't know how
    // the target service will interpret the quota header.
    // NOTE: this is done *after* the user sets the headers to overwrite
    // whatever settings they might have tried to change for this value
    // (which could be a security hazard)
    long sub_deadline = (HostEnv.LIMIT_EXECUTION_TIME) ? _deadline - HostEnv.SUBREQUEST_DEADLINE_ADVANCE
            : System.currentTimeMillis() + HostEnv.ACRE_URLFETCH_TIMEOUT;
    int reentries = _reentries + 1;
    request_headers.put(HostEnv.ACRE_QUOTAS_HEADER, "td=" + sub_deadline + ",r=" + reentries);

    // if this is not an internal call, we need to invoke the call thru a proxy
    if (!_internal) {
        // XXX No sense wasting the resources to gzip inside the network.
        // XXX seems that twitter gets upset when we do this
        /*
        if (!request_headers.containsKey("accept-encoding")) {
        request_headers.put("accept-encoding", "gzip");
        }
        */
        String proxy_host = Configuration.Values.HTTP_PROXY_HOST.getValue();
        int proxy_port = -1;
        if (!(proxy_host.length() == 0)) {
            proxy_port = Configuration.Values.HTTP_PROXY_PORT.getInteger();
            HttpHost proxy = new HttpHost(proxy_host, proxy_port, "http");
            params.setParameter(AllClientPNames.DEFAULT_PROXY, proxy);
        }
    }

    params.setParameter(AllClientPNames.COOKIE_POLICY, CookiePolicy.BROWSER_COMPATIBILITY);

    // in msec

    long timeout = _deadline - System.currentTimeMillis();
    if (timeout < 0)
        timeout = 0;
    params.setParameter(AllClientPNames.CONNECTION_TIMEOUT, (int) timeout);
    params.setParameter(AllClientPNames.SO_TIMEOUT, (int) timeout);

    // we're not streaming the request so this should be a win.
    params.setParameter(AllClientPNames.TCP_NODELAY, true);

    // reuse an existing socket if it is in TIME_WAIT state.
    params.setParameter(AllClientPNames.SO_REUSEADDR, true);

    // set the encoding of our POST payloads to UTF-8
    params.setParameter(AllClientPNames.HTTP_CONTENT_CHARSET, "UTF-8");

    BasicCookieStore cstore = new BasicCookieStore();
    for (AcreCookie cookie : request_cookies.values()) {
        cstore.addCookie(cookie.toClientCookie());
    }
    client.setCookieStore(cstore);

    HttpRequestBase method;

    HashMap<String, String> logmsg = new HashMap<String, String>();
    logmsg.put("Method", request_method);
    logmsg.put("URL", request_url);

    params.setParameter(AllClientPNames.HANDLE_REDIRECTS, !no_redirect);
    logmsg.put("Redirect", Boolean.toString(!no_redirect));

    try {
        if (request_method.equals("GET")) {
            method = new HttpGet(request_url);
        } else if (request_method.equals("POST")) {
            method = new HttpPost(request_url);
        } else if (request_method.equals("HEAD")) {
            method = new HttpHead(request_url);
        } else if (request_method.equals("PUT")) {
            method = new HttpPut(request_url);
        } else if (request_method.equals("DELETE")) {
            method = new HttpDelete(request_url);
        } else if (request_method.equals("PROPFIND")) {
            method = new HttpPropFind(request_url);
        } else {
            throw new AcreURLFetchException("Failed: unsupported (so far) method " + request_method);
        }
        method.getParams().setBooleanParameter(AllClientPNames.USE_EXPECT_CONTINUE, false);
    } catch (java.lang.IllegalArgumentException e) {
        throw new AcreURLFetchException("Unable to fetch URL; this is most likely an issue with URL encoding.");
    } catch (java.lang.IllegalStateException e) {
        throw new AcreURLFetchException("Unable to fetch URL; possibly an illegal protocol?");
    }

    StringBuilder request_header_log = new StringBuilder();
    for (Map.Entry<String, String> header : request_headers.entrySet()) {
        String key = header.getKey();
        String value = header.getValue();

        // XXX should suppress cookie headers?
        // content-type and length?

        if ("content-type".equalsIgnoreCase(key)) {
            Matcher m = contentTypeCharsetPattern.matcher(value);
            if (m.find()) {
                content_type = m.group(1);
                content_type_charset = m.group(2);
            } else {
                content_type_charset = "utf-8";
            }
            method.addHeader(key, value);
        } else if ("content-length".equalsIgnoreCase(key)) {
            // ignore user-supplied content-length, which is
            // probably wrong due to chars vs bytes and is
            // redundant anyway
            ArrayList<String> msg = new ArrayList<String>();
            msg.add("User-supplied content-length header is ignored");
            _acre_response.log("warn", msg);
        } else if ("user-agent".equalsIgnoreCase(key)) {
            params.setParameter(AllClientPNames.USER_AGENT, value);
        } else {
            method.addHeader(key, value);
        }
        if (!("x-acre-auth".equalsIgnoreCase(key))) {
            request_header_log.append(key + ": " + value + "\r\n");
        }
    }
    logmsg.put("Headers", request_header_log.toString());

    // XXX need more detailed error checking
    if (method instanceof HttpEntityEnclosingRequestBase && request_body != null) {

        HttpEntityEnclosingRequestBase em = (HttpEntityEnclosingRequestBase) method;
        try {
            if (request_body instanceof String) {
                StringEntity ent = new StringEntity((String) request_body, content_type_charset);
                em.setEntity(ent);
            } else if (request_body instanceof JSBinary) {
                ByteArrayEntity ent = new ByteArrayEntity(((JSBinary) request_body).get_data());
                em.setEntity(ent);
            }
        } catch (UnsupportedEncodingException e) {
            throw new AcreURLFetchException(
                    "Failed to fetch URL. " + " - Unsupported charset: " + content_type_charset);
        }
    }

    if (!system && log_to_user) {
        ArrayList<Object> msg = new ArrayList<Object>();
        msg.add("urlfetch request");
        msg.add(logmsg);
        _acre_response.log("debug", msg);
    }
    _logger.info("urlfetch.request", logmsg);

    long startTime = System.currentTimeMillis();

    try {
        // this sends the http request and waits
        HttpResponse hres = client.execute(method);
        status = hres.getStatusLine().getStatusCode();
        HashMap<String, String> res_logmsg = new HashMap<String, String>();
        res_logmsg.put("URL", request_url);
        res_logmsg.put("Status", ((Integer) status).toString());

        Header content_type_header = null;

        // translate response headers
        StringBuilder response_header_log = new StringBuilder();
        Header[] rawheaders = hres.getAllHeaders();
        for (Header rawheader : rawheaders) {
            String headername = rawheader.getName().toLowerCase();
            if (headername.equalsIgnoreCase("content-type")) {
                content_type_header = rawheader;
                // XXX should strip everything after ;
                content_type = rawheader.getValue();

                // XXX don't set content_type_parameters, deprecated?
            } else if (headername.equalsIgnoreCase("x-metaweb-cost")) {
                _costCollector.merge(rawheader.getValue());
            } else if (headername.equalsIgnoreCase("x-metaweb-tid")) {
                res_logmsg.put("ITID", rawheader.getValue());
            }

            headers.put(headername, rawheader.getValue());
            response_header_log.append(headername + ": " + rawheader.getValue() + "\r\n");
        }

        res_logmsg.put("Headers", response_header_log.toString());

        if (!system && log_to_user) {
            ArrayList<Object> msg = new ArrayList<Object>();
            msg.add("urlfetch response");
            msg.add(res_logmsg);
            _acre_response.log("debug", msg);
        }

        _logger.info("urlfetch.response", res_logmsg);

        // read cookies
        for (Cookie c : cstore.getCookies()) {
            cookies.put(c.getName(), new AcreCookie(c));
        }

        // get body encoding

        String charset = null;
        if (content_type_header != null) {
            HeaderElement values[] = content_type_header.getElements();
            if (values.length == 1) {
                NameValuePair param = values[0].getParameterByName("charset");
                if (param != null) {
                    charset = param.getValue();
                }
            }
        }

        if (charset == null)
            charset = response_encoding;

        // read body
        HttpEntity ent = hres.getEntity();
        if (ent != null) {
            InputStream res_stream = ent.getContent();
            Header cenc = ent.getContentEncoding();
            if (cenc != null && res_stream != null) {
                HeaderElement[] codecs = cenc.getElements();
                for (HeaderElement codec : codecs) {
                    if (codec.getName().equalsIgnoreCase("gzip")) {
                        res_stream = new GZIPInputStream(res_stream);
                    }
                }
            }

            long firstByteTime = 0;
            long endTime = 0;
            if (content_type != null
                    && (content_type.startsWith("image/") || content_type.startsWith("application/octet-stream")
                            || content_type.startsWith("multipart/form-data"))) {
                // HttpClient's InputStream doesn't support mark/reset, so
                // wrap it with one that does.
                BufferedInputStream bufis = new BufferedInputStream(res_stream);
                bufis.mark(2);
                bufis.read();
                firstByteTime = System.currentTimeMillis();
                bufis.reset();
                byte[] data = IOUtils.toByteArray(bufis);

                endTime = System.currentTimeMillis();
                body = new JSBinary();
                ((JSBinary) body).set_data(data);

                try {
                    if (res_stream != null) {
                        res_stream.close();
                    }
                } catch (IOException e) {
                    // ignore
                }
            } else if (res_stream == null || charset == null) {
                firstByteTime = endTime = System.currentTimeMillis();
                body = "";
            } else {
                StringWriter writer = new StringWriter();
                Reader reader = new InputStreamReader(res_stream, charset);
                int i = reader.read();
                firstByteTime = System.currentTimeMillis();
                writer.write(i);
                IOUtils.copy(reader, writer);
                endTime = System.currentTimeMillis();
                body = writer.toString();

                try {
                    reader.close();
                    writer.close();
                } catch (IOException e) {
                    // ignore
                }
            }

            long waitingTime = firstByteTime - startTime;
            long readingTime = endTime - firstByteTime;

            _logger.debug("urlfetch.timings", "waiting time: " + waitingTime + "ms");
            _logger.debug("urlfetch.timings", "reading time: " + readingTime + "ms");

            Statistics.instance().collectUrlfetchTime(startTime, firstByteTime, endTime);

            _costCollector.collect((system) ? "asuc" : "auuc").collect((system) ? "asuw" : "auuw", waitingTime)
                    .collect((system) ? "asub" : "auub", waitingTime);
        }
    } catch (IllegalArgumentException e) {
        Throwable cause = e.getCause();
        if (cause == null)
            cause = e;
        throw new AcreURLFetchException("failed to fetch URL. " + " - Request Error: " + cause.getMessage());
    } catch (IOException e) {
        Throwable cause = e.getCause();
        if (cause == null)
            cause = e;
        throw new AcreURLFetchException("Failed to fetch URL. " + " - Network Error: " + cause.getMessage());
    } catch (RuntimeException e) {
        Throwable cause = e.getCause();
        if (cause == null)
            cause = e;
        throw new AcreURLFetchException("Failed to fetch URL. " + " - Network Error: " + cause.getMessage());
    } finally {
        method.abort();
    }
}

From source file:org.codelibs.fess.transformer.FessXpathTransformer.java

@Override
protected void storeData(final ResponseData responseData, final ResultData resultData) {
    final File tempFile = ResponseDataUtil.createResponseBodyFile(responseData);
    try {
        final DOMParser parser = getDomParser();
        BufferedInputStream bis = null;
        try {
            bis = new BufferedInputStream(new FileInputStream(tempFile));
            final byte[] bomBytes = new byte[UTF8_BOM_SIZE];
            bis.mark(UTF8_BOM_SIZE);
            bis.read(bomBytes); // NOSONAR
            if (!isUtf8BomBytes(bomBytes)) {
                bis.reset();
            }
            final InputSource is = new InputSource(bis);
            if (responseData.getCharSet() != null) {
                is.setEncoding(responseData.getCharSet());
            }
            parser.parse(is);
        } catch (final Exception e) {
            throw new RobotCrawlAccessException("Could not parse " + responseData.getUrl(), e);
        } finally {
            IOUtils.closeQuietly(bis);
        }

        final Document document = parser.getDocument();

        final Map<String, Object> dataMap = new HashMap<String, Object>();
        for (final Map.Entry<String, String> entry : fieldRuleMap.entrySet()) {
            final String path = entry.getValue();
            try {
                final XObject xObj = getXPathAPI().eval(document, path);
                final int type = xObj.getType();
                switch (type) {
                case XObject.CLASS_BOOLEAN:
                    final boolean b = xObj.bool();
                    putResultDataBody(dataMap, entry.getKey(), Boolean.toString(b));
                    break;
                case XObject.CLASS_NUMBER:
                    final double d = xObj.num();
                    putResultDataBody(dataMap, entry.getKey(), Double.toString(d));
                    break;
                case XObject.CLASS_STRING:
                    final String str = xObj.str();
                    putResultDataBody(dataMap, entry.getKey(), str);
                    break;
                case XObject.CLASS_NULL:
                case XObject.CLASS_UNKNOWN:
                case XObject.CLASS_NODESET:
                case XObject.CLASS_RTREEFRAG:
                case XObject.CLASS_UNRESOLVEDVARIABLE:
                default:
                    final Node value = getXPathAPI().selectSingleNode(document, entry.getValue());
                    putResultDataBody(dataMap, entry.getKey(), value != null ? value.getTextContent() : null);
                    break;
                }
            } catch (final TransformerException e) {
                logger.warn("Could not parse a value of " + entry.getKey() + ":" + entry.getValue());
            }
        }

        FileInputStream fis = null;
        try {
            fis = new FileInputStream(tempFile);
            responseData.setResponseBody(fis);
            putAdditionalData(dataMap, responseData, document);
        } catch (final FileNotFoundException e) {
            logger.warn(tempFile + " does not exist.", e);
            putAdditionalData(dataMap, responseData, document);
        } finally {
            IOUtils.closeQuietly(fis);
        }

        try {
            resultData.setData(SerializeUtil.fromObjectToBinary(dataMap));
        } catch (final Exception e) {
            throw new RobotCrawlAccessException("Could not serialize object: " + responseData.getUrl(), e);
        }
        resultData.setEncoding(charsetName);
    } finally {
        if (!tempFile.delete()) {
            logger.warn("Could not delete a temp file: " + tempFile);
        }
    }
}

From source file:com.zimbra.cs.service.FeedManager.java

private static int getLeadingChar(BufferedInputStream is, StringBuilder charset) throws IOException {
    is.mark(128);
    // check for any BOMs that would override the specified charset
    int ch = is.read();
    switch (ch) {
    case 0xEF:
        if (is.read() == 0xBB && is.read() == 0xBF) {
            is.mark(128);
            ch = is.read();
            charset.setLength(0);
            charset.append("utf-8");
        }
        break;
    case 0xFE:
        if (is.read() == 0xFF && is.read() == 0x00) {
            ch = is.read();
            charset.setLength(0);
            charset.append("utf-16");
        }
        break;
    case 0xFF:
        if (is.read() == 0xFE) {
            ch = is.read();
            charset.setLength(0);
            charset.append("utf-16");
        }
        break;
    }
    // skip up to 120 bytes of leading whitespace
    for (int index = 0; index < 120 && (ch == '\0' || Character.isWhitespace(ch)); index++)
        ch = is.read();
    // reset to the original state and return the first non-whitespace character
    is.reset();
    return ch;
}

From source file:org.codelibs.fess.web.admin.SuggestBadWordAction.java

@Token(save = false, validate = true)
@Execute(validator = true, input = "uploadpage")
public String upload() {
    BufferedInputStream is = null;
    File tempFile = null;
    FileOutputStream fos = null;
    final byte[] b = new byte[20];
    try {
        tempFile = File.createTempFile("suggestbadword-import-", ".csv");
        is = new BufferedInputStream(suggestBadWordForm.suggestBadWordFile.getInputStream());
        is.mark(20);
        if (is.read(b, 0, 20) <= 0) {
            throw new FessSystemException("no import data.");
        }
        is.reset();
        fos = new FileOutputStream(tempFile);
        StreamUtil.drain(is, fos);
    } catch (final Exception e) {
        if (tempFile != null && !tempFile.delete()) {
            logger.warn("Could not delete " + tempFile.getAbsolutePath());
        }
        logger.error("Failed to import data.", e);
        throw new SSCActionMessagesException(e, "errors.failed_to_import_data");
    } finally {
        IOUtils.closeQuietly(is);
        IOUtils.closeQuietly(fos);
    }

    final File oFile = tempFile;
    try {
        final String head = new String(b, Constants.UTF_8);
        if (!(head.startsWith("\"BadWord\"") || head.startsWith("BadWord"))) {
            logger.error("Unknown file: " + suggestBadWordForm.suggestBadWordFile);
            throw new SSCActionMessagesException("errors.unknown_import_file");
        }
        final String enc = crawlerProperties.getProperty(Constants.CSV_FILE_ENCODING_PROPERTY, Constants.UTF_8);
        new Thread(new Runnable() {
            public void run() {
                Reader reader = null;
                try {
                    reader = new BufferedReader(new InputStreamReader(new FileInputStream(oFile), enc));
                    suggestBadWordService.importCsv(reader);
                } catch (final Exception e) {
                    logger.error("Failed to import data.", e);
                    throw new FessSystemException("Failed to import data.", e);
                } finally {
                    if (!oFile.delete()) {
                        logger.warn("Could not delete " + oFile.getAbsolutePath());
                    }
                    IOUtils.closeQuietly(reader);
                    suggestHelper.deleteAllBadWord();
                    suggestHelper.updateSolrBadwordFile();
                }
            }
        }).start();
    } catch (final ActionMessagesException e) {
        if (!oFile.delete()) {
            logger.warn("Could not delete " + oFile.getAbsolutePath());
        }
        throw e;
    } catch (final Exception e) {
        if (!oFile.delete()) {
            logger.warn("Could not delete " + oFile.getAbsolutePath());
        }
        logger.error("Failed to import data.", e);
        throw new SSCActionMessagesException(e, "errors.failed_to_import_data");
    }
    SAStrutsUtil.addSessionMessage("success.upload_suggest_bad_word");

    return "uploadpage?redirect=true";
}

From source file:org.codelibs.fess.web.admin.SuggestElevateWordAction.java

@Token(save = false, validate = true)
@Execute(validator = true, input = "uploadpage")
public String upload() {
    BufferedInputStream is = null;
    File tempFile = null;
    FileOutputStream fos = null;
    final byte[] b = new byte[20];
    try {
        tempFile = File.createTempFile("suggestelevateword-import-", ".csv");
        is = new BufferedInputStream(suggestElevateWordForm.suggestElevateWordFile.getInputStream());
        is.mark(20);
        if (is.read(b, 0, 20) <= 0) {
            throw new FessSystemException("no import data.");
        }
        is.reset();
        fos = new FileOutputStream(tempFile);
        StreamUtil.drain(is, fos);
    } catch (final Exception e) {
        if (tempFile != null && !tempFile.delete()) {
            logger.warn("Could not delete " + tempFile.getAbsolutePath());
        }
        logger.error("Failed to import data.", e);
        throw new SSCActionMessagesException(e, "errors.failed_to_import_data");
    } finally {
        IOUtils.closeQuietly(is);
        IOUtils.closeQuietly(fos);
    }

    final File oFile = tempFile;
    try {
        final String head = new String(b, Constants.UTF_8);
        if (!(head.startsWith("\"SuggestWord\"") || head.startsWith("SuggestWord"))) {
            logger.error("Unknown file: " + suggestElevateWordForm.suggestElevateWordFile);
            throw new SSCActionMessagesException("errors.unknown_import_file");
        }
        final String enc = crawlerProperties.getProperty(Constants.CSV_FILE_ENCODING_PROPERTY, Constants.UTF_8);
        new Thread(new Runnable() {
            public void run() {
                Reader reader = null;
                try {
                    reader = new BufferedReader(new InputStreamReader(new FileInputStream(oFile), enc));
                    suggestElevateWordService.importCsv(reader);
                } catch (final Exception e) {
                    logger.error("Failed to import data.", e);
                    throw new FessSystemException("Failed to import data.", e);
                } finally {
                    if (!oFile.delete()) {
                        logger.warn("Could not delete " + oFile.getAbsolutePath());
                    }
                    IOUtils.closeQuietly(reader);
                    suggestHelper.storeAllElevateWords();
                }
            }
        }).start();
    } catch (final ActionMessagesException e) {
        if (!oFile.delete()) {
            logger.warn("Could not delete " + oFile.getAbsolutePath());
        }
        throw e;
    } catch (final Exception e) {
        if (!oFile.delete()) {
            logger.warn("Could not delete " + oFile.getAbsolutePath());
        }
        logger.error("Failed to import data.", e);
        throw new SSCActionMessagesException(e, "errors.failed_to_import_data");
    }
    SAStrutsUtil.addSessionMessage("success.upload_suggest_elevate_word");

    return "uploadpage?redirect=true";
}

From source file:com.baomidou.framework.upload.UploadMultipartRequest.java

private UploadFile writeTo(File fileOrDirectory, String fileName, FileRenamePolicy policy, FilePart filePart)
        throws IOException {
    OutputStream fileOut = null;
    UploadFile cf = new UploadFile();
    try {
        // Only do something if this part contains a file
        if (fileName != null) {
            BufferedInputStream in = new BufferedInputStream(filePart.getInputStream());
            cf.setType(filePart.getContentType());
            // If header-based extension checking is configured, determine the
            // file type from its leading bytes rather than from the file name.
            if (StringUtils.isNotBlank(fileHeaderExts)) {
                try {
                    // Read the first 3 bytes of the file header.
                    byte[] data = new byte[3];
                    in.mark(3);
                    in.read(data, 0, data.length);
                    in.reset();
                    String fileExt = readFileExt(data, fileName);
                    if (fileExt != null) {
                        cf.setSuffix(fileExt);
                    } else {
                        cf.setUploadCode(UploadCode.ILLEGAL_EXT);
                        logger.debug(" upload fileType is null.");
                        return cf;
                    }
                } catch (Exception e) {
                    logger.debug("upload file error. ", e);
                    cf.setUploadCode(UploadCode.EXCEPTION);
                    return cf;
                }
            } else {
                cf.setSuffix(fileName.substring(fileName.lastIndexOf(".")));
            }

            // Check if user supplied directory
            File file;
            if (fileOrDirectory.isDirectory()) {
                // Write it to that dir the user supplied, 
                // with the filename it arrived with
                file = new File(fileOrDirectory, fileName);
            } else {
                // Write it to the file the user supplied,
                // ignoring the filename it arrived with
                file = fileOrDirectory;
            }

            if (policy instanceof UploadFileRenamePolicy) {
                ((UploadFileRenamePolicy) policy).setSuffix(cf.getSuffix());
            }

            if (policy != null) {
                file = policy.rename(file);
                fileName = file.getName();
                cf.setFilename(fileName);
            }

            fileOut = new BufferedOutputStream(new FileOutputStream(file));
            cf.setSize(write(fileOut, filePart.getContentType(), in));
        }
    } catch (Exception e) {
        logger.debug("upload file write error. ", e);
        cf.setUploadCode(UploadCode.EXCEPTION);
    } finally {
        if (fileOut != null)
            fileOut.close();
    }
    return cf;
}

From source file:org.codelibs.fess.crawler.extractor.impl.AbstractXmlExtractor.java

protected String getEncoding(final BufferedInputStream bis) {
    final byte[] b = new byte[preloadSizeForCharset];
    try {
        bis.mark(preloadSizeForCharset);
        final int c = bis.read(b);

        if (c == -1) {
            return encoding;
        }

        final String head = new String(b, 0, c, encoding);
        if (StringUtil.isBlank(head)) {
            return encoding;
        }
        final Matcher matcher = getEncodingPattern().matcher(head);
        if (matcher.find()) {
            final String enc = matcher.group(1);
            if (Charset.isSupported(enc)) {
                return enc;
            }
        }
    } catch (final Exception e) {
        if (logger.isInfoEnabled()) {
            logger.info("Use a default encoding: " + encoding, e);
        }
    } finally {
        try {
            bis.reset();
        } catch (final IOException e) {
            throw new ExtractException(e);
        }
    }

    return encoding;
}