Example usage for java.util.zip GZIPOutputStream GZIPOutputStream

Introduction

On this page you can find example usage for the java.util.zip GZIPOutputStream(OutputStream, int) constructor.

Prototype

public GZIPOutputStream(OutputStream out, int size) throws IOException 

Document

Creates a new output stream with the specified buffer size.
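
Before the project examples below, here is a minimal, self-contained sketch of this constructor in isolation. The file name and the 64 KB buffer size are illustrative, not taken from any of the sources:

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.zip.GZIPOutputStream;

public class GzipBufferSizeExample {

    public static void main(String[] args) throws IOException {
        // Wrap a file stream; the second argument sizes the deflater buffer.
        try (OutputStream file = new FileOutputStream("example.txt.gz");
                GZIPOutputStream gzip = new GZIPOutputStream(file, 64 * 1024)) {
            gzip.write("hello, gzip".getBytes(StandardCharsets.UTF_8));
        } // close() calls finish() first, so the gzip trailer is written
    }
}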

Usage

From source file:io.ecarf.core.cloud.task.processor.reason.phase2.DoReasonTask7.java

@Override
public void run() throws IOException {

    GoogleCloudService cloud = (GoogleCloudService) this.getCloudService();

    Stopwatch stopwatch1 = Stopwatch.createUnstarted();
    Stopwatch stopwatch2 = Stopwatch.createUnstarted();
    Set<String> termsSet;

    if (terms == null) {
        // too large, probably saved as a file

        log.info("Using json file for terms: " + termsFile);
        Validate.notNull(termsFile);

        String localTermsFile = Utils.TEMP_FOLDER + termsFile;
        cloud.downloadObjectFromCloudStorage(termsFile, localTermsFile, bucket);

        // convert from JSON
        termsSet = io.cloudex.framework.utils.FileUtils.jsonFileToSet(localTermsFile);

    } else {
        termsSet = ObjectUtils.csvToSet(terms);
    }

    String localSchemaFile = Utils.TEMP_FOLDER + schemaFile;
    // download the file from the cloud storage
    cloud.downloadObjectFromCloudStorage(schemaFile, localSchemaFile, bucket);

    // uncompress if compressed
    if (GzipUtils.isCompressedFilename(schemaFile)) {
        localSchemaFile = GzipUtils.getUncompressedFilename(localSchemaFile);
    }

    Map<Long, Set<Triple>> allSchemaTriples = TripleUtils.getRelevantSchemaETriples(localSchemaFile,
            TermUtils.RDFS_TBOX);

    // get all the triples we care about
    schemaTerms = new HashMap<>();

    for (String termStr : termsSet) {

        Long term = Long.parseLong(termStr);

        if (allSchemaTriples.containsKey(term)) {
            schemaTerms.put(term, allSchemaTriples.get(term));
        }
    }

    String decoratedTable = table;
    int emptyRetries = 0;
    int totalInferredTriples = 0;
    int maxRetries = Config.getIntegerProperty(Constants.REASON_RETRY_KEY, 6);
    String instanceId = cloud.getInstanceId();

    QueryGenerator<Long> generator = new QueryGenerator<Long>(schemaTerms, null);

    // timestamp loop
    do {

        Set<Long> productiveTerms = new HashSet<>();
        int interimInferredTriples = 0;

        // First of all run all the queries asynchronously and remember the jobId and filename for each term

        List<QueryResult> queryResults = new ArrayList<QueryResult>();
        generator.setDecoratedTable(decoratedTable);

        List<String> queries = generator.getQueries();
        log.debug("Generated Queries: " + queries);
        String queryResultFilePrefix = Utils.TEMP_FOLDER + instanceId + '_' + System.currentTimeMillis()
                + "_QueryResults_";
        int fileCount = 0;
        for (String query : queries) {
            String jobId = cloud.startBigDataQuery(query);
            queryResults
                    .add(QueryResult.create().setFilename(queryResultFilePrefix + fileCount).setJobId(jobId));
            fileCount++;
        }

        // invoke all the queries in parallel
        //this.invokeAll(queryTasks);

        long start = System.currentTimeMillis();

        String inferredTriplesFile = Utils.TEMP_FOLDER + instanceId + '_' + start + Constants.DOT_INF;

        // save all the query results in files in parallel
        //this.invokeAll(saveTasks);

        for (QueryResult queryResult : queryResults) {
            try {
                // block and wait for each job to complete then save results to a file
                QueryStats stats = cloud.saveBigQueryResultsToFile(queryResult.getJobId(),
                        queryResult.getFilename());
                queryResult.setStats(stats);

            } catch (IOException ioe) {
                // transient backend errors
                log.warn("failed to save query results to file, jobId: " + queryResult.getJobId(), ioe);
                //TODO should throw an exception
            }
        }

        try (PrintWriter writer = new PrintWriter(
                new GZIPOutputStream(new FileOutputStream(inferredTriplesFile), Constants.GZIP_BUF_SIZE))) {

            // now loop through the queries
            //for(Entry<Term, Set<Triple>> entry: schemaTerms.entrySet()) {
            for (QueryResult queryResult : queryResults) {

                //Term term = entry.getKey();
                QueryStats stats = queryResult.getStats();

                BigInteger rows = stats.getTotalRows();//term.getRows();

                this.totalBytes = this.totalBytes + stats.getTotalProcessedBytes();//term.getBytes();

                // only process if triples are found matching this term
                if (!BigInteger.ZERO.equals(rows)) {

                    stopwatch1.start();

                    int inferredTriplesCount = this.inferAndSaveTriplesToFile(queryResult, productiveTerms,
                            decoratedTable, writer);

                    interimInferredTriples += inferredTriplesCount;

                    this.totalRows = this.totalRows.add(rows);

                    stopwatch1.stop();

                } else {
                    log.info("Skipping query as no data is found");
                }
            }
        }

        totalInferredTriples += interimInferredTriples;

        if (interimInferredTriples > 0) {

            // stream smaller numbers of inferred triples
            // try uploading from cloud storage
            int streamingThreshold = Config.getIntegerProperty("ecarf.io.reasoning.streaming.threshold",
                    100000);

            log.info("Inserting " + interimInferredTriples + ", inferred triples into Big Data table for "
                    + productiveTerms.size() + " productive terms. Filename: " + inferredTriplesFile);

            if (interimInferredTriples <= streamingThreshold) {
                // stream the data

                Set<Triple> inferredTriples = TripleUtils.loadCompressedCSVTriples(inferredTriplesFile, true);
                log.info("Total triples to stream into Big Data: " + inferredTriples.size());
                cloud.streamObjectsIntoBigData(inferredTriples,
                        TableUtils.getBigQueryEncodedTripleTable(table));

                log.info("All inferred triples are streamed into Big Data table");

            } else {

                // load the data through cloud storage
                // upload the file to cloud storage
                log.info("Uploading inferred triples file into cloud storage: " + inferredTriplesFile);
                StorageObject file = cloud.uploadFileToCloudStorage(inferredTriplesFile, bucket);
                log.info("File " + file + ", uploaded successfully. Now loading it into big data.");

                String jobId = cloud.loadCloudStorageFilesIntoBigData(Lists.newArrayList(file.getUri()),
                        TableUtils.getBigQueryEncodedTripleTable(table), false);
                log.info(
                        "All inferred triples are loaded into Big Data table through cloud storage, completed jobId: "
                                + jobId);

            }

            // reset empty retries
            emptyRetries = 0;

            stopwatch2.reset();

        } else {
            log.info("No new inferred triples");
            // increment empty retries
            emptyRetries++;

            if (!stopwatch2.isRunning()) {
                stopwatch2.start();
            }
        }

        log.info("Total inferred triples so far = " + totalInferredTriples + ", current retry count: "
                + emptyRetries);

        if (emptyRetries < maxRetries) {
            ApiUtils.block(Config.getIntegerProperty(Constants.REASON_SLEEP_KEY, 20));

            // FIXME move into the particular cloud implementation service
            long elapsed = System.currentTimeMillis() - start;
            decoratedTable = "[" + table + "@-" + elapsed + "-]";

            log.info("Using table decorator: " + decoratedTable + ". Empty retries count: " + emptyRetries);
        }

    } while (emptyRetries < maxRetries); // end timestamp loop

    //executor.shutdown();
    log.info("Finished reasoning, total inferred triples = " + totalInferredTriples);
    //log.info("Number of avoided duplicate terms = " + this.duplicates);
    log.info("Total rows retrieved from big data = " + this.totalRows);
    log.info("Total processed GBytes = " + ((double) this.totalBytes / FileUtils.ONE_GB));
    log.info("Total process reasoning time (serialization in inf file) = " + stopwatch1);
    log.info("Total time spent in empty inference cycles = " + stopwatch2);
}
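
The two variants below follow the same overall pattern: inferred triples are written through a PrintWriter layered over a GZIPOutputStream created with a Constants.GZIP_BUF_SIZE buffer. DoReasonTask6 works with plain String terms and the unencoded triple table, while DoReasonTask8 uses encoded Long terms, adds a direct-download limit for query results, and rethrows the IOException that DoReasonTask7 merely logs when saving query results fails.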

From source file:io.ecarf.core.cloud.task.processor.reason.phase2.DoReasonTask6.java

@Override
public void run() throws IOException {

    GoogleCloudService cloud = (GoogleCloudService) this.getCloudService();

    //String table = metadata.getValue(EcarfMetaData.ECARF_TABLE);
    //Set<String> terms = metadata.getTerms();
    //String schemaFile = metadata.getValue(EcarfMetaData.ECARF_SCHEMA);
    //String bucket = metadata.getBucket();
    Stopwatch stopwatch1 = Stopwatch.createUnstarted();
    Stopwatch stopwatch2 = Stopwatch.createUnstarted();
    Set<String> termsSet;

    if (terms == null) {
        // too large, probably saved as a file
        //String termsFile = metadata.getValue(EcarfMetaData.ECARF_TERMS_FILE);
        log.info("Using json file for terms: " + termsFile);
        Validate.notNull(termsFile);

        String localTermsFile = Utils.TEMP_FOLDER + termsFile;
        cloud.downloadObjectFromCloudStorage(termsFile, localTermsFile, bucket);

        // convert from JSON
        termsSet = io.cloudex.framework.utils.FileUtils.jsonFileToSet(localTermsFile);

    } else {
        termsSet = ObjectUtils.csvToSet(terms);
    }

    String localSchemaFile = Utils.TEMP_FOLDER + schemaFile;
    // download the file from the cloud storage
    cloud.downloadObjectFromCloudStorage(schemaFile, localSchemaFile, bucket);

    // uncompress if compressed
    if (GzipUtils.isCompressedFilename(schemaFile)) {
        localSchemaFile = GzipUtils.getUncompressedFilename(localSchemaFile);
    }

    Map<String, Set<Triple>> allSchemaTriples = TripleUtils.getRelevantSchemaNTriples(localSchemaFile,
            TermUtils.RDFS_TBOX);

    // get all the triples we care about
    schemaTerms = new HashMap<>();

    for (String term : termsSet) {
        if (allSchemaTriples.containsKey(term)) {
            schemaTerms.put(term, allSchemaTriples.get(term));
        }
    }

    String decoratedTable = table;
    int emptyRetries = 0;
    int totalInferredTriples = 0;
    int maxRetries = Config.getIntegerProperty(Constants.REASON_RETRY_KEY, 6);
    String instanceId = cloud.getInstanceId();

    QueryGenerator<String> generator = new QueryGenerator<String>(schemaTerms, null);

    // timestamp loop
    do {

        Set<String> productiveTerms = new HashSet<>();
        int interimInferredTriples = 0;

        // First of all run all the queries asynchronously and remember the jobId and filename for each term

        List<QueryResult> queryResults = new ArrayList<QueryResult>();
        generator.setDecoratedTable(decoratedTable);

        List<String> queries = generator.getQueries();
        log.debug("Generated Queries: " + queries);
        String queryResultFilePrefix = Utils.TEMP_FOLDER + instanceId + '_' + System.currentTimeMillis()
                + "_QueryResults_";
        int fileCount = 0;
        for (String query : queries) {
            String jobId = cloud.startBigDataQuery(query);
            queryResults
                    .add(QueryResult.create().setFilename(queryResultFilePrefix + fileCount).setJobId(jobId));
            fileCount++;
        }

        // invoke all the queries in parallel
        //this.invokeAll(queryTasks);

        long start = System.currentTimeMillis();

        String inferredTriplesFile = Utils.TEMP_FOLDER + instanceId + '_' + start + Constants.DOT_INF;

        // save all the query results in files in parallel
        //this.invokeAll(saveTasks);

        for (QueryResult queryResult : queryResults) {
            try {
                // block and wait for each job to complete then save results to a file
                QueryStats stats = cloud.saveBigQueryResultsToFile(queryResult.getJobId(),
                        queryResult.getFilename());
                queryResult.setStats(stats);

            } catch (IOException ioe) {
                // transient backend errors
                log.warn("failed to save query results to file, jobId: " + queryResult.getJobId(), ioe);
                //TODO should throw an exception
            }
        }

        try (PrintWriter writer = new PrintWriter(
                new GZIPOutputStream(new FileOutputStream(inferredTriplesFile), Constants.GZIP_BUF_SIZE))) {

            // now loop through the queries
            //for(Entry<Term, Set<Triple>> entry: schemaTerms.entrySet()) {
            for (QueryResult queryResult : queryResults) {

                //Term term = entry.getKey();
                QueryStats stats = queryResult.getStats();

                BigInteger rows = stats.getTotalRows();//term.getRows();

                this.totalBytes = this.totalBytes + stats.getTotalProcessedBytes();//term.getBytes();

                // only process if triples are found matching this term
                if (!BigInteger.ZERO.equals(rows)) {

                    stopwatch1.start();

                    int inferredTriplesCount = this.inferAndSaveTriplesToFile(queryResult, productiveTerms,
                            decoratedTable, writer);

                    interimInferredTriples += inferredTriplesCount;

                    this.totalRows = this.totalRows.add(rows);

                    stopwatch1.stop();

                } else {
                    log.info("Skipping query as no data is found");
                }
            }
        }

        totalInferredTriples += interimInferredTriples;

        if (interimInferredTriples > 0) {

            // stream smaller numbers of inferred triples
            // try uploading from cloud storage
            int streamingThreshold = Config.getIntegerProperty("ecarf.io.reasoning.streaming.threshold",
                    100000);

            log.info("Inserting " + interimInferredTriples + ", inferred triples into Big Data table for "
                    + productiveTerms.size() + " productive terms. Filename: " + inferredTriplesFile);

            if (interimInferredTriples <= streamingThreshold) {
                // stream the data

                Set<Triple> inferredTriples = TripleUtils.loadCompressedCSVTriples(inferredTriplesFile, false);
                log.info("Total triples to stream into Big Data: " + inferredTriples.size());
                cloud.streamObjectsIntoBigData(inferredTriples, TableUtils.getBigQueryTripleTable(table));

                log.info("All inferred triples are streamed into Big Data table");

            } else {

                // load the data through cloud storage
                // upload the file to cloud storage
                log.info("Uploading inferred triples file into cloud storage: " + inferredTriplesFile);
                StorageObject file = cloud.uploadFileToCloudStorage(inferredTriplesFile, bucket);
                log.info("File " + file + ", uploaded successfully. Now loading it into big data.");

                String jobId = cloud.loadCloudStorageFilesIntoBigData(Lists.newArrayList(file.getUri()),
                        TableUtils.getBigQueryTripleTable(table), false);
                log.info(
                        "All inferred triples are loaded into Big Data table through cloud storage, completed jobId: "
                                + jobId);

            }

            // reset empty retries
            emptyRetries = 0;

            stopwatch2.reset();

        } else {
            log.info("No new inferred triples");
            // increment empty retries
            emptyRetries++;

            if (!stopwatch2.isRunning()) {
                stopwatch2.start();
            }
        }

        log.info("Total inferred triples so far = " + totalInferredTriples + ", current retry count: "
                + emptyRetries);

        if (emptyRetries < maxRetries) {
            ApiUtils.block(Config.getIntegerProperty(Constants.REASON_SLEEP_KEY, 20));

            // FIXME move into the particular cloud implementation service
            long elapsed = System.currentTimeMillis() - start;
            decoratedTable = "[" + table + "@-" + elapsed + "-]";

            log.info("Using table decorator: " + decoratedTable + ". Empty retries count: " + emptyRetries);
        }

    } while (emptyRetries < maxRetries); // end timestamp loop

    //executor.shutdown();
    log.info("Finished reasoning, total inferred triples = " + totalInferredTriples);
    log.info("Number of avoided duplicate terms = " + this.duplicates);
    log.info("Total rows retrieved from big data = " + this.totalRows);
    log.info("Total processed GBytes = " + ((double) this.totalBytes / FileUtils.ONE_GB));
    log.info("Total process reasoning time (serialization in inf file) = " + stopwatch1);
    log.info("Total time spent in empty inference cycles = " + stopwatch2);
}

From source file:io.ecarf.core.cloud.task.processor.reason.phase2.DoReasonTask8.java

@Override
public void run() throws IOException {

    GoogleCloudService cloud = (GoogleCloudService) this.getCloudService();

    Stopwatch stopwatch1 = Stopwatch.createUnstarted();
    Stopwatch stopwatch2 = Stopwatch.createUnstarted();
    Set<String> termsSet;

    if (terms == null) {
        // too large, probably saved as a file

        log.info("Using json file for terms: " + termsFile);
        Validate.notNull(termsFile);

        String localTermsFile = Utils.TEMP_FOLDER + termsFile;
        cloud.downloadObjectFromCloudStorage(termsFile, localTermsFile, bucket);

        // convert from JSON
        termsSet = io.cloudex.framework.utils.FileUtils.jsonFileToSet(localTermsFile);

    } else {
        termsSet = ObjectUtils.csvToSet(terms);
    }

    String localSchemaFile = Utils.TEMP_FOLDER + schemaFile;
    // download the file from the cloud storage
    cloud.downloadObjectFromCloudStorage(schemaFile, localSchemaFile, bucket);

    // uncompress if compressed
    if (GzipUtils.isCompressedFilename(schemaFile)) {
        localSchemaFile = GzipUtils.getUncompressedFilename(localSchemaFile);
    }

    Map<Long, Set<Triple>> allSchemaTriples = TripleUtils.getRelevantSchemaETriples(localSchemaFile,
            TermUtils.RDFS_TBOX);

    // get all the triples we care about
    schemaTerms = new HashMap<>();

    for (String termStr : termsSet) {

        Long term = Long.parseLong(termStr);

        if (allSchemaTriples.containsKey(term)) {
            schemaTerms.put(term, allSchemaTriples.get(term));
        }
    }

    String decoratedTable = table;
    int emptyRetries = 0;
    int totalInferredTriples = 0;
    int maxRetries = Config.getIntegerProperty(Constants.REASON_RETRY_KEY, 6);
    this.ddLimit = Config.getIntegerProperty(Constants.REASON_DATA_DIRECT_DOWNLOAD_LIMIT, 1_200_000);
    String instanceId = cloud.getInstanceId();

    QueryGenerator<Long> generator = new QueryGenerator<Long>(schemaTerms, null);

    // timestamp loop
    do {

        Set<Long> productiveTerms = new HashSet<>();
        int interimInferredTriples = 0;

        // First of all run all the queries asynchronously and remember the jobId and filename for each term

        List<QueryResult> queryResults = new ArrayList<QueryResult>();
        generator.setDecoratedTable(decoratedTable);

        List<String> queries = generator.getQueries();
        log.debug("Generated Queries: " + queries);
        String queryResultFilePrefix = instanceId + '_' + System.currentTimeMillis() + "_QueryResults_";
        int fileCount = 0;
        for (String query : queries) {
            String jobId = cloud.startBigDataQuery(query, new BigDataTable(this.table));
            queryResults
                    .add(QueryResult.create().setFilename(queryResultFilePrefix + fileCount).setJobId(jobId));
            fileCount++;
        }

        // invoke all the queries in parallel
        //this.invokeAll(queryTasks);

        long start = System.currentTimeMillis();

        String inferredTriplesFile = Utils.TEMP_FOLDER + instanceId + '_' + start + Constants.DOT_INF;

        // save all the query results in files in parallel
        //this.invokeAll(saveTasks);

        for (QueryResult queryResult : queryResults) {
            try {
                // block and wait for each job to complete then save results to a file
                QueryStats stats = cloud.saveBigQueryResultsToFile(queryResult.getJobId(),
                        queryResult.getFilename(), this.bucket, null, this.ddLimit);
                queryResult.setStats(stats);

            } catch (IOException ioe) {

                log.error("failed to save query results to file, jobId: " + queryResult.getJobId(), ioe);
                throw ioe;
            }
        }

        try (PrintWriter writer = new PrintWriter(
                new GZIPOutputStream(new FileOutputStream(inferredTriplesFile), Constants.GZIP_BUF_SIZE))) {

            // now loop through the queries
            //for(Entry<Term, Set<Triple>> entry: schemaTerms.entrySet()) {
            for (QueryResult queryResult : queryResults) {

                //Term term = entry.getKey();
                QueryStats stats = queryResult.getStats();

                BigInteger rows = stats.getTotalRows();//term.getRows();

                this.totalBytes = this.totalBytes + stats.getTotalProcessedBytes();//term.getBytes();

                // only process if triples are found matching this term
                if (!BigInteger.ZERO.equals(rows)) {

                    stopwatch1.start();

                    int inferredTriplesCount = this.inferAndSaveTriplesToFile(queryResult, productiveTerms,
                            writer);

                    interimInferredTriples += inferredTriplesCount;

                    this.totalRows = this.totalRows.add(rows);

                    stopwatch1.stop();

                } else {
                    log.info("Skipping query as no data is found");
                }
            }
        }

        totalInferredTriples += interimInferredTriples;

        if (interimInferredTriples > 0) {

            // stream smaller numbers of inferred triples
            // try uploading from cloud storage
            int streamingThreshold = Config.getIntegerProperty("ecarf.io.reasoning.streaming.threshold",
                    100000);

            log.info("Inserting " + interimInferredTriples + ", inferred triples into Big Data table for "
                    + productiveTerms.size() + " productive terms. Filename: " + inferredTriplesFile);

            if (interimInferredTriples <= streamingThreshold) {
                // stream the data

                Set<Triple> inferredTriples = TripleUtils.loadCompressedCSVTriples(inferredTriplesFile, true);
                log.info("Total triples to stream into Big Data: " + inferredTriples.size());
                cloud.streamObjectsIntoBigData(inferredTriples,
                        TableUtils.getBigQueryEncodedTripleTable(table));

                log.info("All inferred triples are streamed into Big Data table");

            } else {

                // load the data through cloud storage
                // upload the file to cloud storage
                log.info("Uploading inferred triples file into cloud storage: " + inferredTriplesFile);
                StorageObject file = cloud.uploadFileToCloudStorage(inferredTriplesFile, bucket);
                log.info("File " + file + ", uploaded successfully. Now loading it into big data.");

                String jobId = cloud.loadCloudStorageFilesIntoBigData(Lists.newArrayList(file.getUri()),
                        TableUtils.getBigQueryEncodedTripleTable(table), false);
                log.info(
                        "All inferred triples are loaded into Big Data table through cloud storage, completed jobId: "
                                + jobId);

            }

            // reset empty retries
            emptyRetries = 0;

            stopwatch2.reset();

        } else {
            log.info("No new inferred triples");
            // increment empty retries
            emptyRetries++;

            if (!stopwatch2.isRunning()) {
                stopwatch2.start();
            }
        }

        log.info("Total inferred triples so far = " + totalInferredTriples + ", current retry count: "
                + emptyRetries);

        if (emptyRetries < maxRetries) {
            ApiUtils.block(Config.getIntegerProperty(Constants.REASON_SLEEP_KEY, 20));

            // FIXME move into the particular cloud implementation service
            long elapsed = System.currentTimeMillis() - start;
            decoratedTable = "[" + table + "@-" + elapsed + "-]";

            log.info("Using table decorator: " + decoratedTable + ". Empty retries count: " + emptyRetries);
        }

    } while (emptyRetries < maxRetries); // end timestamp loop

    //executor.shutdown();
    log.info("Finished reasoning, total inferred triples = " + totalInferredTriples);
    //log.info("Number of avoided duplicate terms = " + this.duplicates);
    log.info("Total rows retrieved from big data = " + this.totalRows);
    log.info("Total processed GBytes = " + ((double) this.totalBytes / FileUtils.ONE_GB));
    log.info("Total process reasoning time (serialization in inf file) = " + stopwatch1);
    log.info("Total time spent in empty inference cycles = " + stopwatch2);
}

From source file:ch.unifr.pai.twice.widgets.mpproxy.server.SimpleHttpUrlConnectionServletFilter.java

/**
 * Apply the filter logic
 * 
 * @see javax.servlet.Filter#doFilter(javax.servlet.ServletRequest, javax.servlet.ServletResponse, javax.servlet.FilterChain)
 */
@Override
public void doFilter(ServletRequest genericRequest, ServletResponse genericResponse, FilterChain chain)
        throws IOException, ServletException {
    if (genericRequest instanceof HttpServletRequest && genericResponse instanceof HttpServletResponse) {
        HttpServletRequest request = (HttpServletRequest) genericRequest;
        HttpServletResponse response = (HttpServletResponse) genericResponse;

        if (request.getSession().getAttribute(Constants.uuidCookie) == null) {
            request.getSession().setAttribute(Constants.uuidCookie, UUID.randomUUID().toString());
        }
        response.addCookie(new Cookie(Constants.uuidCookie,
                request.getSession().getAttribute(Constants.uuidCookie).toString()));
        String fullUrl = getFullRequestString(request);

        fullUrl.replace("gwt.codesvr=127.0.0.1:9997&", "");
        String servletPath = getServletPath(request);
        if (!servletPath.endsWith("/"))
            servletPath += "/";

        URLParser parser = new URLParser(fullUrl, servletPath);
        String url = parser.getFullProxyPath();

        // Prevent the managing resources to be filtered.
        if (request.getRequestURL().toString().startsWith(servletPath + Constants.nonFilterPrefix)
                || (url != null && url.equals(fullUrl))) {
            chain.doFilter(genericRequest, genericResponse);
            return;
        }

        // The read only screen
        if (request.getRequestURL().toString().contains("miceScreenShot")) {

            String result = ReadOnlyPresentation.getScreenshotForUUID(request.getParameter("uuid"));
            PrintWriter w = response.getWriter();
            if (result == null) {
                w.println("No screenshot available");
            } else {
                w.print(result);
            }
            w.flush();
            w.close();
            return;
        }
        // ProxyURLParser parser = new ProxyURLParser(fullUrl);
        // String url = parser.writeRequestUrl();
        if (url == null || url.isEmpty() || !url.startsWith("http")) {
            // We've lost context - let's try to re-establish it from other sources...
            String newProxyBase = null;

            // ... a referer is the best hint
            String referer = request.getHeader("Referer");
            if (referer != null && !referer.isEmpty()) {
                URLParser refererParser = new URLParser(referer, Rewriter.getServletPath(referer));
                if (refererParser.getProxyBasePath() != null && !refererParser.getProxyBasePath().isEmpty()) {
                    newProxyBase = refererParser.getProxyBasePath();
                }
            }
            // ... otherwise use the last used proxy (since it probably is a
            // redirection we might have success with this)
            if (newProxyBase == null) {
                newProxyBase = (String) request.getSession().getAttribute("lastProxy");
            }

            // Now redirect the client to the new url
            if (newProxyBase != null) {
                url = newProxyBase + (url != null && !url.isEmpty() ? '/' + url : "/");
                response.sendRedirect(servletPath + url);

            } else {
                response.sendError(404);
            }
            return;

        }
        url = url.replace("\\|", "&#124;");

        ProcessResult result = null;
        try {
            result = servlet.loadFromProxy(request, response, url, servletPath, parser.getProxyBasePath());

        } catch (UnknownHostException e) {
            // If we get an unknown host exception, we try it with the referer
            String referer = request.getHeader("Referer");
            if (parser.getRefererRelative() != null && referer != null && !referer.isEmpty()) {
                URLParser refererParser = new URLParser(referer, Rewriter.getServletPath(referer));
                if (refererParser.getProxyBasePath() != null && !refererParser.getProxyBasePath().isEmpty()) {
                    String newUrl = refererParser.getProxyBasePath() + parser.getRefererRelative();
                    try {
                        result = servlet.loadFromProxy(request, response, newUrl, servletPath,
                                refererParser.getProxyBasePath());
                    } catch (UnknownHostException e1) {
                        result = null;
                        response.sendError(404);
                    }
                } else {
                    result = null;
                    response.sendError(404);
                }
            } else {
                result = null;
                response.sendError(404);
            }

        }

        if (result != null) {
            // If an error is returned, we don't need to process the input stream
            InputStream input;
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            OutputStream output = outputStream;
            if (result.isGzipped()) {
                output = new GZIPOutputStream(outputStream, 100000);
            }
            String s = URLRewriterServer.process(result.getContent(), fullUrl);
            s = URLRewriterServer.removeTopHref(s);
            if (request.getSession().getAttribute(Constants.miceManaged) == null
                    || !request.getSession().getAttribute(Constants.miceManaged).equals("true")) {
                s = s.replace("<head>",
                        "<head><meta name=\"viewport\" content=\"width=device-width, initial-scale=1, maximum-scale=1\">");
                // Pattern p = Pattern.compile("<body.*?>");
                // Matcher m = p.matcher(s);
                // StringBuffer sb = new StringBuffer();
                // while (m.find()) {
                // m.appendReplacement(
                // sb,
                // m.group()
                // + "<link href=\""
                // + servletPath
                // +
                // "miceproxy/navigation.css\" rel=\"stylesheet\" type=\"text/css\"/><div id=\"miceNavigation\"><input id=\"miceUrlBox\" type=\"text\" value=\""
                // + parser.getFullProxyPath()
                // +
                // "\"/></div><div id=\"contentWrapper\">");
                // }
                // s = m.appendTail(sb).toString();
                // s = s.replace("</body>",
                // "</div></body>");
            }

            // The page shall only be injected if it is an HTML page and if it
            // really has HTML content (prevents e.g. blank.html from being injected)
            if (result.getContentType() != null && result.getContentType().contains("text/html")
                    && (s.contains("body") || s.contains("BODY")))
                s += "<script type=\"text/javascript\" language=\"javascript\" src=\"" + servletPath
                        + "miceproxy/miceproxy.nocache.js\"></script>";
            IOUtils.write(s, output, result.getCharset());
            output.flush();
            if (output instanceof GZIPOutputStream)
                ((GZIPOutputStream) output).finish();
            outputStream.writeTo(response.getOutputStream());
        }

    }
}
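
Note that this filter calls finish() on the GZIPOutputStream instead of close(): finish() completes the compressed data and writes the gzip trailer while leaving the underlying ByteArrayOutputStream open, so the buffered bytes can still be copied to the servlet response afterwards.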

From source file:com.hujiang.restvolley.webapi.request.JsonStreamerEntity.java

@Override
public void writeTo(final OutputStream out) throws IOException {
    if (out == null) {
        throw new IllegalStateException("Output stream cannot be null.");
    }

    // Record the time when uploading started.
    long now = System.currentTimeMillis();

    // Use GZIP compression when sending streams, otherwise just use
    // a buffered output stream to speed things up a bit.
    OutputStream os = contentEncoding != null ? new GZIPOutputStream(out, BUFFER_SIZE) : out;

    // Always send a JSON object.
    os.write('{');

    // Keys used by the HashMaps.
    Set<String> keys = jsonParams.keySet();

    int keysCount = keys.size();
    if (0 < keysCount) {
        int keysProcessed = 0;
        boolean isFileWrapper;

        // Go over all keys and handle each's value.
        for (String key : keys) {
            // Indicate that this key has been processed.
            keysProcessed++;

            try {
                // Evaluate the value (which cannot be null).
                Object value = jsonParams.get(key);

                // Write the JSON object's key.
                os.write(escape(key));
                os.write(':');

                // Bail out prematurely if value's null.
                if (value == null) {
                    os.write(JSON_NULL);
                } else {
                    writeToJsonStream(os, value);
                }
            } finally {
                // Separate each K:V with a comma, except the last one.
                if (elapsedField != null || keysProcessed < keysCount) {
                    os.write(',');
                }
            }
        }

        // Calculate how many milliseconds it took to upload the contents.
        long elapsedTime = System.currentTimeMillis() - now;

        // Include the elapsed time taken to upload everything.
        // This might be useful for somebody, but it serves us well since
        // there will almost always be a ',' as the last sent character.
        if (elapsedField != null) {
            os.write(elapsedField);
            os.write(':');
            os.write((elapsedTime + "").getBytes());
        }
    }

    // Close the JSON object.
    os.write('}');

    // Flush the contents up the stream.
    os.flush();
}
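
Here the gzip wrapper is applied only when a Content-Encoding has been negotiated (contentEncoding != null). Note that writeTo() flushes os but never calls finish() on it, so when the gzip path is taken, the final deflate block and trailer are only emitted if the wrapper is closed elsewhere.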

From source file:org.alfresco.mobile.android.api.network.NetworkHttpInvoker.java

protected Response invoke(UrlBuilder url, String method, String contentType, Map<String, String> headers,
        Output writer, BindingSession session, BigInteger offset, BigInteger length) {
    try {
        // log before connect
        //Log.d("URL", url.toString());
        if (LOG.isDebugEnabled()) {
            LOG.debug(method + " " + url);
        }

        // connect
        HttpURLConnection conn = getHttpURLConnection(new URL(url.toString()));
        conn.setRequestMethod(method);
        conn.setDoInput(true);
        conn.setDoOutput(writer != null);
        conn.setAllowUserInteraction(false);
        conn.setUseCaches(false);
        conn.setRequestProperty(HTTP.USER_AGENT, ClientVersion.OPENCMIS_CLIENT);

        // timeouts
        int connectTimeout = session.get(SessionParameter.CONNECT_TIMEOUT, -1);
        if (connectTimeout >= 0) {
            conn.setConnectTimeout(connectTimeout);
        }

        int readTimeout = session.get(SessionParameter.READ_TIMEOUT, -1);
        if (readTimeout >= 0) {
            conn.setReadTimeout(readTimeout);
        }

        // set content type
        if (contentType != null) {
            conn.setRequestProperty(HTTP.CONTENT_TYPE, contentType);
        }
        // set other headers
        if (headers != null) {
            for (Map.Entry<String, String> header : headers.entrySet()) {
                conn.addRequestProperty(header.getKey(), header.getValue());
            }
        }

        // authenticate
        AuthenticationProvider authProvider = CmisBindingsHelper.getAuthenticationProvider(session);
        if (authProvider != null) {
            Map<String, List<String>> httpHeaders = authProvider.getHTTPHeaders(url.toString());
            if (httpHeaders != null) {
                for (Map.Entry<String, List<String>> header : httpHeaders.entrySet()) {
                    if (header.getValue() != null) {
                        for (String value : header.getValue()) {
                            conn.addRequestProperty(header.getKey(), value);
                        }
                    }
                }
            }

            if (conn instanceof HttpsURLConnection) {
                SSLSocketFactory sf = authProvider.getSSLSocketFactory();
                if (sf != null) {
                    ((HttpsURLConnection) conn).setSSLSocketFactory(sf);
                }

                HostnameVerifier hv = authProvider.getHostnameVerifier();
                if (hv != null) {
                    ((HttpsURLConnection) conn).setHostnameVerifier(hv);
                }
            }
        }

        // range
        if ((offset != null) || (length != null)) {
            StringBuilder sb = new StringBuilder("bytes=");

            if ((offset == null) || (offset.signum() == -1)) {
                offset = BigInteger.ZERO;
            }

            sb.append(offset.toString());
            sb.append("-");

            if ((length != null) && (length.signum() == 1)) {
                sb.append(offset.add(length.subtract(BigInteger.ONE)).toString());
            }

            conn.setRequestProperty("Range", sb.toString());
        }

        // compression
        Object compression = session.get(AlfrescoSession.HTTP_ACCEPT_ENCODING);
        if (compression == null) {
            conn.setRequestProperty("Accept-Encoding", "");
        } else {
            Boolean compressionValue;
            try {
                compressionValue = Boolean.parseBoolean(compression.toString());
                if (compressionValue) {
                    conn.setRequestProperty("Accept-Encoding", "gzip,deflate");
                } else {
                    conn.setRequestProperty("Accept-Encoding", "");
                }
            } catch (Exception e) {
                conn.setRequestProperty("Accept-Encoding", compression.toString());
            }
        }

        // locale
        if (session.get(AlfrescoSession.HTTP_ACCEPT_LANGUAGE) instanceof String
                && session.get(AlfrescoSession.HTTP_ACCEPT_LANGUAGE) != null) {
            conn.setRequestProperty("Accept-Language",
                    session.get(AlfrescoSession.HTTP_ACCEPT_LANGUAGE).toString());
        }

        // send data
        if (writer != null) {
            Object chunkTransfert = session.get(AlfrescoSession.HTTP_CHUNK_TRANSFERT);
            if (chunkTransfert != null && Boolean.parseBoolean(chunkTransfert.toString())) {
                conn.setRequestProperty(HTTP.TRANSFER_ENCODING, "chunked");
                conn.setChunkedStreamingMode(0);
            }

            conn.setConnectTimeout(900000);

            OutputStream connOut = null;

            Object clientCompression = session.get(SessionParameter.CLIENT_COMPRESSION);
            if ((clientCompression != null) && Boolean.parseBoolean(clientCompression.toString())) {
                conn.setRequestProperty(HTTP.CONTENT_ENCODING, "gzip");
                connOut = new GZIPOutputStream(conn.getOutputStream(), 4096);
            } else {
                connOut = conn.getOutputStream();
            }

            OutputStream out = new BufferedOutputStream(connOut, BUFFER_SIZE);
            writer.write(out);
            out.flush();
        }

        // connect
        conn.connect();

        // get stream, if present
        int respCode = conn.getResponseCode();
        InputStream inputStream = null;
        if ((respCode == HttpStatus.SC_OK) || (respCode == HttpStatus.SC_CREATED)
                || (respCode == HttpStatus.SC_NON_AUTHORITATIVE_INFORMATION)
                || (respCode == HttpStatus.SC_PARTIAL_CONTENT)) {
            inputStream = conn.getInputStream();
        }

        // log after connect
        if (LOG.isTraceEnabled()) {
            LOG.trace(method + " " + url + " > Headers: " + conn.getHeaderFields());
        }

        // forward response HTTP headers
        if (authProvider != null) {
            authProvider.putResponseHeaders(url.toString(), respCode, conn.getHeaderFields());
        }

        // get the response
        return new Response(respCode, conn.getResponseMessage(), conn.getHeaderFields(), inputStream,
                conn.getErrorStream());
    } catch (Exception e) {
        throw new CmisConnectionException("Cannot access " + url + ": " + e.getMessage(), e);
    }
}
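
When SessionParameter.CLIENT_COMPRESSION is enabled, the request body is gzip-compressed through a GZIPOutputStream with a 4 KB buffer before being buffered again for the connection. Unlike the Apache-client variant below, this version never explicitly calls finish() on the gzip wrapper after flushing.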

From source file:org.apache.chemistry.opencmis.client.bindings.spi.http.AbstractApacheClientHttpInvoker.java

protected Response invoke(UrlBuilder url, String method, String contentType, Map<String, String> headers,
        final Output writer, final BindingSession session, BigInteger offset, BigInteger length) {
    int respCode = -1;

    try {
        // log before connect
        if (LOG.isDebugEnabled()) {
            LOG.debug("Session {}: {} {}", session.getSessionId(), method, url);
        }

        // get HTTP client object from session
        DefaultHttpClient httpclient = (DefaultHttpClient) session.get(HTTP_CLIENT);
        if (httpclient == null) {
            session.writeLock();
            try {
                httpclient = (DefaultHttpClient) session.get(HTTP_CLIENT);
                if (httpclient == null) {
                    httpclient = createHttpClient(url, session);
                    session.put(HTTP_CLIENT, httpclient, true);
                }
            } finally {
                session.writeUnlock();
            }
        }

        HttpRequestBase request = null;

        if ("GET".equals(method)) {
            request = new HttpGet(url.toString());
        } else if ("POST".equals(method)) {
            request = new HttpPost(url.toString());
        } else if ("PUT".equals(method)) {
            request = new HttpPut(url.toString());
        } else if ("DELETE".equals(method)) {
            request = new HttpDelete(url.toString());
        } else {
            throw new CmisRuntimeException("Invalid HTTP method!");
        }

        // set content type
        if (contentType != null) {
            request.setHeader("Content-Type", contentType);
        }
        // set other headers
        if (headers != null) {
            for (Map.Entry<String, String> header : headers.entrySet()) {
                request.addHeader(header.getKey(), header.getValue());
            }
        }

        // authenticate
        AuthenticationProvider authProvider = CmisBindingsHelper.getAuthenticationProvider(session);
        if (authProvider != null) {
            Map<String, List<String>> httpHeaders = authProvider.getHTTPHeaders(url.toString());
            if (httpHeaders != null) {
                for (Map.Entry<String, List<String>> header : httpHeaders.entrySet()) {
                    if (header.getKey() != null && isNotEmpty(header.getValue())) {
                        String key = header.getKey();
                        if (key.equalsIgnoreCase("user-agent")) {
                            request.setHeader("User-Agent", header.getValue().get(0));
                        } else {
                            for (String value : header.getValue()) {
                                if (value != null) {
                                    request.addHeader(key, value);
                                }
                            }
                        }
                    }
                }
            }
        }

        // range
        if ((offset != null) || (length != null)) {
            StringBuilder sb = new StringBuilder("bytes=");

            if ((offset == null) || (offset.signum() == -1)) {
                offset = BigInteger.ZERO;
            }

            sb.append(offset.toString());
            sb.append('-');

            if ((length != null) && (length.signum() == 1)) {
                sb.append(offset.add(length.subtract(BigInteger.ONE)).toString());
            }

            request.setHeader("Range", sb.toString());
        }

        // compression
        Object compression = session.get(SessionParameter.COMPRESSION);
        if ((compression != null) && Boolean.parseBoolean(compression.toString())) {
            request.setHeader("Accept-Encoding", "gzip,deflate");
        }

        // locale
        if (session.get(CmisBindingsHelper.ACCEPT_LANGUAGE) instanceof String) {
            request.setHeader("Accept-Language", session.get(CmisBindingsHelper.ACCEPT_LANGUAGE).toString());
        }

        // send data
        if (writer != null) {
            Object clientCompression = session.get(SessionParameter.CLIENT_COMPRESSION);
            final boolean clientCompressionFlag = (clientCompression != null)
                    && Boolean.parseBoolean(clientCompression.toString());
            if (clientCompressionFlag) {
                request.setHeader("Content-Encoding", "gzip");
            }

            AbstractHttpEntity streamEntity = new AbstractHttpEntity() {
                @Override
                public boolean isChunked() {
                    return true;
                }

                @Override
                public boolean isRepeatable() {
                    return false;
                }

                @Override
                public long getContentLength() {
                    return -1;
                }

                @Override
                public boolean isStreaming() {
                    return false;
                }

                @Override
                public InputStream getContent() throws IOException {
                    throw new UnsupportedOperationException();
                }

                @Override
                public void writeTo(final OutputStream outstream) throws IOException {
                    OutputStream connOut = null;

                    if (clientCompressionFlag) {
                        connOut = new GZIPOutputStream(outstream, 4096);
                    } else {
                        connOut = outstream;
                    }

                    OutputStream out = new BufferedOutputStream(connOut, BUFFER_SIZE);
                    try {
                        writer.write(out);
                    } catch (IOException ioe) {
                        throw ioe;
                    } catch (Exception e) {
                        throw new IOException(e);
                    }
                    out.flush();

                    if (connOut instanceof GZIPOutputStream) {
                        ((GZIPOutputStream) connOut).finish();
                    }
                }
            };
            ((HttpEntityEnclosingRequestBase) request).setEntity(streamEntity);
        }

        // connect
        HttpResponse response = httpclient.execute(request);
        HttpEntity entity = response.getEntity();

        // get stream, if present
        respCode = response.getStatusLine().getStatusCode();
        InputStream inputStream = null;
        InputStream errorStream = null;

        if ((respCode == 200) || (respCode == 201) || (respCode == 203) || (respCode == 206)) {
            if (entity != null) {
                inputStream = entity.getContent();
            } else {
                inputStream = new ByteArrayInputStream(new byte[0]);
            }
        } else {
            if (entity != null) {
                errorStream = entity.getContent();
            } else {
                errorStream = new ByteArrayInputStream(new byte[0]);
            }
        }

        // collect headers
        Map<String, List<String>> responseHeaders = new HashMap<String, List<String>>();
        for (Header header : response.getAllHeaders()) {
            List<String> values = responseHeaders.get(header.getName());
            if (values == null) {
                values = new ArrayList<String>();
                responseHeaders.put(header.getName(), values);
            }
            values.add(header.getValue());
        }

        // log after connect
        if (LOG.isTraceEnabled()) {
            LOG.trace("Session {}: {} {} > Headers: {}", session.getSessionId(), method, url,
                    responseHeaders.toString());
        }

        // forward response HTTP headers
        if (authProvider != null) {
            authProvider.putResponseHeaders(url.toString(), respCode, responseHeaders);
        }

        // get the response
        return new Response(respCode, response.getStatusLine().getReasonPhrase(), responseHeaders, inputStream,
                errorStream);
    } catch (Exception e) {
        String status = (respCode > 0 ? " (HTTP status code " + respCode + ")" : "");
        throw new CmisConnectionException("Cannot access \"" + url + "\"" + status + ": " + e.getMessage(), e);
    }
}
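
In contrast to the HttpURLConnection-based example above, this writeTo() implementation explicitly finishes the GZIPOutputStream, so the gzip trailer is written before the entity body ends, without closing the underlying connection stream.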

From source file:io.ecarf.core.utils.Utils.java

/**
 * Serialize an object to a file
 * @param filename
 * @param object
 * @throws IOException
 */
public static void objectToFile(String filename, Object object, boolean compress, boolean java)
        throws IOException {
    Validate.isTrue(object instanceof Serializable, "object must implement Serializable");

    log.info("Serializing object of class: " + object.getClass() + " to file: " + filename
            + ", with compress = " + compress);

    OutputStream stream = new FileOutputStream(filename);

    if (compress) {
        stream = new GZIPOutputStream(stream, Constants.GZIP_BUF_SIZE);

    } //else {

    stream = new BufferedOutputStream(stream, Constants.GZIP_BUF_SIZE);
    //}
    if (java) {

        try (ObjectOutput oos = new ObjectOutputStream(stream);) {
            oos.writeObject(object);
        }

    } else {
        Kryo kryo = POOL.borrow();
        // do s.th. with kryo here, and afterwards release it
        try (Output output = new Output(stream, Constants.GZIP_BUF_SIZE)) {
            kryo.writeObject(output, object);
        }

        POOL.release(kryo);

    }

}
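
Note that the helper always adds a BufferedOutputStream on top, whether or not the gzip wrapper was applied first. A hypothetical call, with an illustrative file name and object (matching the signature above):

    // Illustrative only: compress with gzip and serialize with Kryo (java = false).
    Utils.objectToFile("/tmp/schema_terms.kryo.gz", schemaTerms, true, false);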

From source file:eionet.eunis.servlets.DownloadServlet.java

/**
 * Process the actual request.
 *
 * @param request The request to be processed.
 * @param response The response to be created.
 * @param content Whether the request body should be written (GET) or not (HEAD).
 * @throws IOException If something fails at I/O level.
 * @throws ServletException
 */
private void processRequest(HttpServletRequest request, HttpServletResponse response, boolean content)
        throws IOException, ServletException {

    String requestURI = request.getRequestURI();
    String contextPath = request.getContextPath();
    String pathInfo = request.getPathInfo();
    String servletPath = request.getServletPath();

    // Create the abstract file reference to the requested file.
    File file = null;
    String fileRelativePath = StringUtils.substringAfter(request.getRequestURI(), request.getContextPath());
    fileRelativePath = StringUtils.replace(fileRelativePath, "%20", " ");
    if (StringUtils.isNotEmpty(fileRelativePath) && StringUtils.isNotEmpty(appHome)) {
        file = new File(appHome, fileRelativePath);
    }

    // If file was not found, send 404.
    if (file == null || !file.exists() || file.isDirectory()) {
        response.sendError(HttpServletResponse.SC_NOT_FOUND);
        return;
    }

    // Prepare some variables. The ETag is an unique identifier of the file.
    String fileName = file.getName();
    long length = file.length();
    long lastModified = file.lastModified();
    String eTag = fileName + "_" + length + "_" + lastModified;

    // Validate request headers for caching ---------------------------------------------------

    // If-None-Match header should contain "*" or ETag. If so, then return 304.
    String ifNoneMatch = request.getHeader("If-None-Match");
    if (ifNoneMatch != null && matches(ifNoneMatch, eTag)) {
        response.setHeader("ETag", eTag); // Required in 304.
        response.sendError(HttpServletResponse.SC_NOT_MODIFIED);
        return;
    }

    // If-Modified-Since header should be greater than LastModified. If so, then return 304.
    // This header is ignored if any If-None-Match header is specified.
    long ifModifiedSince = request.getDateHeader("If-Modified-Since");
    if (ifNoneMatch == null && ifModifiedSince != -1 && ifModifiedSince + 1000 > lastModified) {
        response.setHeader("ETag", eTag); // Required in 304.
        response.sendError(HttpServletResponse.SC_NOT_MODIFIED);
        return;
    }

    // Validate request headers for resume ----------------------------------------------------

    // If-Match header should contain "*" or ETag. If not, then return 412.
    String ifMatch = request.getHeader("If-Match");
    if (ifMatch != null && !matches(ifMatch, eTag)) {
        response.sendError(HttpServletResponse.SC_PRECONDITION_FAILED);
        return;
    }

    // If-Unmodified-Since header should be greater than LastModified. If not, then return 412.
    long ifUnmodifiedSince = request.getDateHeader("If-Unmodified-Since");
    if (ifUnmodifiedSince != -1 && ifUnmodifiedSince + 1000 <= lastModified) {
        response.sendError(HttpServletResponse.SC_PRECONDITION_FAILED);
        return;
    }

    // Validate and process range -------------------------------------------------------------

    // Prepare some variables. The full Range represents the complete file.
    Range full = new Range(0, length - 1, length);
    List<Range> ranges = new ArrayList<Range>();

    // Validate and process Range and If-Range headers.
    String range = request.getHeader("Range");
    if (range != null) {

        // Range header should match format "bytes=n-n,n-n,n-n...". If not, then return 416.
        if (!range.matches("^bytes=\\d*-\\d*(,\\d*-\\d*)*$")) {
            response.setHeader("Content-Range", "bytes */" + length); // Required in 416.
            response.sendError(HttpServletResponse.SC_REQUESTED_RANGE_NOT_SATISFIABLE);
            return;
        }

        // If-Range header should either match ETag or be greater then LastModified. If not,
        // then return full file.
        String ifRange = request.getHeader("If-Range");
        if (ifRange != null && !ifRange.equals(eTag)) {
            try {
                long ifRangeTime = request.getDateHeader("If-Range"); // Throws IAE if invalid.
                if (ifRangeTime != -1 && ifRangeTime + 1000 < lastModified) {
                    ranges.add(full);
                }
            } catch (IllegalArgumentException ignore) {
                ranges.add(full);
            }
        }

        // If any valid If-Range header, then process each part of byte range.
        if (ranges.isEmpty()) {
            for (String part : range.substring(6).split(",")) {
                // Assuming a file with a length of 100, the following examples return bytes at:
                // 50-80 (50 to 80), 40- (40 to length=100), -20 (length-20=80 to length=100).
                long start = sublong(part, 0, part.indexOf("-"));
                long end = sublong(part, part.indexOf("-") + 1, part.length());

                if (start == -1) {
                    start = length - end;
                    end = length - 1;
                } else if (end == -1 || end > length - 1) {
                    end = length - 1;
                }

                // Check if Range is syntactically valid. If not, then return 416.
                if (start > end) {
                    response.setHeader("Content-Range", "bytes */" + length); // Required in 416.
                    response.sendError(HttpServletResponse.SC_REQUESTED_RANGE_NOT_SATISFIABLE);
                    return;
                }

                // Add range.
                ranges.add(new Range(start, end, length));
            }
        }
    }

    // Prepare and initialize response --------------------------------------------------------

    // Get content type by file name and set default GZIP support and content disposition.
    String contentType = getServletContext().getMimeType(fileName);
    boolean acceptsGzip = false;
    String disposition = "inline";

    // If content type is unknown, then set the default value.
    // For all content types, see: http://www.w3schools.com/media/media_mimeref.asp
    // To add new content types, add new mime-mapping entry in web.xml.
    if (contentType == null) {
        contentType = "application/octet-stream";
    }

    // If content type is text, then determine whether GZIP content encoding is supported by
    // the browser and append the right character encoding to the content type.
    // Else, except for images, determine content disposition. If content type is supported by
    // the browser, then set to inline, else to attachment, which will pop a 'save as' dialog.
    if (contentType.startsWith("text")) {
        String acceptEncoding = request.getHeader("Accept-Encoding");
        acceptsGzip = acceptEncoding != null && accepts(acceptEncoding, "gzip");
        contentType += ";charset=UTF-8";
    } else if (!contentType.startsWith("image")) {
        String accept = request.getHeader("Accept");
        disposition = accept != null && accepts(accept, contentType) ? "inline" : "attachment";
    }

    // Initialize response.
    response.reset();
    response.setBufferSize(DEFAULT_BUFFER_SIZE);
    response.setHeader("Content-Disposition", disposition + ";filename=\"" + fileName + "\"");
    response.setHeader("Accept-Ranges", "bytes");
    response.setHeader("ETag", eTag);
    response.setDateHeader("Last-Modified", lastModified);
    response.setDateHeader("Expires", System.currentTimeMillis() + DEFAULT_EXPIRE_TIME);

    // Send requested file (part(s)) to client ------------------------------------------------

    // Prepare streams.
    RandomAccessFile input = null;
    OutputStream output = null;

    try {
        // Open streams.
        input = new RandomAccessFile(file, "r");
        output = response.getOutputStream();

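        // Note: the '==' below is intentional; 'full' is only ever added to 'ranges'
        // when the If-Range check fails, so a reference comparison is sufficient.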
        if (ranges.isEmpty() || ranges.get(0) == full) {

            // Return full file.
            Range r = full;
            response.setContentType(contentType);
            response.setHeader("Content-Range", "bytes " + r.start + "-" + r.end + "/" + r.total);

            if (content) {
                if (acceptsGzip) {
                    // The browser accepts GZIP, so GZIP the content.
                    response.setHeader("Content-Encoding", "gzip");
                    output = new GZIPOutputStream(output, DEFAULT_BUFFER_SIZE);
                } else {
                    // Content length is not directly predictable in case of GZIP.
                    // So only add it when GZIP is not used, else the browser will hang.
                    response.setHeader("Content-Length", String.valueOf(r.length));
                }

                // Copy full range.
                copy(input, output, r.start, r.length);
            }

        } else if (ranges.size() == 1) {

            // Return single part of file.
            Range r = ranges.get(0);
            response.setContentType(contentType);
            response.setHeader("Content-Range", "bytes " + r.start + "-" + r.end + "/" + r.total);
            response.setHeader("Content-Length", String.valueOf(r.length));
            response.setStatus(HttpServletResponse.SC_PARTIAL_CONTENT); // 206.

            if (content) {
                // Copy single part range.
                copy(input, output, r.start, r.length);
            }

        } else {

            // Return multiple parts of file.
            response.setContentType("multipart/byteranges; boundary=" + MULTIPART_BOUNDARY);
            response.setStatus(HttpServletResponse.SC_PARTIAL_CONTENT); // 206.

            if (content) {
                // Cast back to ServletOutputStream to get the easy println methods.
                ServletOutputStream sos = (ServletOutputStream) output;

                // Copy multi part range.
                for (Range r : ranges) {
                    // Add multipart boundary and header fields for every range.
                    sos.println();
                    sos.println("--" + MULTIPART_BOUNDARY);
                    sos.println("Content-Type: " + contentType);
                    sos.println("Content-Range: bytes " + r.start + "-" + r.end + "/" + r.total);

                    // Copy single part range of multi part range.
                    copy(input, output, r.start, r.length);
                }

                // End with multipart boundary.
                sos.println();
                sos.println("--" + MULTIPART_BOUNDARY + "--");
            }
        }
    } finally {
        // Gently close streams.
        close(output);
        close(input);
    }
}
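
The servlet above relies on helper members defined elsewhere in the class: the Range value object plus the matches, accepts, sublong, copy and close methods. Below is a minimal sketch of Range and copy, reconstructed from how they are called above; the bodies are illustrative assumptions, not the original implementations, and they assume the surrounding servlet class with its DEFAULT_BUFFER_SIZE constant and imports of java.io.RandomAccessFile, java.io.OutputStream and java.io.IOException.

// Sketch only: reconstructed from the call sites in the method above.

/** Represents a byte range of the requested file. */
protected static class Range {
    long start;
    long end;
    long length;
    long total;

    /** start/end are inclusive byte positions; total is the file length. */
    public Range(long start, long end, long total) {
        this.start = start;
        this.end = end;
        this.length = end - start + 1;
        this.total = total;
    }
}

/** Copies the given byte range of the input to the output. */
private static void copy(RandomAccessFile input, OutputStream output, long start, long length)
        throws IOException {
    byte[] buffer = new byte[DEFAULT_BUFFER_SIZE];
    int read;

    if (input.length() == length) {
        // The full file was requested: stream it from the current position.
        while ((read = input.read(buffer)) > 0) {
            output.write(buffer, 0, read);
        }
    } else {
        // A part was requested: seek to the start and copy exactly 'length' bytes.
        input.seek(start);
        long toRead = length;

        while ((read = input.read(buffer)) > 0) {
            if ((toRead -= read) > 0) {
                output.write(buffer, 0, read);
            } else {
                // Last chunk: write only the remaining bytes, then stop.
                output.write(buffer, 0, (int) toRead + read);
                break;
            }
        }
    }
}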

From source file:com.jrestless.aws.gateway.handler.GatewayRequestObjectHandlerIntTest.java

@Test
public void testEncodedBase64Decoding() throws IOException {
    DefaultGatewayRequest request = new DefaultGatewayRequestBuilder().httpMethod("PUT")
            .resource("/binary-data")
            .body(new String(Base64.getEncoder().encode("test".getBytes()), StandardCharsets.UTF_8))
            .base64Encoded(true).headers(Collections.singletonMap(HttpHeaders.CONTENT_ENCODING, "gzip"))
            .build();
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try (GZIPOutputStream zipOut = new GZIPOutputStream(baos, true)) {
        zipOut.write("test".getBytes());
    } // try-with-resources closes the stream, which finishes the GZIP trailer and flushes.
    request.setBody(Base64.getEncoder().encodeToString(baos.toByteArray()));
    handler.handleRequest(request, context);
    verify(testService).binaryData("test".getBytes());
}
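
The test above passes syncFlush=true to the two-argument constructor, which is the overload documented on this page. As a standalone illustration of what that flag does (a minimal sketch, not taken from the source above): with syncFlush enabled, flush() flushes the underlying Deflater in SYNC_FLUSH mode, so pending compressed bytes reach the wrapped stream immediately rather than waiting for close(), which is what writes the GZIP trailer.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.zip.GZIPOutputStream;

public class SyncFlushDemo {

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();

        // syncFlush=true: flush() performs a Deflater SYNC_FLUSH, pushing the
        // compressed bytes written so far into 'baos' right away.
        GZIPOutputStream gzip = new GZIPOutputStream(baos, true);
        gzip.write("hello".getBytes());
        gzip.flush();
        System.out.println("bytes available after flush: " + baos.size());

        // close() finishes the deflater and writes the GZIP trailer (CRC-32 + size).
        gzip.close();
        System.out.println("bytes available after close: " + baos.size());
    }
}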