Example usage for javax.servlet ServletOutputStream close

List of usage examples for javax.servlet ServletOutputStream close

Introduction

On this page you can find example usages of javax.servlet ServletOutputStream.close().

Prototype

public void close() throws IOException 

Document

Closes this output stream and releases any system resources associated with this stream.
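
In practice, as the examples below show, code typically flushes the stream and then closes it in a finally block, or lets try-with-resources do the closing (ServletOutputStream extends OutputStream, which is AutoCloseable). The servlet container also closes the response stream when the request completes, so the explicit close() mainly marks the response body as finished. A minimal sketch of the pattern follows; the servlet class name, file name, and rows are hypothetical.

import java.io.IOException;

import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

// Hypothetical servlet illustrating the flush/close pattern used in the examples below.
public class CsvExportServlet extends HttpServlet {

    @Override
    protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException {
        resp.setContentType("text/csv");
        resp.setHeader("Content-Disposition", "attachment; filename=\"export.csv\"");

        // try-with-resources calls close() even if writing fails
        try (ServletOutputStream out = resp.getOutputStream()) {
            out.println("id,name");
            out.println("1,example");
            out.flush(); // push any buffered bytes before the stream is closed
        }
    }
}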

Usage

From source file:org.openmrs.module.pmtct.util.FileExporter.java

/**
 * Exports an infant test summary for the given patient list as a CSV file attachment.
 * 
 * @param request
 * @param response
 * @param patientList
 * @param filename
 * @param title
 * @throws Exception
 */
public void exportInfantsTestResumeToCSVFile(HttpServletRequest request, HttpServletResponse response,
        List<Object> patientList, String filename, String title) throws Exception {
    ServletOutputStream outputStream = null;
    try {
        outputStream = response.getOutputStream();
        Patient p;
        Patient mother;
        PatientService ps = Context.getPatientService();

        response.setContentType("text/plain");
        response.setHeader("Content-Disposition", "attachment; filename=\"" + filename + "\"");
        outputStream.println("" + title);
        outputStream.println("Number of Patients: " + patientList.size());
        outputStream.println();
        outputStream.println(
                "No. ,Identifier, Names, Mother's Names, Gender, Birthdate, Infant Feeding method, PCR Test Result, Ser. Test 9 months Result, Ser. Test 18 months Result");
        outputStream.println();

        int ids = 0;

        Encounter cpnEnc;

        for (Object patient : patientList) {
            Object[] o = (Object[]) patient;

            p = ps.getPatient(Integer.parseInt(o[0].toString()));
            mother = pmtctTag.getChildMother(p.getPatientId());

            cpnEnc = pmtctTag.getCPNEncounterInfo(p.getPatientId());

            ids += 1;
            outputStream.println(ids + "," + p.getActiveIdentifiers().get(0).getIdentifier() + ","
                    + p.getPersonName() + "," + mother.getPersonName() + "," + p.getGender() + ","
                    + sdf.format(p.getBirthdate()) + ","
                    + pmtctTag.lastObsValueByConceptId(p.getPatientId(), PMTCTConstants.INFANT_FEEDING_METHOD)
                    + "," + pmtctTag.getConceptNameById("" + o[4]) + ","
                    + pmtctTag.getConceptNameById("" + o[5]) + "," + pmtctTag.getConceptNameById("" + o[6]));
        }

        outputStream.flush();
    } catch (Exception e) {
        log.error(e.getMessage());
    } finally {
        if (null != outputStream)
            outputStream.close();
    }
}

From source file:org.openmrs.module.pmtct.util.FileExporter.java

/**
 * Exports general CPN statistics for the given patient list as a CSV file attachment.
 * 
 * @param request
 * @param response
 * @param patientList
 * @param filename
 * @param title
 * @throws Exception
 */
public void exportGeneralStatisticsInCPNToCSVFile(HttpServletRequest request, HttpServletResponse response,
        List<Object> patientList, String filename, String title) throws Exception {
    ServletOutputStream outputStream = null;
    try {
        outputStream = response.getOutputStream();
        Patient p;
        PatientService ps = Context.getPatientService();

        response.setContentType("text/plain");
        response.setHeader("Content-Disposition", "attachment; filename=\"" + filename + "\"");
        outputStream.println("" + title);
        outputStream.println("Number of Patients: " + patientList.size());
        outputStream.println();
        outputStream.println(
                "No. ,Identifier, Names, Gender, BirthDay, Enrollment Date, CPN Date, HIV Status, DPA, Date of Confiment");
        outputStream.println();

        int ids = 0;

        Encounter cpnEnc;
        Encounter matEnc;

        for (Object patient : patientList) {
            Object[] o = (Object[]) patient;

            p = ps.getPatient(Integer.parseInt(o[1].toString()));
            cpnEnc = pmtctTag.getCPNEncounterInfo(p.getPatientId());
            matEnc = pmtctTag.getMaternityEncounterInfo(p.getPatientId());

            ids += 1;
            outputStream.println(ids + "," + p.getActiveIdentifiers().get(0).getIdentifier() + ","
                    + p.getPersonName() + "," + p.getGender() + "," + sdf.format(p.getBirthdate()) + ","
                    + o[4].toString() + "," + sdf.format(cpnEnc.getEncounterDatetime()) + ","
                    + pmtctTag.lastObsValueByConceptId(p.getPatientId(), PMTCTConstants.RESULT_OF_HIV_TEST)
                    + "," + pmtctTag.observationValueByConcept(cpnEnc, PMTCTConstants.PREGNANT_DUE_DATE) + ","
                    + pmtctTag.observationValueByConcept(matEnc, PMTCTConstants.DATE_OF_CONFINEMENT));
        }

        outputStream.flush();
    } catch (Exception e) {
        log.error(e.getMessage());
    } finally {
        if (null != outputStream)
            outputStream.close();
    }
}

From source file:org.openmrs.module.pmtct.util.FileExporter.java

public void exportGeneralStatisticsInMaternityToCSVFile(HttpServletRequest request,
        HttpServletResponse response, List<Object> patientList, String filename, String title)
        throws Exception {
    ServletOutputStream outputStream = null;
    try {
        outputStream = response.getOutputStream();
        Patient p;
        PatientService ps = Context.getPatientService();

        response.setContentType("text/plain");
        response.setHeader("Content-Disposition", "attachment; filename=\"" + filename + "\"");
        outputStream.println("" + title);
        outputStream.println("Number of Patients: " + patientList.size());
        outputStream.println();
        outputStream.println(
                "No. ,Identifier, Names, Enrollment Date, DPA, Date of Confiment, HIV Status, Child Born Status");
        outputStream.println();

        int ids = 0;

        Encounter cpnEnc;
        Encounter matEnc;

        for (Object patient : patientList) {
            Object[] o = (Object[]) patient;

            p = ps.getPatient(Integer.parseInt(o[0].toString()));
            cpnEnc = pmtctTag.getCPNEncounterInfo(p.getPatientId());
            matEnc = pmtctTag.getMaternityEncounterInfo(p.getPatientId());

            ids += 1;
            outputStream.println(ids + "," + p.getActiveIdentifiers().get(0).getIdentifier() + ","
                    + p.getPersonName() + "," + o[3].toString() + ","
                    + pmtctTag.observationValueByConcept(cpnEnc, PMTCTConstants.PREGNANT_DUE_DATE) + ","
                    + pmtctTag.observationValueByConcept(matEnc, PMTCTConstants.DATE_OF_CONFINEMENT) + ","
                    + pmtctTag.lastObsValueByConceptId(p.getPatientId(), PMTCTConstants.RESULT_OF_HIV_TEST)
                    + "," + pmtctTag.observationValueByConcept(matEnc,
                            PMTCTConfigurationUtils.getBornStatusConceptId()));
        }

        outputStream.flush();
    } catch (Exception e) {
        log.error(e.getMessage());
    } finally {
        if (null != outputStream)
            outputStream.close();
    }
}

From source file:au.org.ala.biocache.web.WMSController.java

private void writeBytes(HttpServletResponse response, byte[] bytes) throws IOException {
    response.setContentType("text/plain");
    response.setCharacterEncoding("UTF-8");
    ServletOutputStream outStream = response.getOutputStream();
    outStream.write(bytes);
    outStream.flush();
    outStream.close();
}

From source file:ro.cs.ts.web.servlet.LogoServlet.java

@Override
protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
    logger.debug("doPost - START -");
    ServletOutputStream sos = null;
    logger.debug("-----------------------------------------");

    try {
        //Servlet's OutputStream
        sos = resp.getOutputStream();
        Integer organisationId = null;

        WSLogo logo = null;

        //if there is an exception in getting the logo we display the pixel
        try {
            logo = getLogo((Integer) req.getSession().getAttribute(IConstant.SESS_ORGANISATION_ID));
        } catch (Exception e) {
            logger.debug("Exception at finding logo: Using default pixel");
        }

        if (logo == null || logo.getPicture() == null) {

            logo = new WSLogo();

            //Setting the One Pixel Picture extension
            logo.setExtension(ONE_PIXEL_PIC_EXTENSION);

            String themeCode = IConstant.STANDARD_THEME;
            //Setting the One Pixel Picture as the picture of this logo
            try {
                themeCode = ((UserAuth) SecurityContextHolder.getContext().getAuthentication().getPrincipal())
                        .getThemeCode();
            } catch (Exception e) {
                logger.debug("Exception at getting the theme");

            }

            String onePixelPicLocation = ONE_PIXEL_PIC_LOCATION.replace(REPLACE_THEME, themeCode);
            onePixelPicLocation = FileUtils.getInstance().getRealPathForResource(onePixelPicLocation);
            logo.setPicture(FileUtils.getInstance().getBytesFromFile(new File(onePixelPicLocation)));
        }

        //test if we have logo picture
        if (logo.getPicture() != null) {
            //Setting response's content type after picture's extension 
            resp.setContentType(getMime(logo.getExtension()));

            //Setting response's length (in bytes)
            resp.setContentLength(logo.getPicture().length);
        } else {
            resp.setContentType("image/gif");
            resp.setContentLength(1);
        }

        //Writing the picture
        dumpFile(logo, sos);

    } catch (Exception ex) {
        logger.error("", ex);
    } finally {
        //Flushing and Closing OutputStream
        if (sos != null) {
            sos.flush();
            sos.close();
        }
    }
    logger.debug("doPost - END -");
}

From source file:au.org.ala.biocache.web.WMSController.java

void displayBlankImage(HttpServletResponse response) {
    try {
        ServletOutputStream outStream = response.getOutputStream();
        outStream.write(blankImageBytes);
        outStream.flush();
        outStream.close();
    } catch (Exception e) {
        logger.error("Unable to write image", e);
    }
}

From source file:es.juntadeandalucia.panelGestion.presentacion.controlador.impl.GeosearchController.java

/**
 * Builds the Geosearch configuration files and streams them to the user as a download.
 *
 * To perform the configuration, the configuration files are retrieved, the
 * necessary modifications are applied, and the files are handed to the user
 * so that they can complete the configuration procedure themselves. Recent
 * versions of Solr allow schema modifications through a REST API, but it was
 * considered confusing to configure the schema through the API, transparently
 * to the user, while at the same time giving the user files to replace in
 * Geosearch.
 *
 * @see https://wiki.apache.org/solr/SchemaRESTAPI
 */
public void downloadConfig() {
    String errorMessage = null;

    ServletOutputStream os = null;

    try {
        // checks
        // checks if specified a table
        if (tables.isEmpty()) {
            throw new Exception("No se ha especificado ninguna tabla");
        }
        // checks if specified a field
        boolean specifiedField = false;
        tables_loop: for (GeosearchTableVO table : tables) {
            List<GeosearchFieldVO> fields = table.getFields();
            for (GeosearchFieldVO field : fields) {
                if (field.isDefined()) {
                    specifiedField = true;
                    break tables_loop;
                }
            }
        }
        if (!specifiedField) {
            throw new Exception("No se ha configurado ningn campo de las tablas seleccionadas");
        }
        // checks duplicated fields each table
        for (GeosearchTableVO table : tables) {
            if (tableHasDuplicatedFields(table)) {
                throw new Exception("Existen campos duplicados en la tabla '".concat(table.getTable().getName())
                        .concat("'. Revise su configuracin."));
            }
        }

        // overrides the duplicated field values
        overrideDuplicatedFields();

        checkFieldErrors();

        // gets the zip file with configuration
        byte[] configurationData = generateConfigurationZipData();

        // configures the response
        HttpServletResponse response = (HttpServletResponse) externalCtx.getResponse();
        response.setContentType(CONTENT_TYPE);
        response.addHeader("Content-disposition", "attachment; filename=\"".concat(FILE_NAME).concat("\""));

        os = response.getOutputStream();
        os.write(configurationData);
        os.flush();
        os.close();
        facesContext.responseComplete();
    } catch (GeosearchException e) {
        errorMessage = "Error en la generacin de los archivos de configuracin: " + e.getLocalizedMessage();
    } catch (ParserConfigurationException e) {
        errorMessage = "Error en la generacin de los archivos de configuracin: " + e.getLocalizedMessage();
    } catch (XPathExpressionException e) {
        errorMessage = "Error en la generacin de los archivos de configuracin: " + e.getLocalizedMessage();
    } catch (TransformerException e) {
        errorMessage = "Error al comprimir los archivos de configuracin: " + e.getLocalizedMessage();
    } catch (IOException e) {
        errorMessage = "Error al comprimir los archivos de configuracin: " + e.getLocalizedMessage();
    } catch (Exception e) {
        errorMessage = "Error en la descarga de la configuracin: " + e.getLocalizedMessage();
    } finally {
        try {
            if (os != null) {
                os.flush();
                os.close();
            }
        } catch (IOException e) {
            errorMessage = "Error al comprimir los archivos de configuracin: " + e.getLocalizedMessage();
        }
    }

    if (errorMessage != null) {
        StatusMessages.instance().add(Severity.ERROR, errorMessage);
        log.error(errorMessage);
    } else {
        // saves the new service for each table
        try {
            ServiceType geosearchType = serviceService.getServiceType("geobusquedas");
            for (GeosearchTableVO geosearchTable : tables) {
                Table table = geosearchTable.getTable();
                Service geosearchService = new Service();
                geosearchService.setName(table.getName());
                geosearchService.setServiceUrl(PanelSettings.geosearchMaster.getUrl().concat("/").concat(core));
                geosearchService.setType(geosearchType);
                serviceService.create(geosearchService, table);
            }
        } catch (Exception e) {
            errorMessage = "";
            StatusMessages.instance().add(Severity.ERROR, errorMessage);
            log.error(errorMessage);
        }
    }
}

From source file:com.indeed.imhotep.web.QueryServlet.java

private SelectExecutionStats handleSelectStatement(final SelectRequestArgs args, final HttpServletResponse resp,
        SelectStatement parsedQuery, final ExecutionManager.QueryTracker queryTracker) throws IOException {
    // hashing is done before calling translate so only original JParsec parsing is considered
    final String queryForHashing = parsedQuery.toHashKeyString();

    final IQLQuery iqlQuery = IQLTranslator.translate(parsedQuery,
            args.interactive ? imhotepInteractiveClient : imhotepClient, args.imhotepUserName, metadata,
            imhotepLocalTempFileSizeLimit, imhotepDaemonTempFileSizeLimit);

    // TODO: handle requested format mismatch: e.g. cached CSV but asked for TSV shouldn't have to rerun the query
    final String queryHash = getQueryHash(queryForHashing, iqlQuery.getShardVersionList(), args.csv);
    final String cacheFileName = queryHash + (args.csv ? ".csv" : ".tsv");
    final boolean isCached = queryCache.isFileCached(cacheFileName);

    final QueryMetadata queryMetadata = new QueryMetadata();

    queryMetadata.addItem("IQL-Cached", isCached, true);
    final DateTime newestShard = getLatestShardVersion(iqlQuery.getShardVersionList());
    queryMetadata.addItem("IQL-Newest-Shard", newestShard, args.returnNewestShardVersion);

    final String shardList = shardListToString(iqlQuery.getShardVersionList());
    queryMetadata.addItem("IQL-Shard-List", shardList, args.returnShardlist);

    final List<Interval> timeIntervalsMissingShards = iqlQuery.getTimeIntervalsMissingShards();
    if (timeIntervalsMissingShards.size() > 0) {
        final String missingIntervals = intervalListToString(timeIntervalsMissingShards);
        queryMetadata.addItem("IQL-Missing-Shards", missingIntervals);
    }

    queryMetadata.setPendingHeaders(resp);

    if (args.headOnly) {
        return new SelectExecutionStats(true);
    }
    final ServletOutputStream outputStream = resp.getOutputStream();
    if (args.progress) {
        outputStream.print(": This is the start of the IQL Query Stream\n\n");
    }
    if (!args.asynchronous) {
        ResultServlet.setContentType(resp, args.avoidFileSave, args.csv, args.progress);
        if (!args.cacheReadDisabled && isCached) {
            log.trace("Returning cached data in " + cacheFileName);

            // read metadata from cache
            try {
                final InputStream metadataCacheStream = queryCache
                        .getInputStream(cacheFileName + METADATA_FILE_SUFFIX);
                final QueryMetadata cachedMetadata = QueryMetadata.fromStream(metadataCacheStream);
                queryMetadata.mergeIn(cachedMetadata);

                queryMetadata.setPendingHeaders(resp);
                resp.setHeader("Access-Control-Expose-Headers", StringUtils.join(resp.getHeaderNames(), ", "));
                if (args.progress) {
                    outputStream.println("event: header");
                    outputStream.print("data: ");
                    outputStream.print(queryMetadata.toJSON() + "\n\n");
                }
            } catch (Exception e) {
                log.info("Failed to load metadata cache from " + cacheFileName + METADATA_FILE_SUFFIX, e);
            }

            final InputStream cacheInputStream = queryCache.getInputStream(cacheFileName);
            final int rowsWritten = IQLQuery.copyStream(cacheInputStream, outputStream, iqlQuery.getRowLimit(),
                    args.progress);
            outputStream.close();
            return new SelectExecutionStats(isCached, rowsWritten, false, queryHash, 0);
        }
        final IQLQuery.WriteResults writeResults;
        final IQLQuery.ExecutionResult executionResult;
        try {
            // TODO: should we always get totals? opt out http param?
            executionResult = iqlQuery.execute(args.progress, outputStream, true);
            queryMetadata.addItem("IQL-Timings", executionResult.getTimings().replace('\n', '\t'),
                    args.progress);
            queryMetadata.addItem("IQL-Imhotep-Temp-Bytes-Written",
                    executionResult.getImhotepTempFilesBytesWritten(), args.progress);
            queryMetadata.addItem("IQL-Totals", Arrays.toString(executionResult.getTotals()), args.getTotals);

            queryMetadata.setPendingHeaders(resp);
            resp.setHeader("Access-Control-Expose-Headers", StringUtils.join(resp.getHeaderNames(), ", "));

            if (args.progress) {
                outputStream.println("event: header");
                outputStream.print("data: ");
                outputStream.print(queryMetadata.toJSON() + "\n\n");
            }
            final Iterator<GroupStats> groupStats = executionResult.getRows();
            final int groupingColumns = Math.max(1,
                    (parsedQuery.groupBy == null || parsedQuery.groupBy.groupings == null) ? 1
                            : parsedQuery.groupBy.groupings.size());
            final int selectColumns = Math.max(1,
                    (parsedQuery.select == null || parsedQuery.select.getProjections() == null) ? 1
                            : parsedQuery.select.getProjections().size());
            if (!args.asynchronous) {
                writeResults = iqlQuery.outputResults(groupStats, outputStream, args.csv, args.progress,
                        iqlQuery.getRowLimit(), groupingColumns, selectColumns, args.cacheWriteDisabled);
            } else {
                writeResults = new IQLQuery.WriteResults(0, null, groupStats, 0);
            }
            if (!args.cacheWriteDisabled && !isCached) {
                executorService.submit(new Callable<Void>() {
                    @Override
                    public Void call() throws Exception {
                        try {
                            try {
                                final OutputStream metadataCacheStream = queryCache
                                        .getOutputStream(cacheFileName + METADATA_FILE_SUFFIX);
                                queryMetadata.toStream(metadataCacheStream);
                                metadataCacheStream.close();
                            } catch (Exception e) {
                                log.warn("Failed to upload metadata cache: " + cacheFileName, e);
                            }
                            try {
                                uploadResultsToCache(writeResults, cacheFileName, args.csv);
                            } catch (Exception e) {
                                log.warn("Failed to upload cache: " + cacheFileName, e);
                            }
                        } finally {
                            Closeables2.closeQuietly(queryTracker, log);
                        }
                        return null;
                    }
                });
                queryTracker.markAsynchronousRelease(); // going to be closed asynchronously after cache is uploaded
            }
        } catch (ImhotepOutOfMemoryException e) {
            throw Throwables.propagate(e);
        } finally {
            Closeables2.closeQuietly(iqlQuery, log);
        }
        outputStream.close();
        return new SelectExecutionStats(isCached, writeResults, queryHash,
                executionResult.getImhotepTempFilesBytesWritten());
    } else {
        // TODO: rework the async case to use the same code path as the sync case above except running under an executor
        if (!isCached && args.cacheWriteDisabled) {
            throw new IllegalStateException("Query cache is disabled so only synchronous calls can be served");
        }

        resp.setContentType("application/json");

        if (!isCached) {
            executorService.submit(new Callable<Void>() {
                @Override
                public Void call() throws Exception {
                    try {
                        // TODO: get totals working with the cache
                        final IQLQuery.ExecutionResult executionResult = iqlQuery.execute(false, null, false);
                        final Iterator<GroupStats> groupStats = executionResult.getRows();

                        final OutputStream cacheStream = queryCache.getOutputStream(cacheFileName);
                        IQLQuery.writeRowsToStream(groupStats, cacheStream, args.csv, Integer.MAX_VALUE, false);
                        cacheStream.close(); // has to be closed
                        return null;
                    } finally {
                        Closeables2.closeQuietly(iqlQuery, log);
                        Closeables2.closeQuietly(queryTracker, log);
                    }
                }
            });
            queryTracker.markAsynchronousRelease(); // going to be closed asynchronously after cache is uploaded
        }

        final URL baseURL = new URL(args.requestURL);
        final URL resultsURL = new URL(baseURL, "results/" + cacheFileName);

        final ObjectMapper mapper = new ObjectMapper();
        final ObjectNode ret = mapper.createObjectNode();
        ret.put("filename", resultsURL.toString());
        mapper.writeValue(outputStream, ret);
        outputStream.close();
        // we don't know number of rows as it's handled asynchronously
        return new SelectExecutionStats(isCached, new IQLQuery.WriteResults(0, null, null, 0), queryHash, 0);
    }
}

From source file:au.org.ala.biocache.web.WMSController.java

/**
 * WMS service for webportal.
 *
 * @param cql_filter q value.
 * @param env        ';' delimited field:value pairs.  See Env
 * @param bboxString
 * @param width
 * @param height
 * @param cache      'on' = use the cache, 'off' = do not use the cache; this
 *                   also removes any related cache data.
 * @param response
 * @throws Exception
 */
@RequestMapping(value = { "/webportal/wms/reflect", "/ogc/wms/reflect",
        "/mapping/wms/reflect" }, method = RequestMethod.GET)
public void generateWmsTile(SpatialSearchRequestParams requestParams,
        @RequestParam(value = "CQL_FILTER", required = false, defaultValue = "") String cql_filter,
        @RequestParam(value = "ENV", required = false, defaultValue = "") String env,
        @RequestParam(value = "SRS", required = false, defaultValue = "EPSG:900913") String srs, //default to google mercator
        @RequestParam(value = "STYLES", required = false, defaultValue = "") String styles,
        @RequestParam(value = "BBOX", required = true, defaultValue = "") String bboxString,
        @RequestParam(value = "WIDTH", required = true, defaultValue = "256") Integer width,
        @RequestParam(value = "HEIGHT", required = true, defaultValue = "256") Integer height,
        @RequestParam(value = "CACHE", required = true, defaultValue = "off") String cache,
        @RequestParam(value = "REQUEST", required = true, defaultValue = "") String requestString,
        @RequestParam(value = "OUTLINE", required = true, defaultValue = "false") boolean outlinePoints,
        @RequestParam(value = "OUTLINECOLOUR", required = true, defaultValue = "0x000000") String outlineColour,
        @RequestParam(value = "LAYERS", required = false, defaultValue = "") String layers,
        @RequestParam(value = "HQ", required = false) String[] hqs, HttpServletRequest request,
        HttpServletResponse response) throws Exception {

    //Some WMS clients are ignoring sections of the GetCapabilities....
    if ("GetLegendGraphic".equalsIgnoreCase(requestString)) {
        getLegendGraphic(env, styles, 30, 20, request, response);
        return;
    }

    Set<Integer> hq = new HashSet<Integer>();
    if (hqs != null && hqs.length > 0) {
        for (String h : hqs) {
            hq.add(Integer.parseInt(h));
        }
    }

    logger.debug("WMS tile: " + request.getQueryString());

    response.setHeader("Cache-Control", "max-age=86400"); //age == 1 day
    response.setContentType("image/png"); //only png images generated

    boolean is4326 = false;
    WmsEnv vars = new WmsEnv(env, styles);
    double[] mbbox = new double[4];
    double[] bbox = new double[4];
    double[] pbbox = new double[4];
    double[] tilebbox = new double[4];
    int size = vars.size + (vars.highlight != null ? HIGHLIGHT_RADIUS * 2 + (int) (vars.size * 0.2) : 0) + 5; //bounding box buffer

    double resolution;
    if ("EPSG:4326".equals(srs)) {
        is4326 = true;
        //bboxString = convertBBox4326To900913(bboxString);    // to work around a UDIG bug

        resolution = getBBoxes4326(bboxString, width, height, size, vars.uncertainty, mbbox, bbox, pbbox,
                tilebbox);
    } else {
        resolution = getBBoxes(bboxString, width, height, size, vars.uncertainty, mbbox, bbox, pbbox, tilebbox);
    }

    PointType pointType = getPointTypeForDegreesPerPixel(resolution);
    logger.debug("Rendering: " + pointType.name());

    String q = "";

    //CQL Filter takes precedence of the layer
    if (StringUtils.trimToNull(cql_filter) != null) {
        q = getQ(cql_filter);
    } else if (StringUtils.trimToNull(layers) != null && !"ALA:Occurrences".equalsIgnoreCase(layers)) {
        q = convertLayersParamToQ(layers);
    }

    String[] boundingBoxFqs = new String[2];
    boundingBoxFqs[0] = String.format("longitude:[%f TO %f]", bbox[0], bbox[2]);
    boundingBoxFqs[1] = String.format("latitude:[%f TO %f]", bbox[1], bbox[3]);

    int pointWidth = vars.size * 2;
    double width_mult = (width / (pbbox[2] - pbbox[0]));
    double height_mult = (height / (pbbox[1] - pbbox[3]));

    //build request
    if (q.length() > 0) {
        requestParams.setQ(q);
    } else {
        q = requestParams.getQ();
    }

    //bounding box test (q must be 'qid:' + number)
    if (q.startsWith("qid:")) {
        double[] queryBBox = ParamsCache.get(Long.parseLong(q.substring(4))).getBbox();
        if (queryBBox != null && (queryBBox[0] > bbox[2] || queryBBox[2] < bbox[0] || queryBBox[1] > bbox[3]
                || queryBBox[3] < bbox[1])) {
            displayBlankImage(response);
            return;
        }
    }

    String[] originalFqs = getFq(requestParams);

    //get from cache
    WMSTile wco = null;
    if (WMSCache.isEnabled() && cache.equalsIgnoreCase("on")) {
        wco = getWMSCacheObject(vars, pointType, requestParams, bbox);
    } else if (!cache.equalsIgnoreCase("on")) {
        WMSCache.remove(requestParams.getUrlParams(), vars.colourMode, pointType);
    }

    ImgObj imgObj = null;
    if (wco == null) {
        imgObj = wmsUncached(requestParams, vars, pointType, pbbox, mbbox, width, height, width_mult,
                height_mult, pointWidth, originalFqs, hq, boundingBoxFqs, outlinePoints, outlineColour,
                response, is4326, tilebbox);
    } else {
        imgObj = wmsCached(wco, requestParams, vars, pointType, pbbox, bbox, mbbox, width, height, width_mult,
                height_mult, pointWidth, originalFqs, hq, boundingBoxFqs, outlinePoints, outlineColour,
                response, is4326, tilebbox);
    }

    if (imgObj != null && imgObj.g != null) {
        imgObj.g.dispose();
        try {
            ServletOutputStream outStream = response.getOutputStream();
            ImageIO.write(imgObj.img, "png", outStream);
            outStream.flush();
            outStream.close();
        } catch (Exception e) {
            logger.error("Unable to write image", e);
        }
    } else {
        displayBlankImage(response);
    }
}

From source file:org.openbravo.erpCommon.utility.reporting.printing.PrintController.java

public void printReports(HttpServletResponse response, Collection<JasperPrint> jrPrintReports,
        Collection<Report> reports) {
    ServletOutputStream os = null;
    String filename = "";
    try {
        os = response.getOutputStream();
        response.setContentType("application/pdf");

        if (!multiReports && !archivedReports) {
            for (Report report : reports) {
                filename = report.getFilename();
            }
            response.setHeader("Content-disposition", "attachment" + "; filename=" + filename);
            for (JasperPrint jasperPrint : jrPrintReports) {
                ReportingUtils.saveReport(jasperPrint, ExportType.PDF, null, os);
            }
        } else {
            concatReport(reports.toArray(new Report[] {}), jrPrintReports, response);
        }
        for (Report report : reports) {
            switch (report.getDocumentType()) {
            case SALESORDER:
                PrintControllerData.updateOrderDatePrinted(this, report.getDocumentId());
            default:
                break;
            }
        }
    } catch (IOException e) {
        log4j.error(e.getMessage());
    } catch (JRException e) {
        e.printStackTrace();
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        try {
            if (os != null) {
                os.close();
            }
            response.flushBuffer();
        } catch (IOException e) {
            log4j.error(e.getMessage(), e);
        } finally {
            try {
                for (Report report : reports) {
                    // Delete temporal reports generated for the returned report in case they have been
                    // attached also
                    File file = new File(report.getTargetLocation());
                    if (file.exists() && !file.isDirectory()) {
                        file.delete();
                    }
                }
            } catch (IOException e) {
                log4j.error("Error deleting temporal reports", e);
            }
        }
    }
}