Example usage for javax.servlet.http HttpServletRequest getReader

Introduction

This page collects usage examples for the javax.servlet.http.HttpServletRequest.getReader() method.

Prototype

public BufferedReader getReader() throws IOException;

Document

Retrieves the body of the request as character data using a BufferedReader.
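
Before the project-specific examples, here is a minimal, self-contained sketch (not taken from any of the projects below; the class name EchoBodyServlet is illustrative) that reads the body through getReader() in doPost and echoes it back. Note that either getReader() or getInputStream() may be used to read the body of a given request, but not both.

import java.io.BufferedReader;
import java.io.IOException;
import java.util.stream.Collectors;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

// Minimal sketch: read the request body as text and echo it back.
public class EchoBodyServlet extends HttpServlet {

    @Override
    protected void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        // getReader() decodes the body using the request's character encoding.
        BufferedReader reader = request.getReader();
        String body = reader.lines().collect(Collectors.joining("\n"));

        response.setContentType("text/plain;charset=UTF-8");
        response.getWriter().write(body);
    }
}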

Usage

From source file: org.b3log.latke.remote.RepositoryAccessor.java

/**
 * Determines whether the specified put data request is bad.
 *
 * <p>
 * If the specified request is bad, puts {@link Keys#STATUS_CODE sc} and {@link Keys#MSG msg}
 * into the specified json object to render.
 * </p>
 * 
 * @param request the specified request
 * @param jsonObject the specified jsonObject
 * @param dataBuilder the specified data builder
 * @return {@code true} if it is bad, returns {@code false} otherwise
 */
private boolean badPutDataRequest(final HttpServletRequest request, final JSONObject jsonObject,
        final StringBuilder dataBuilder) {
    final String repositoryName = request.getParameter("repositoryName");

    if (Strings.isEmptyOrNull(repositoryName)) {
        jsonObject.put(Keys.STATUS_CODE, HttpServletResponse.SC_BAD_REQUEST);
        jsonObject.put(Keys.MSG, "Requires parameter[repositoryName]");
        return true;
    }

    String dataContent = request.getParameter("data");

    if (Strings.isEmptyOrNull(dataContent)) {
        try {
            final BufferedReader reader = request.getReader();

            dataContent = IOUtils.toString(reader);
            final String str = dataContent.split("=")[1];

            dataContent = URLDecoder.decode(str, "UTF-8");
        } catch (final IOException e) {
            LOGGER.log(Level.WARNING, e.getMessage(), e);
        }
    }

    if (Strings.isEmptyOrNull(dataContent)) {
        jsonObject.put(Keys.STATUS_CODE, HttpServletResponse.SC_BAD_REQUEST);
        jsonObject.put(Keys.MSG, "Requires parameter[data]");
        return true;
    }

    try {
        new JSONArray(dataContent);
    } catch (final JSONException e) {
        jsonObject.put(Keys.STATUS_CODE, HttpServletResponse.SC_BAD_REQUEST);
        jsonObject.put(Keys.MSG, "Parameter[data] must be a JSON object or a JSON array");
        return true;
    }

    dataBuilder.append(dataContent);

    return false;
}
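
This example, like several others on this page, reads the entire body with Apache Commons IO's IOUtils.toString(request.getReader()). Where that dependency is unwanted, a plain-JDK helper along these lines does the same job (a sketch, not part of the original class; it assumes the enclosing class already imports java.io.BufferedReader, java.io.IOException and javax.servlet.http.HttpServletRequest):

    // Sketch: drain the request's BufferedReader into a String without Commons IO.
    private static String readBody(final HttpServletRequest request) throws IOException {
        final StringBuilder body = new StringBuilder();
        final BufferedReader reader = request.getReader();
        final char[] buffer = new char[4096];
        int read;
        while ((read = reader.read(buffer)) != -1) {
            body.append(buffer, 0, read);
        }
        return body.toString();
    }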

From source file: org.openrdf.http.server.repository.RepositoryController.java

@Override
protected ModelAndView handleRequestInternal(HttpServletRequest request, HttpServletResponse response)
        throws Exception {
    String reqMethod = request.getMethod();
    String queryStr = request.getParameter(QUERY_PARAM_NAME);

    if (METHOD_POST.equals(reqMethod)) {
        String mimeType = HttpServerUtil.getMIMEType(request.getContentType());

        if (!(Protocol.FORM_MIME_TYPE.equals(mimeType) || Protocol.SPARQL_QUERY_MIME_TYPE.equals(mimeType))) {
            throw new ClientHTTPException(SC_UNSUPPORTED_MEDIA_TYPE, "Unsupported MIME type: " + mimeType);
        }

        if (Protocol.SPARQL_QUERY_MIME_TYPE.equals(mimeType)) {
            // The query should be the entire body
            try {
                queryStr = IOUtils.toString(request.getReader());
            } catch (IOException e) {
                throw new HTTPException(HttpStatus.SC_BAD_REQUEST, "Error reading request message body", e);
            }
            if (queryStr.isEmpty())
                queryStr = null;
        }
    } else if (METHOD_DELETE.equals(reqMethod)) {
        String repId = RepositoryInterceptor.getRepositoryID(request);
        logger.info("DELETE request invoked for repository '" + repId + "'");

        if (queryStr != null) {
            logger.warn("query supplied on repository delete request, aborting delete");
            throw new HTTPException(HttpStatus.SC_BAD_REQUEST,
                    "Repository delete error: query supplied with request");
        }

        if (SystemRepository.ID.equals(repId)) {
            logger.warn("attempted delete of SYSTEM repository, aborting");
            throw new HTTPException(HttpStatus.SC_FORBIDDEN, "SYSTEM Repository can not be deleted");
        }

        try {
            // we need to forcibly close the default repository connection
            // opened for this repository by
            // the interceptor.
            RepositoryConnection repositoryCon = RepositoryInterceptor.getRepositoryConnection(request);
            synchronized (repositoryCon) {
                repositoryCon.close();
            }

            boolean success = repositoryManager.removeRepository(repId);
            if (success) {
                logger.info("DELETE request successfully completed");
                return new ModelAndView(EmptySuccessView.getInstance());
            } else {
                logger.error("error while attempting to delete repository '" + repId + "'");
                throw new HTTPException(HttpStatus.SC_BAD_REQUEST,
                        "could not locate repository configuration for repository '" + repId + "'.");
            }
        } catch (OpenRDFException e) {
            logger.error("error while attempting to delete repository '" + repId + "'", e);
            throw new ServerHTTPException("Repository delete error: " + e.getMessage(), e);
        }
    }

    Repository repository = RepositoryInterceptor.getRepository(request);

    int qryCode = 0;
    if (logger.isInfoEnabled() || logger.isDebugEnabled()) {
        qryCode = String.valueOf(queryStr).hashCode();
    }

    boolean headersOnly = false;
    if (METHOD_GET.equals(reqMethod)) {
        logger.info("GET query {}", qryCode);
    } else if (METHOD_HEAD.equals(reqMethod)) {
        logger.info("HEAD query {}", qryCode);
        headersOnly = true;
    } else if (METHOD_POST.equals(reqMethod)) {
        logger.info("POST query {}", qryCode);
    }

    logger.debug("query {} = {}", qryCode, queryStr);

    if (queryStr != null) {
        RepositoryConnection repositoryCon = RepositoryInterceptor.getRepositoryConnection(request);
        synchronized (repositoryCon) {
            Query query = getQuery(repository, repositoryCon, queryStr, request, response);

            View view;
            Object queryResult;
            FileFormatServiceRegistry<? extends FileFormat, ?> registry;

            try {
                if (query instanceof TupleQuery) {
                    TupleQuery tQuery = (TupleQuery) query;

                    queryResult = headersOnly ? null : tQuery.evaluate();
                    registry = TupleQueryResultWriterRegistry.getInstance();
                    view = TupleQueryResultView.getInstance();
                } else if (query instanceof GraphQuery) {
                    GraphQuery gQuery = (GraphQuery) query;

                    queryResult = headersOnly ? null : gQuery.evaluate();
                    registry = RDFWriterRegistry.getInstance();
                    view = GraphQueryResultView.getInstance();
                } else if (query instanceof BooleanQuery) {
                    BooleanQuery bQuery = (BooleanQuery) query;

                    queryResult = headersOnly ? null : bQuery.evaluate();
                    registry = BooleanQueryResultWriterRegistry.getInstance();
                    view = BooleanQueryResultView.getInstance();
                } else {
                    throw new ClientHTTPException(SC_BAD_REQUEST,
                            "Unsupported query type: " + query.getClass().getName());
                }
            } catch (QueryInterruptedException e) {
                logger.info("Query interrupted", e);
                throw new ServerHTTPException(SC_SERVICE_UNAVAILABLE, "Query evaluation took too long");
            } catch (QueryEvaluationException e) {
                logger.info("Query evaluation error", e);
                if (e.getCause() != null && e.getCause() instanceof HTTPException) {
                    // custom signal from the backend, throw as HTTPException
                    // directly (see SES-1016).
                    throw (HTTPException) e.getCause();
                } else {
                    throw new ServerHTTPException("Query evaluation error: " + e.getMessage());
                }
            }
            Object factory = ProtocolUtil.getAcceptableService(request, response, registry);

            Map<String, Object> model = new HashMap<String, Object>();
            model.put(QueryResultView.FILENAME_HINT_KEY, "query-result");
            model.put(QueryResultView.QUERY_RESULT_KEY, queryResult);
            model.put(QueryResultView.FACTORY_KEY, factory);
            model.put(QueryResultView.HEADERS_ONLY, headersOnly);

            return new ModelAndView(view, model);
        }
    } else {
        throw new ClientHTTPException(SC_BAD_REQUEST, "Missing parameter: " + QUERY_PARAM_NAME);
    }
}

From source file: org.geoserver.opensearch.rest.CollectionsController.java

@PutMapping(path = "{collection}/description", consumes = MediaType.TEXT_HTML_VALUE)
public void putCollectionDescription(@PathVariable(name = "collection", required = true) String collection,
        HttpServletRequest request) throws IOException {
    // check the collection is there
    queryCollection(collection, q -> {
    });

    String description = IOUtils.toString(request.getReader());

    updateDescription(collection, description);
}

From source file: com.palantir.gerrit.gerritci.servlets.JobsServlet.java

public Map<String, Map<String, String>> parseJobRequest(HttpServletRequest req, String projectName)
        throws JsonSyntaxException, IOException, NoSuchProjectException, NoFilepatternException,
        GitAPIException {
    Map<String, Map<String, String>> jobToParams = new HashMap<String, Map<String, String>>();

    File projectConfigDirectory = new File(sitePaths.etc_dir, projectName);
    if (!projectConfigDirectory.exists())
        projectConfigDirectory.mkdir();
    File projectConfigFile = new File(projectConfigDirectory, "created_jobs");
    if (!projectConfigFile.exists())
        projectConfigFile.createNewFile();

    JsonObject requestBody = (JsonObject) (new JsonParser()).parse(CharStreams.toString(req.getReader()));

    // get number of jobs
    // If all jobs are deleted, we must purge jobs
    int numOfJobs = requestBody.get("items").getAsJsonArray().size();

    ArrayList<String> receivedJobNames = new ArrayList<String>();

    if (numOfJobs < 1) {
        ArrayList<String> deletedJobs = updateProjectJobFiles(projectConfigFile, projectConfigDirectory,
                receivedJobNames);
        for (String deleted : deletedJobs) {
            jobToParams.put(deleted, null);
        }
        return jobToParams;
    }

    CurrentUser currentUser = this.projectControlFactory.controlFor(new NameKey(projectName)).getCurrentUser();
    String gitPath = getGitPath(sitePaths);
    File gitDir = new File(gitPath, projectName + ".git");
    Repository repository = new FileRepositoryBuilder().setGitDir(gitDir).build();
    ObjectInserter objectInserter = repository.newObjectInserter();
    HashMap<String, ObjectId> jobsToIds = new HashMap<String, ObjectId>();
    // assign file name and append to tree
    TreeFormatter treeFormatter = new TreeFormatter();
    // for each received job, create or rewrite its config file and add to
    // jobToParams
    for (int i = 0; i < numOfJobs; i++) {
        JsonObject jobObject = requestBody.get("items").getAsJsonArray().get(i).getAsJsonObject();
        String jobName = jobObject.get("jobName").toString();
        //Remove leading and trailing quotations ex. "jobname" becomes jobname
        jobName = jobName.substring(1, jobName.length() - 1);
        receivedJobNames.add(jobName);
        String type = jobObject.get("jobType").toString();
        type = type.substring(1, type.length() - 1);
        int numOfParams = jobObject.get("items").getAsJsonArray().size();
        JsonArray paramsArray = jobObject.get("items").getAsJsonArray();
        FileBasedConfig jobConfig = makeJobConfigFile(projectConfigDirectory, jobName, currentUser);
        Map<String, String> parsedParams = new HashMap<String, String>();
        parsedParams.put("projectName", projectName);
        for (int j = 0; j < numOfParams; j++) {
            String field = paramsArray.get(j).getAsJsonObject().get("field").toString();
            field = field.substring(1, field.length() - 1);
            String value = paramsArray.get(j).getAsJsonObject().get("value").toString();
            value = value.substring(1, value.length() - 1);
            parsedParams.put(field, value);
            // update jobconfig files
            jobConfig.setString("jobType", type, field, value);
        }
        jobConfig.save();
        jobsToIds.put(jobName, createGitFileId(repository, jobConfig, objectInserter, jobName));
        jobToParams.put(jobName, parsedParams);
    }
    for (String jobName : jobsToIds.keySet()) {
        treeFormatter.append(jobName + ".config", FileMode.REGULAR_FILE, jobsToIds.get(jobName));
    }
    ObjectId treeId = objectInserter.insert(treeFormatter);
    objectInserter.flush();
    updateProjectRef(treeId, objectInserter, repository, currentUser);
    // update or create project files for all jobs
    ArrayList<String> deletedJobs = updateProjectJobFiles(projectConfigFile, projectConfigDirectory,
            receivedJobNames);
    for (String deleted : deletedJobs) {
        jobToParams.put(deleted, null);
    }
    // returns map of job name to params
    return jobToParams;
}

From source file: org.eclipse.rdf4j.http.server.repository.RepositoryController.java

@Override
protected ModelAndView handleRequestInternal(HttpServletRequest request, HttpServletResponse response)
        throws Exception {
    String reqMethod = request.getMethod();
    String queryStr = request.getParameter(QUERY_PARAM_NAME);

    if (METHOD_POST.equals(reqMethod)) {
        String mimeType = HttpServerUtil.getMIMEType(request.getContentType());

        if (!(Protocol.FORM_MIME_TYPE.equals(mimeType) || Protocol.SPARQL_QUERY_MIME_TYPE.equals(mimeType))) {
            throw new ClientHTTPException(SC_UNSUPPORTED_MEDIA_TYPE, "Unsupported MIME type: " + mimeType);
        }

        if (Protocol.SPARQL_QUERY_MIME_TYPE.equals(mimeType)) {
            // The query should be the entire body
            try {
                queryStr = IOUtils.toString(request.getReader());
            } catch (IOException e) {
                throw new HTTPException(HttpStatus.SC_BAD_REQUEST, "Error reading request message body", e);
            }
            if (queryStr.isEmpty())
                queryStr = null;
        }
    } else if (METHOD_DELETE.equals(reqMethod)) {
        String repId = RepositoryInterceptor.getRepositoryID(request);
        logger.info("DELETE request invoked for repository '" + repId + "'");

        if (queryStr != null) {
            logger.warn("query supplied on repository delete request, aborting delete");
            throw new HTTPException(HttpStatus.SC_BAD_REQUEST,
                    "Repository delete error: query supplied with request");
        }

        if (SystemRepository.ID.equals(repId)) {
            logger.warn("attempted delete of SYSTEM repository, aborting");
            throw new HTTPException(HttpStatus.SC_FORBIDDEN, "SYSTEM Repository can not be deleted");
        }

        try {
            boolean success = repositoryManager.removeRepository(repId);
            if (success) {
                logger.info("DELETE request successfully completed");
                return new ModelAndView(EmptySuccessView.getInstance());
            } else {
                logger.error("error while attempting to delete repository '" + repId + "'");
                throw new HTTPException(HttpStatus.SC_BAD_REQUEST,
                        "could not locate repository configuration for repository '" + repId + "'.");
            }
        } catch (RDF4JException e) {
            logger.error("error while attempting to delete repository '" + repId + "'", e);
            throw new ServerHTTPException("Repository delete error: " + e.getMessage(), e);
        }
    }

    Repository repository = RepositoryInterceptor.getRepository(request);

    int qryCode = 0;
    if (logger.isInfoEnabled() || logger.isDebugEnabled()) {
        qryCode = String.valueOf(queryStr).hashCode();
    }

    boolean headersOnly = false;
    if (METHOD_GET.equals(reqMethod)) {
        logger.info("GET query {}", qryCode);
    } else if (METHOD_HEAD.equals(reqMethod)) {
        logger.info("HEAD query {}", qryCode);
        headersOnly = true;
    } else if (METHOD_POST.equals(reqMethod)) {
        logger.info("POST query {}", qryCode);
    }

    logger.debug("query {} = {}", qryCode, queryStr);

    if (queryStr != null) {
        RepositoryConnection repositoryCon = RepositoryInterceptor.getRepositoryConnection(request);
        try {
            Query query = getQuery(repository, repositoryCon, queryStr, request, response);

            View view;
            Object queryResult = null;
            FileFormatServiceRegistry<? extends FileFormat, ?> registry;

            try {
                if (query instanceof TupleQuery) {
                    if (!headersOnly) {
                        TupleQuery tQuery = (TupleQuery) query;
                        long limit = ProtocolUtil.parseLongParam(request, Protocol.LIMIT_PARAM_NAME, 0);
                        long offset = ProtocolUtil.parseLongParam(request, Protocol.OFFSET_PARAM_NAME, 0);
                        boolean distinct = ProtocolUtil.parseBooleanParam(request, Protocol.DISTINCT_PARAM_NAME,
                                false);

                        final TupleQueryResult tqr = distinct ? QueryResults.distinctResults(tQuery.evaluate())
                                : tQuery.evaluate();
                        queryResult = QueryResults.limitResults(tqr, limit, offset);
                    }
                    registry = TupleQueryResultWriterRegistry.getInstance();
                    view = TupleQueryResultView.getInstance();
                } else if (query instanceof GraphQuery) {
                    if (!headersOnly) {
                        GraphQuery gQuery = (GraphQuery) query;
                        long limit = ProtocolUtil.parseLongParam(request, Protocol.LIMIT_PARAM_NAME, 0);
                        long offset = ProtocolUtil.parseLongParam(request, Protocol.OFFSET_PARAM_NAME, 0);
                        boolean distinct = ProtocolUtil.parseBooleanParam(request, Protocol.DISTINCT_PARAM_NAME,
                                false);

                        final GraphQueryResult qqr = distinct ? QueryResults.distinctResults(gQuery.evaluate())
                                : gQuery.evaluate();
                        queryResult = QueryResults.limitResults(qqr, limit, offset);
                    }
                    registry = RDFWriterRegistry.getInstance();
                    view = GraphQueryResultView.getInstance();
                } else if (query instanceof BooleanQuery) {
                    BooleanQuery bQuery = (BooleanQuery) query;

                    queryResult = headersOnly ? null : bQuery.evaluate();
                    registry = BooleanQueryResultWriterRegistry.getInstance();
                    view = BooleanQueryResultView.getInstance();
                } else {
                    throw new ClientHTTPException(SC_BAD_REQUEST,
                            "Unsupported query type: " + query.getClass().getName());
                }
            } catch (QueryInterruptedException e) {
                logger.info("Query interrupted", e);
                throw new ServerHTTPException(SC_SERVICE_UNAVAILABLE, "Query evaluation took too long");
            } catch (QueryEvaluationException e) {
                logger.info("Query evaluation error", e);
                if (e.getCause() != null && e.getCause() instanceof HTTPException) {
                    // custom signal from the backend, throw as HTTPException
                    // directly (see SES-1016).
                    throw (HTTPException) e.getCause();
                } else {
                    throw new ServerHTTPException("Query evaluation error: " + e.getMessage());
                }
            }

            Object factory = ProtocolUtil.getAcceptableService(request, response, registry);

            Map<String, Object> model = new HashMap<String, Object>();
            model.put(QueryResultView.FILENAME_HINT_KEY, "query-result");
            model.put(QueryResultView.QUERY_RESULT_KEY, queryResult);
            model.put(QueryResultView.FACTORY_KEY, factory);
            model.put(QueryResultView.HEADERS_ONLY, headersOnly);
            model.put(QueryResultView.CONNECTION_KEY, repositoryCon);

            return new ModelAndView(view, model);
        } catch (Exception e) {
            // only close the connection when an exception occurs. Otherwise, the QueryResultView will take care of closing it.
            repositoryCon.close();
            throw e;
        }
    } else {
        throw new ClientHTTPException(SC_BAD_REQUEST, "Missing parameter: " + QUERY_PARAM_NAME);
    }
}

From source file: org.ala.layers.web.ShapesService.java

@RequestMapping(value = "/shape/upload/shp", method = RequestMethod.POST)
@ResponseBody
public Map<Object, Object> uploadShapeFile(HttpServletRequest req, HttpServletResponse resp,
        @RequestParam(value = "user_id", required = false) String userId,
        @RequestParam(value = "api_key", required = false) String apiKey) throws Exception {
    // Use linked hash map to maintain key ordering
    Map<Object, Object> retMap = new LinkedHashMap<Object, Object>();

    File tmpZipFile = File.createTempFile("shpUpload", ".zip");

    if (!ServletFileUpload.isMultipartContent(req)) {
        String jsonRequestBody = IOUtils.toString(req.getReader());

        JSONRequestBodyParser reqBodyParser = new JSONRequestBodyParser();
        reqBodyParser.addParameter("user_id", String.class, false);
        reqBodyParser.addParameter("shp_file_url", String.class, false);
        reqBodyParser.addParameter("api_key", String.class, false);

        if (reqBodyParser.parseJSON(jsonRequestBody)) {

            String shpFileUrl = (String) reqBodyParser.getParsedValue("shp_file_url");
            userId = (String) reqBodyParser.getParsedValue("user_id");
            apiKey = (String) reqBodyParser.getParsedValue("api_key");

            if (!checkAPIKey(apiKey, userId)) {
                retMap.put("error", "Invalid user ID or API key");
                return retMap;
            }

            // Use shape file url from json body
            IOUtils.copy(new URL(shpFileUrl).openStream(), new FileOutputStream(tmpZipFile));
            retMap.putAll(handleZippedShapeFile(tmpZipFile));
        } else {
            retMap.put("error", StringUtils.join(reqBodyParser.getErrorMessages(), ","));
        }

    } else {
        if (!checkAPIKey(apiKey, userId)) {
            retMap.put("error", "Invalid user ID or API key");
            return retMap;
        }

        // Create a factory for disk-based file items. File size limit is
        // 50MB
        // Configure a repository (to ensure a secure temp location is used)
        File repository = new File(System.getProperty("java.io.tmpdir"));
        DiskFileItemFactory factory = new DiskFileItemFactory(1024 * 1024 * 50, repository);

        factory.setRepository(repository);

        // Create a new file upload handler
        ServletFileUpload upload = new ServletFileUpload(factory);

        // Parse the request
        List<FileItem> items = upload.parseRequest(req);

        if (items.size() == 1) {
            FileItem fileItem = items.get(0);
            IOUtils.copy(fileItem.getInputStream(), new FileOutputStream(tmpZipFile));
            retMap.putAll(handleZippedShapeFile(tmpZipFile));
        } else {
            retMap.put("error",
                    "Multiple files sent in request. A single zipped shape file should be supplied.");
        }
    }

    return retMap;
}

From source file: org.geoserver.opensearch.rest.CollectionsController.java

@PutMapping(path = "{collection}/metadata", consumes = MediaType.TEXT_XML_VALUE)
public void putCollectionMetadata(@PathVariable(name = "collection", required = true) String collection,
        HttpServletRequest request) throws IOException {
    // check the collection is there
    queryCollection(collection, q -> {
    });

    // TODO: validate it's actual ISO metadata
    String metadata = IOUtils.toString(request.getReader());
    checkWellFormedXML(metadata);

    // prepare the update
    Filter filter = FF.equal(FF.property(COLLECTION_ID), FF.literal(collection), true);
    runTransactionOnCollectionStore(
            fs -> fs.modifyFeatures(OpenSearchAccess.METADATA_PROPERTY_NAME, metadata, filter));
}

From source file: au.org.ala.layers.web.ShapesService.java

@RequestMapping(value = "/shape/upload/shp", method = RequestMethod.POST)
@ResponseBody
public Map<Object, Object> uploadShapeFile(HttpServletRequest req, HttpServletResponse resp,
        @RequestParam(value = "user_id", required = false) String userId,
        @RequestParam(value = "api_key", required = false) String apiKey) throws Exception {
    // Use linked hash map to maintain key ordering
    Map<Object, Object> retMap = new LinkedHashMap<Object, Object>();

    File tmpZipFile = File.createTempFile("shpUpload", ".zip");

    if (!ServletFileUpload.isMultipartContent(req)) {
        String jsonRequestBody = IOUtils.toString(req.getReader());

        JSONRequestBodyParser reqBodyParser = new JSONRequestBodyParser();
        reqBodyParser.addParameter("user_id", String.class, false);
        reqBodyParser.addParameter("shp_file_url", String.class, false);
        reqBodyParser.addParameter("api_key", String.class, false);

        if (reqBodyParser.parseJSON(jsonRequestBody)) {

            String shpFileUrl = (String) reqBodyParser.getParsedValue("shp_file_url");
            userId = (String) reqBodyParser.getParsedValue("user_id");
            apiKey = (String) reqBodyParser.getParsedValue("api_key");

            if (!checkAPIKey(apiKey, userId)) {
                retMap.put("error", "Invalid user ID or API key");
                return retMap;
            }

            // Use shape file url from json body
            InputStream is = null;
            OutputStream os = null;
            try {
                is = new URL(shpFileUrl).openStream();
                os = new FileOutputStream(tmpZipFile);
                IOUtils.copy(is, os);
                retMap.putAll(handleZippedShapeFile(tmpZipFile));
                os.flush();
            } catch (Exception e) {
                logger.error(e.getMessage(), e);
            } finally {
                if (is != null) {
                    try {
                        is.close();
                    } catch (Exception e) {
                        logger.error(e.getMessage(), e);
                    }
                }
                if (os != null) {
                    try {
                        os.close();
                    } catch (Exception e) {
                        logger.error(e.getMessage(), e);
                    }
                }
            }

        } else {
            retMap.put("error", StringUtils.join(reqBodyParser.getErrorMessages(), ","));
        }

    } else {
        if (false && !checkAPIKey(apiKey, userId)) {
            retMap.put("error", "Invalid user ID or API key");
            return retMap;
        }

        // Create a factory for disk-based file items. File size limit is
        // 50MB
        // Configure a repository (to ensure a secure temp location is used)
        File repository = new File(System.getProperty("java.io.tmpdir"));
        DiskFileItemFactory factory = new DiskFileItemFactory(1024 * 1024 * 50, repository);

        factory.setRepository(repository);

        // Create a new file upload handler
        ServletFileUpload upload = new ServletFileUpload(factory);

        // Parse the request
        List<FileItem> items = upload.parseRequest(req);

        if (items.size() == 1) {
            FileItem fileItem = items.get(0);
            IOUtils.copy(fileItem.getInputStream(), new FileOutputStream(tmpZipFile));
            retMap.putAll(handleZippedShapeFile(tmpZipFile));
        } else {
            retMap.put("error",
                    "Multiple files sent in request. A single zipped shape file should be supplied.");
        }
    }

    return retMap;
}

From source file: org.jitsi.videobridge.rest.HandlerImpl.java

/**
 * Creates a new <tt>Conference</tt> in (the associated)
 * <tt>Videobridge</tt>.
 *
 * @param baseRequest the original unwrapped {@link Request} object
 * @param request the request either as the {@code Request} object or a
 * wrapper of that request
 * @param response the response either as the {@code Response} object or a
 * wrapper of that response
 * @throws IOException
 * @throws ServletException
 */
private void doPostConferencesJSON(Request baseRequest, HttpServletRequest request,
        HttpServletResponse response) throws IOException, ServletException {
    Videobridge videobridge = getVideobridge();

    if (videobridge == null) {
        response.setStatus(HttpServletResponse.SC_SERVICE_UNAVAILABLE);
    } else if (RESTUtil.isJSONContentType(request.getContentType())) {
        Object requestJSONObject = null;
        int status = 0;

        try {
            requestJSONObject = new JSONParser().parse(request.getReader());
            if ((requestJSONObject == null) || !(requestJSONObject instanceof JSONObject)) {
                status = HttpServletResponse.SC_BAD_REQUEST;
            }
        } catch (ParseException pe) {
            status = HttpServletResponse.SC_BAD_REQUEST;
        }
        if (status == 0) {
            ColibriConferenceIQ requestConferenceIQ = JSONDeserializer
                    .deserializeConference((JSONObject) requestJSONObject);

            if ((requestConferenceIQ == null) || (requestConferenceIQ.getID() != null)) {
                status = HttpServletResponse.SC_BAD_REQUEST;
            } else {
                ColibriConferenceIQ responseConferenceIQ = null;

                try {
                    IQ responseIQ = videobridge.handleColibriConferenceIQ(requestConferenceIQ,
                            Videobridge.OPTION_ALLOW_NO_FOCUS);

                    if (responseIQ instanceof ColibriConferenceIQ) {
                        responseConferenceIQ = (ColibriConferenceIQ) responseIQ;
                    } else {
                        status = getHttpStatusCodeForResultIq(responseIQ);
                    }
                } catch (Exception e) {
                    status = HttpServletResponse.SC_INTERNAL_SERVER_ERROR;
                }
                if (status == 0 && responseConferenceIQ != null) {
                    JSONObject responseJSONObject = JSONSerializer.serializeConference(responseConferenceIQ);

                    if (responseJSONObject == null)
                        responseJSONObject = new JSONObject();

                    response.setStatus(HttpServletResponse.SC_OK);
                    responseJSONObject.writeJSONString(response.getWriter());
                }
            }
        }
        if (status != 0)
            response.setStatus(status);
    } else {
        response.setStatus(HttpServletResponse.SC_NOT_ACCEPTABLE);
    }
}

From source file: org.osaf.cosmo.mc.MorseCodeServlet.java

/**
 * Handles publish requests.
 */
protected void doPut(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
    if (log.isDebugEnabled())
        log.debug("handling PUT for " + req.getPathInfo());

    CollectionPath cp = CollectionPath.parse(req.getPathInfo());
    if (cp != null) {
        String parentUid = req.getParameter(PARAM_PARENT_UID);
        if (StringUtils.isEmpty(parentUid))
            parentUid = null;
        EimmlStreamReader reader = null;
        try {
            if (!checkWritePreconditions(req, resp))
                return;

            reader = new EimmlStreamReader(req.getReader());
            if (!reader.getCollectionUuid().equals(cp.getUid())) {

                String msg = "EIMML collection uid " + reader.getCollectionUuid()
                        + " does not match target collection uid " + cp.getUid();
                handleGeneralException(new BadRequestException(msg), resp);
                return;
            }

            EimmlStreamReaderIterator i = new EimmlStreamReaderIterator(reader);
            PubRecords records = new PubRecords(i, reader.getCollectionName(), reader.getCollectionHue());

            Set<TicketType> ticketTypes = null;
            try {
                ticketTypes = parseTicketTypes(req);
            } catch (IllegalArgumentException e) {
                handleGeneralException(new BadRequestException(e), resp);
                return;
            }

            PubCollection pubCollection = controller.publishCollection(cp.getUid(), parentUid, records,
                    ticketTypes);

            resp.setStatus(HttpServletResponse.SC_CREATED);
            resp.addHeader(HEADER_SYNC_TOKEN, pubCollection.getToken().serialize());
            for (Ticket ticket : pubCollection.getCollection().getTickets())
                resp.addHeader(HEADER_TICKET, formatTicket(ticket));

            return;
        } catch (CosmoSecurityException e) {
            if (e instanceof ItemSecurityException) {
                InsufficientPrivilegesException ipe = new InsufficientPrivilegesException(
                        (ItemSecurityException) e);
                handleGeneralException(ipe, resp);
            } else {
                resp.sendError(HttpServletResponse.SC_FORBIDDEN, e.getMessage());
            }
            return;
        } catch (IllegalArgumentException e) {
            String msg = "Parent uid must be specified when authenticated principal is not a user";
            handleGeneralException(new BadRequestException(msg), resp);
            return;
        } catch (EimmlStreamException e) {
            Throwable cause = e.getCause();
            String msg = "Unable to read EIM stream: " + e.getMessage();
            msg += cause != null ? ": " + cause.getMessage() : "";
            handleGeneralException(new BadRequestException(msg, e), resp);
            return;
        } catch (UidInUseException e) {
            handleGeneralException(new MorseCodeException(HttpServletResponse.SC_CONFLICT, e), resp);
            return;
        } catch (ServerBusyException e) {
            log.debug("received ServerBusyException during PUT");
            resp.setIntHeader(HEADER_RETRY_AFTER, 5);
            resp.sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE, "The server was busy, try again later");
            return;
        } catch (MorseCodeException e) {
            Throwable root = e.getCause();
            if (root != null && root instanceof EimmlStreamException) {
                String msg = "Unable to read EIM stream: " + root.getMessage();
                handleGeneralException(new BadRequestException(msg, e), resp);
                return;
            }
            if (root != null && root instanceof EimSchemaException) {
                String msg = "Unable to process EIM records: " + root.getMessage();
                handleGeneralException(new BadRequestException(msg, e), resp);
                return;
            }
            handleGeneralException(e, resp);
            return;
        } catch (RuntimeException e) {
            handleGeneralException(new MorseCodeException(e), resp);
            return;
        } finally {
            if (reader != null)
                reader.close();
        }
    }
    resp.setStatus(HttpServletResponse.SC_NOT_FOUND);
}