Example usage for org.apache.commons.fileupload FileItemIterator next

List of usage examples for org.apache.commons.fileupload FileItemIterator next

Introduction

On this page you can find example usage for org.apache.commons.fileupload FileItemIterator.next().

Prototype

FileItemStream next() throws FileUploadException, IOException;

Document

Returns the next available FileItemStream.
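
Before the project examples, here is a minimal, self-contained sketch of the usual iteration pattern around next(): obtain a FileItemIterator from ServletFileUpload.getItemIterator(request), then call hasNext()/next() and consume each FileItemStream before advancing. The servlet name and the way the fields are handled here are illustrative assumptions, not code from any of the projects below.

import java.io.IOException;
import java.io.InputStream;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.commons.fileupload.FileItemIterator;
import org.apache.commons.fileupload.FileItemStream;
import org.apache.commons.fileupload.FileUploadException;
import org.apache.commons.fileupload.servlet.ServletFileUpload;
import org.apache.commons.fileupload.util.Streams;

// Hypothetical servlet, shown only to illustrate the iteration pattern.
public class MinimalUploadServlet extends HttpServlet {

    @Override
    protected void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        if (!ServletFileUpload.isMultipartContent(request)) {
            response.sendError(HttpServletResponse.SC_BAD_REQUEST, "Not a multipart request");
            return;
        }
        ServletFileUpload upload = new ServletFileUpload();
        try {
            FileItemIterator iter = upload.getItemIterator(request);
            while (iter.hasNext()) {
                // next() returns the next FileItemStream; its stream must be
                // consumed before the iterator is advanced again.
                FileItemStream item = iter.next();
                InputStream stream = item.openStream();
                try {
                    if (item.isFormField()) {
                        String value = Streams.asString(stream);
                        // handle the simple form field value here
                    } else {
                        // stream the uploaded file's bytes to their destination here
                    }
                } finally {
                    stream.close();
                }
            }
        } catch (FileUploadException e) {
            throw new ServletException(e);
        }
    }
}

Because this is the streaming API, a part's data is only readable while the iterator is positioned on it; that is why every example below reads or copies openStream() before calling next() again.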

Usage

From source file:org.apache.juneau.examples.rest.TempDirResource.java

/**
 * [POST /upload] - Upload a file as a multipart form post.
 * Shows how to use the Apache Commons ServletFileUpload class for handling multi-part form posts.
 */
@RestMethod(name = "POST", path = "/upload", matchers = TempDirResource.MultipartFormDataMatcher.class)
public Redirect uploadFile(RestRequest req) throws Exception {
    ServletFileUpload upload = new ServletFileUpload();
    FileItemIterator iter = upload.getItemIterator(req);
    while (iter.hasNext()) {
        FileItemStream item = iter.next();
        if (item.getFieldName().equals("contents")) { //$NON-NLS-1$
            File f = new File(getRootDir(), item.getName());
            IOPipe.create(item.openStream(), new FileOutputStream(f)).closeOut().run();
        }
    }
    return new Redirect(); // Redirect to the servlet root.
}

From source file:org.apache.myfaces.webapp.filter.portlet.PortletChacheFileSizeErrorsFileUpload.java

/**
 * Similar to {@link ServletFileUpload#parseRequest(RequestContext)} but will
 * catch and swallow FileSizeLimitExceededExceptions in order to return as
 * many usable items as possible.
 * 
 * @param request the portlet request being parsed
 * @param fileUpload the FileUpload instance used to parse the request
 * @return List of {@link FileItem} excluding any that exceed the maximum size.
 * @throws FileUploadException if parsing the request fails
 */
public List parseRequestCatchingFileSizeErrors(ActionRequest request, FileUpload fileUpload)
        throws FileUploadException {
    try {
        List items = new ArrayList();

        // The line below throws a SizeLimitExceededException (wrapped by a
        // FileUploadIOException) if the request is longer than the max size
        // allowed by fileupload requests (FileUpload.getSizeMax)
        // But note that if the request does not send proper headers this check
        // just will not do anything and we still have to check it again.
        FileItemIterator iter = fileUpload.getItemIterator(new PortletRequestContext(request));

        FileItemFactory fac = fileUpload.getFileItemFactory();
        if (fac == null) {
            throw new NullPointerException("No FileItemFactory has been set.");
        }

        long maxFileSize = this.getFileSizeMax();
        long maxSize = this.getSizeMax();
        boolean checkMaxSize = false;

        if (maxFileSize == -1L) {
            //If no per-file limit was set, fall back to the overall request size limit (maxSize)
            maxFileSize = maxSize;
        }
        if (maxSize != -1L) {
            checkMaxSize = true;
        }

        while (iter.hasNext()) {
            final FileItemStream item = iter.next();
            FileItem fileItem = fac.createItem(item.getFieldName(), item.getContentType(), item.isFormField(),
                    item.getName());

            long allowedLimit = 0L;
            try {
                if (maxFileSize != -1L || checkMaxSize) {
                    if (checkMaxSize) {
                        allowedLimit = maxSize > maxFileSize ? maxFileSize : maxSize;
                    } else {
                        //Just put the limit
                        allowedLimit = maxFileSize;
                    }

                    long contentLength = getContentLength(item.getHeaders());

                    //If we have a content length in the header we can use it
                    if (contentLength != -1L && contentLength > allowedLimit) {
                        throw new FileUploadIOException(new FileSizeLimitExceededException(
                                "The field " + item.getFieldName() + " exceeds its maximum permitted "
                                        + " size of " + allowedLimit + " characters.",
                                contentLength, allowedLimit));
                    }

                    //Otherwise we must limit the input as it arrives (NOTE: we cannot rely
                    //on commons upload to throw this exception as it will close the 
                    //underlying stream
                    final InputStream itemInputStream = item.openStream();

                    InputStream limitedInputStream = new LimitedInputStream(itemInputStream, allowedLimit) {
                        protected void raiseError(long pSizeMax, long pCount) throws IOException {
                            throw new FileUploadIOException(new FileSizeLimitExceededException(
                                    "The field " + item.getFieldName() + " exceeds its maximum permitted "
                                            + " size of " + pSizeMax + " characters.",
                                    pCount, pSizeMax));
                        }
                    };

                    //Copy from the limited stream
                    long bytesCopied = Streams.copy(limitedInputStream, fileItem.getOutputStream(), true);

                    // Decrement the bytesCopied values from maxSize, so the next file copied 
                    // takes into account this value when allowedLimit var is calculated
                    // Note the invariant before the line is maxSize >= bytesCopied, since if this
                    // is not true a FileUploadIOException is thrown first.
                    maxSize -= bytesCopied;
                } else {
                    //We can just copy the data
                    Streams.copy(item.openStream(), fileItem.getOutputStream(), true);
                }
            } catch (FileUploadIOException e) {
                try {
                    throw (FileUploadException) e.getCause();
                } catch (FileUploadBase.FileSizeLimitExceededException se) {
                    request.setAttribute("org.apache.myfaces.custom.fileupload.exception",
                            "fileSizeLimitExceeded");
                    String fieldName = fileItem.getFieldName();
                    request.setAttribute("org.apache.myfaces.custom.fileupload." + fieldName + ".maxSize",
                            new Integer((int) allowedLimit));
                }
            } catch (IOException e) {
                throw new IOFileUploadException("Processing of " + FileUploadBase.MULTIPART_FORM_DATA
                        + " request failed. " + e.getMessage(), e);
            }
            if (fileItem instanceof FileItemHeadersSupport) {
                final FileItemHeaders fih = item.getHeaders();
                ((FileItemHeadersSupport) fileItem).setHeaders(fih);
            }
            if (fileItem != null) {
                items.add(fileItem);
            }
        }
        return items;
    } catch (FileUploadIOException e) {
        throw (FileUploadException) e.getCause();
    } catch (IOException e) {
        throw new FileUploadException(e.getMessage(), e);
    }
}

From source file:org.apache.myfaces.webapp.filter.servlet.ServletChacheFileSizeErrorsFileUpload.java

/**
 * Similar to {@link ServletFileUpload#parseRequest(RequestContext)} but will
 * catch and swallow FileSizeLimitExceededExceptions in order to return as
 * many usable items as possible.
 * 
 * @param request the servlet request being parsed
 * @param fileUpload the FileUpload instance used to parse the request
 * @return List of {@link FileItem} excluding any that exceed the maximum size.
 * @throws FileUploadException if parsing the request fails
 */
public List parseRequestCatchingFileSizeErrors(HttpServletRequest request, FileUpload fileUpload)
        throws FileUploadException {
    try {
        List items = new ArrayList();

        // The line below throws a SizeLimitExceededException (wrapped by a
        // FileUploadIOException) if the request is longer than the max size
        // allowed by fileupload requests (FileUpload.getSizeMax)
        // But note that if the request does not send proper headers this check
        // just will not do anything and we still have to check it again.
        FileItemIterator iter = fileUpload.getItemIterator(new ServletRequestContext(request));

        FileItemFactory fac = fileUpload.getFileItemFactory();
        if (fac == null) {
            throw new NullPointerException("No FileItemFactory has been set.");
        }

        long maxFileSize = this.getFileSizeMax();
        long maxSize = this.getSizeMax();
        boolean checkMaxSize = false;

        if (maxFileSize == -1L) {
            //If no per-file limit was set, fall back to the overall request size limit (maxSize)
            maxFileSize = maxSize;
        }
        if (maxSize != -1L) {
            checkMaxSize = true;
        }

        while (iter.hasNext()) {
            final FileItemStream item = iter.next();
            FileItem fileItem = fac.createItem(item.getFieldName(), item.getContentType(), item.isFormField(),
                    item.getName());

            long allowedLimit = 0L;
            try {
                if (maxFileSize != -1L || checkMaxSize) {
                    if (checkMaxSize) {
                        allowedLimit = maxSize > maxFileSize ? maxFileSize : maxSize;
                    } else {
                        //Just put the limit
                        allowedLimit = maxFileSize;
                    }

                    long contentLength = getContentLength(item.getHeaders());

                    //If we have a content length in the header we can use it
                    if (contentLength != -1L && contentLength > allowedLimit) {
                        throw new FileUploadIOException(new FileSizeLimitExceededException(
                                "The field " + item.getFieldName() + " exceeds its maximum permitted "
                                        + " size of " + allowedLimit + " characters.",
                                contentLength, allowedLimit));
                    }

                    //Otherwise we must limit the input as it arrives (NOTE: we cannot rely
                    //on commons upload to throw this exception as it will close the 
                    //underlying stream
                    final InputStream itemInputStream = item.openStream();

                    InputStream limitedInputStream = new LimitedInputStream(itemInputStream, allowedLimit) {
                        protected void raiseError(long pSizeMax, long pCount) throws IOException {
                            throw new FileUploadIOException(new FileSizeLimitExceededException(
                                    "The field " + item.getFieldName() + " exceeds its maximum permitted "
                                            + " size of " + pSizeMax + " characters.",
                                    pCount, pSizeMax));
                        }
                    };

                    //Copy from the limited stream
                    long bytesCopied = Streams.copy(limitedInputStream, fileItem.getOutputStream(), true);

                    // Decrement the bytesCopied values from maxSize, so the next file copied 
                    // takes into account this value when allowedLimit var is calculated
                    // Note the invariant before the line is maxSize >= bytesCopied, since if this
                    // is not true a FileUploadIOException is thrown first.
                    maxSize -= bytesCopied;
                } else {
                    //We can just copy the data
                    Streams.copy(item.openStream(), fileItem.getOutputStream(), true);
                }
            } catch (FileUploadIOException e) {
                try {
                    throw (FileUploadException) e.getCause();
                } catch (FileUploadBase.FileSizeLimitExceededException se) {
                    request.setAttribute("org.apache.myfaces.custom.fileupload.exception",
                            "fileSizeLimitExceeded");
                    String fieldName = fileItem.getFieldName();
                    request.setAttribute("org.apache.myfaces.custom.fileupload." + fieldName + ".maxSize",
                            new Integer((int) allowedLimit));
                }
            } catch (IOException e) {
                throw new IOFileUploadException("Processing of " + FileUploadBase.MULTIPART_FORM_DATA
                        + " request failed. " + e.getMessage(), e);
            }
            if (fileItem instanceof FileItemHeadersSupport) {
                final FileItemHeaders fih = item.getHeaders();
                ((FileItemHeadersSupport) fileItem).setHeaders(fih);
            }
            if (fileItem != null) {
                items.add(fileItem);
            }
        }
        return items;
    } catch (FileUploadIOException e) {
        throw (FileUploadException) e.getCause();
    } catch (IOException e) {
        throw new FileUploadException(e.getMessage(), e);
    }
}

From source file:org.apache.olio.webapp.fileupload.FileUploadHandler.java

/**
 * Handles the initial fields up to the first upload field. This will
 * allow creating the database entry and obtaining the auto-generated
 * ids.
 * @return A hash table with the initial field values
 */
public Hashtable<String, String> getInitialParams(HttpServletRequest request, HttpServletResponse response) {

    // Log the incoming request headers
    Enumeration enumx = request.getHeaderNames();
    String key = "";
    String listx = "";
    while (enumx.hasMoreElements()) {
        key = (String) enumx.nextElement();
        listx += "\n" + key + ":" + request.getHeader(key);
    }
    logger.fine("Incoming Header Item:" + listx);
    // Enable the progress bar. The managed bean in the session could be component specific, but we can't create the
    // component-specific session object until we have the component's name. For now, use a static key through the backing bean.
    // Use session to allow the monitoring of the fileupload based
    HttpSession session = request.getSession();

    FileUploadStatus status = new FileUploadStatus();
    session.setAttribute(FileUploadUtil.FILE_UPLOAD_STATUS, status);
    setFileUploadStatus(status);

    // Create hashtable to hold uploaded data so it can be used by custom post extension
    Hashtable<String, String> htUpload = new Hashtable<String, String>();
    // set to set hashtable for final retrieval
    status.setUploadItems(htUpload);

    // get size of upload and set status
    long totalSizeOfUpload = request.getContentLength();
    status.setTotalUploadSize(totalSizeOfUpload);

    // Check that we have a proper file upload request
    boolean isMultipart = ServletFileUpload.isMultipartContent(request);
    if (isMultipart) {

        // The streaming API typically provides better performance for file uploads.
        // Create a new file upload handler
        ServletFileUpload upload = new ServletFileUpload();

        try {
            // Now we should have the componentsName and upload directory to set up the remaining upload of file items
            String compName = htUpload.get(FileUploadUtil.COMPONENT_NAME);
            status.setName(compName);

            // Parse the request and return the list of "FileItem"s while updating status
            FileItemIterator iter = upload.getItemIterator(request);

            status.setReadingComplete();

            while (iter.hasNext()) {
                item = iter.next();
                if (item.isFormField()) {
                    // handle a form item being uploaded
                    String itemName = item.getFieldName();

                    // process form (non-file) item
                    int size = formItemFound(item, htUpload);
                    updateSessionStatus(itemName, size);

                    logger.fine("Form field item:" + itemName);

                } else {
                    // At the first find of an uploaded file, stop.
                    // We need to insert our record first in order
                    // to find the id.
                    break;
                }
            }
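            // Remember the iterator so the remaining (file) items can be processed after the database record is created.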
            itemIter = iter;
        } catch (Exception e) {
            status.setUploadError(
                    "FileUpload didn't complete successfully.  Exception received:" + e.toString());
            logger.log(Level.SEVERE, "file.upload.exception", e);
        }
    }
    fileUploadStatus = status;
    requestParams = htUpload;
    return htUpload;
}

From source file:org.apache.struts.extras.SecureJakartaStreamMultiPartRequest.java

/**
 * Processes the upload.
 *
 * @param request the multipart servlet request to process
 * @param saveDir the directory in which uploaded files are saved
 * @throws Exception if processing the upload fails
 */
private void processUpload(HttpServletRequest request, String saveDir) throws Exception {

    // Sanity check that the request is a multi-part/form-data request.
    if (ServletFileUpload.isMultipartContent(request)) {

        // Sanity check on request size.
        boolean requestSizePermitted = isRequestSizePermitted(request);

        // Interface with Commons FileUpload API
        // Using the Streaming API
        ServletFileUpload servletFileUpload = new ServletFileUpload();
        FileItemIterator i = servletFileUpload.getItemIterator(request);

        // Iterate the file items
        while (i.hasNext()) {
            try {
                FileItemStream itemStream = i.next();

                // If the file item stream is a form field, delegate to the
                // field item stream handler
                if (itemStream.isFormField()) {
                    processFileItemStreamAsFormField(itemStream);
                }

                // Delegate the file item stream for a file field to the
                // file item stream handler, but delegation is skipped
                // if the requestSizePermitted check failed based on the
                // complete content-size of the request.
                else {

                    // prevent processing file field item if request size not allowed.
                    // also warn user in the logs.
                    if (!requestSizePermitted) {
                        addFileSkippedError(itemStream.getName(), request);
                        LOG.warn("Skipped stream '#0', request maximum size (#1) exceeded.",
                                itemStream.getName(), maxSize);
                        continue;
                    }

                    processFileItemStreamAsFileField(itemStream, saveDir);
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}

From source file:org.artags.server.web.TagUploadServlet.java

@Override
public void doPost(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException {
    try {
        // Create a new file upload handler
        ServletFileUpload upload = new ServletFileUpload();
        upload.setSizeMax(1000000);
        res.setContentType(Constants.CONTENT_TYPE_TEXT);
        PrintWriter out = res.getWriter();

        byte[] image = null;
        byte[] thumbnail = null;

        Tag tag = new Tag();
        TagImage tagImage = new TagImage();
        TagThumbnail tagThumbnail = new TagThumbnail();
        String contentType = null;
        boolean bLandscape = false;

        try {
            FileItemIterator iterator = upload.getItemIterator(req);
            while (iterator.hasNext()) {
                FileItemStream item = iterator.next();
                InputStream in = item.openStream();

                if (item.isFormField()) {
                    if (Constants.PARAMATER_NAME.equals(item.getFieldName())) {
                        String name = IOUtils.toString(in, "UTF-8");
                        tag.setName(name);
                    }
                    if (Constants.PARAMATER_LAT.equals(item.getFieldName())) {
                        tag.setLat(Double.parseDouble(IOUtils.toString(in)));
                    }
                    if (Constants.PARAMATER_LON.equals(item.getFieldName())) {
                        tag.setLon(Double.parseDouble(IOUtils.toString(in)));
                    }
                    if (Constants.PARAMATER_LANDSCAPE.equals(item.getFieldName())) {
                        bLandscape = IOUtils.toString(in).equals("on");
                    }
                } else {
                    String fieldName = item.getFieldName();
                    String fileName = item.getName();
                    contentType = item.getContentType();

                    try {
                        if (fieldName.equals("thumbnail")) {
                            thumbnail = IOUtils.toByteArray(in);
                        } else {
                            image = IOUtils.toByteArray(in);
                        }
                    } finally {
                        IOUtils.closeQuietly(in);
                    }

                }
            }
        } catch (SizeLimitExceededException e) {
            out.println("You exceeded the maximum size (" + e.getPermittedSize() + ") of the file ("
                    + e.getActualSize() + ")");
        }

        contentType = (contentType != null) ? contentType : "image/jpeg";

        if (bLandscape) {
            image = rotate(image);
            if (thumbnail != null) {
                thumbnail = rotate(thumbnail);
            }
        }
        tagImage.setImage(image);
        tagImage.setContentType(contentType);
        if (thumbnail != null) {
            tagThumbnail.setImage(thumbnail);
        } else {
            tagThumbnail.setImage(createThumbnail(image));
        }

        tagThumbnail.setContentType(contentType);

        TagImageDAO daoImage = new TagImageDAO();
        daoImage.create(tagImage);

        TagThumbnailDAO daoThumbnail = new TagThumbnailDAO();
        daoThumbnail.create(tagThumbnail);

        TagDAO dao = new TagDAO();
        tag.setKeyImage(tagImage.getKey());
        tag.setKeyThumbnail(tagThumbnail.getKey());
        tag.setDate(new Date().getTime());
        tag.setDateUpdate(new Date().getTime());
        Tag newTag = dao.create(tag);
        out.print("" + newTag.getKey().getId());
        out.close();
        CacheService.instance().invalidate();

    } catch (Exception ex) {

        throw new ServletException(ex);
    }
}

From source file:org.bimserver.servlets.UploadServlet.java

@Override
public void service(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    if (request.getHeader("Origin") != null
            && !getBimServer().getServerSettingsCache().isHostAllowed(request.getHeader("Origin"))) {
        response.setStatus(403);
        return;
    }
    response.setHeader("Access-Control-Allow-Origin", request.getHeader("Origin"));
    response.setHeader("Access-Control-Allow-Headers", "Content-Type");

    String token = (String) request.getSession().getAttribute("token");

    ObjectNode result = OBJECT_MAPPER.createObjectNode();
    response.setContentType("text/json");
    try {
        boolean isMultipart = ServletFileUpload.isMultipartContent(request);
        long poid = -1;
        String comment = null;
        if (isMultipart) {
            ServletFileUpload upload = new ServletFileUpload();
            FileItemIterator iter = upload.getItemIterator(request);
            InputStream in = null;
            String name = "";
            long deserializerOid = -1;
            boolean merge = false;
            boolean sync = false;
            String compression = null;
            String action = null;
            long topicId = -1;
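            // The form fields carrying the checkin metadata (action, token, poid, comment, ...) arrive before the
            // file part, which then triggers the actual file upload or checkin call.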
            while (iter.hasNext()) {
                FileItemStream item = iter.next();
                if (item.isFormField()) {
                    if ("action".equals(item.getFieldName())) {
                        action = Streams.asString(item.openStream());
                    } else if ("token".equals(item.getFieldName())) {
                        token = Streams.asString(item.openStream());
                    } else if ("poid".equals(item.getFieldName())) {
                        poid = Long.parseLong(Streams.asString(item.openStream()));
                    } else if ("comment".equals(item.getFieldName())) {
                        comment = Streams.asString(item.openStream());
                    } else if ("topicId".equals(item.getFieldName())) {
                        topicId = Long.parseLong(Streams.asString(item.openStream()));
                    } else if ("sync".equals(item.getFieldName())) {
                        sync = Streams.asString(item.openStream()).equals("true");
                    } else if ("merge".equals(item.getFieldName())) {
                        merge = Streams.asString(item.openStream()).equals("true");
                    } else if ("compression".equals(item.getFieldName())) {
                        compression = Streams.asString(item.openStream());
                    } else if ("deserializerOid".equals(item.getFieldName())) {
                        deserializerOid = Long.parseLong(Streams.asString(item.openStream()));
                    }
                } else {
                    name = item.getName();
                    in = item.openStream();

                    if ("file".equals(action)) {
                        ServiceInterface serviceInterface = getBimServer().getServiceFactory()
                                .get(token, AccessMethod.INTERNAL).get(ServiceInterface.class);
                        SFile file = new SFile();
                        byte[] data = IOUtils.toByteArray(in);
                        file.setData(data);
                        file.setSize(data.length);
                        file.setFilename(name);
                        file.setMime(item.getContentType());
                        result.put("fileId", serviceInterface.uploadFile(file));
                    } else if (poid != -1) {
                        InputStream realStream = null;
                        if ("gzip".equals(compression)) {
                            realStream = new GZIPInputStream(in);
                        } else if ("deflate".equals(compression)) {
                            realStream = new InflaterInputStream(in);
                        } else {
                            realStream = in;
                        }
                        InputStreamDataSource inputStreamDataSource = new InputStreamDataSource(realStream);
                        inputStreamDataSource.setName(name);
                        DataHandler ifcFile = new DataHandler(inputStreamDataSource);

                        if (token != null) {
                            if (topicId == -1) {
                                ServiceInterface service = getBimServer().getServiceFactory()
                                        .get(token, AccessMethod.INTERNAL).get(ServiceInterface.class);
                                long newTopicId = service.checkin(poid, comment, deserializerOid, -1L, name,
                                        ifcFile, merge, sync);
                                result.put("topicId", newTopicId);
                            } else {
                                ServiceInterface service = getBimServer().getServiceFactory()
                                        .get(token, AccessMethod.INTERNAL).get(ServiceInterface.class);
                                long newTopicId = service.checkinInitiated(topicId, poid, comment,
                                        deserializerOid, -1L, name, ifcFile, merge, true);
                                result.put("topicId", newTopicId);
                            }
                        }
                    } else {
                        result.put("exception", "No poid");
                    }
                }
            }
        }
    } catch (Exception e) {
        LOGGER.error("", e);
        sendException(response, e);
        return;
    }
    response.getWriter().write(result.toString());
}

From source file:org.bonitasoft.engine.api.internal.servlet.ServletCall.java

/**
 * Default constructor.
 * 
 * @param request
 *        The request made to access this servletCall.
 * @param response
 *        The response to return.
 * @throws IOException
 * @throws FileUploadException
 */
public ServletCall(final HttpServletRequest request, final HttpServletResponse response)
        throws FileUploadException, IOException {
    super();
    this.request = request;
    this.response = response;
    parameters = new HashMap<String, String>();
    binaryParameters = new ArrayList<byte[]>();
    if (ServletFileUpload.isMultipartContent(request)) {
        final ServletFileUpload upload = new ServletFileUpload();
        // Parse the request
        final FileItemIterator iter = upload.getItemIterator(request);
        while (iter.hasNext()) {
            try {
                final FileItemStream item = iter.next();
                final InputStream stream = item.openStream();
                String fieldName = item.getFieldName();
                if (fieldName.startsWith(BINARY_PARAMETER)) {
                    binaryParameters.add(IOUtil.getAllContentFrom(stream));
                } else {
                    String read = IOUtil.read(stream);
                    parameters.put(fieldName, read);
                }
                stream.close();
            } catch (final Exception t) {
                throw new IOException(t);
            }
        }
    } else {
        final Map<String, String[]> parameterMap = this.request.getParameterMap();
        final Set<Entry<String, String[]>> entrySet = parameterMap.entrySet();
        for (final Entry<String, String[]> entry : entrySet) {
            parameters.put(entry.getKey(), entry.getValue()[0]);
        }
    }
}

From source file:org.brutusin.rpc.http.RpcServlet.java

/**
 *
 * @param req
 * @return
 * @throws IOException
 */
private static Map<String, String[]> parseMultipartParameters(HttpServletRequest req) throws IOException {
    if (isMultipartContent(req)) {
        Map<String, String[]> multipartParameters = new HashMap();
        Map<String, List<String>> map = new HashMap();
        try {
            ServletFileUpload upload = new ServletFileUpload();
            FileItemIterator iter = upload.getItemIterator(req);
            req.setAttribute(REQ_ATT_MULTIPART_ITERATOR, iter);
            while (iter.hasNext()) {
                FileItemStream item = iter.next();
                if (!item.isFormField()) {
                    req.setAttribute(REQ_ATT_MULTIPART_CURRENT_ITEM, item);
                    break;
                }
                List<String> list = map.get(item.getFieldName());
                if (list == null) {
                    list = new ArrayList();
                    map.put(item.getFieldName(), list);
                }
                String encoding = req.getCharacterEncoding();
                if (encoding == null) {
                    encoding = "UTF-8";
                }
                list.add(Miscellaneous.toString(item.openStream(), encoding));
            }
        } catch (FileUploadException ex) {
            throw new RuntimeException(ex);
        }
        for (Map.Entry<String, List<String>> entrySet : map.entrySet()) {
            String key = entrySet.getKey();
            List<String> value = entrySet.getValue();
            multipartParameters.put(key, value.toArray(new String[value.size()]));
        }
        return multipartParameters;
    }
    return null;
}

From source file:org.brutusin.rpc.http.RpcServlet.java

/**
 *
 * @param req
 * @param rpcRequest
 * @param service
 * @return
 * @throws Exception
 */
private Map<String, InputStream> getStreams(HttpServletRequest req, RpcRequest rpcRequest, HttpAction service)
        throws Exception {
    if (!FileUploadBase.isMultipartContent(new ServletRequestContext(req))) {
        return null;
    }
    int streamsNumber = getInputStreamsNumber(rpcRequest, service);
    boolean isResponseStreamed = service.isBinaryResponse();
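    // parseMultipartParameters(...) already consumed the leading form fields and left the iterator
    // (and the first file item) in request attributes, positioned at the first non-form-field part.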
    FileItemIterator iter = (FileItemIterator) req.getAttribute(REQ_ATT_MULTIPART_ITERATOR);
    int count = 0;
    final Map<String, InputStream> map = new HashMap();
    final File tempDirectory;
    if (streamsNumber > 1 || streamsNumber == 1 && isResponseStreamed) {
        tempDirectory = createTempUploadDirectory();
        req.setAttribute(REQ_ATT_TEMPORARY_FOLDER, tempDirectory);
    } else {
        tempDirectory = null;
    }
    FileItemStream item = (FileItemStream) req.getAttribute(REQ_ATT_MULTIPART_CURRENT_ITEM);
    long availableLength = RpcConfig.getInstance().getMaxRequestSize();
    while (item != null) {
        count++;
        long maxLength = Math.min(availableLength, RpcConfig.getInstance().getMaxFileSize());
        if (count < streamsNumber || isResponseStreamed) { // if the response is streamed, all input streams have to be read first
            File file = new File(tempDirectory, item.getFieldName());
            FileOutputStream fos = new FileOutputStream(file);
            try {
                Miscellaneous.pipeSynchronously(new LimitedLengthInputStream(item.openStream(), maxLength),
                        fos);
            } catch (MaxLengthExceededException ex) {
                if (maxLength == RpcConfig.getInstance().getMaxFileSize()) {
                    throw new MaxLengthExceededException(
                            "Upload part '" + item.getFieldName() + "' exceeds maximum length ("
                                    + RpcConfig.getInstance().getMaxFileSize() + " bytes)",
                            RpcConfig.getInstance().getMaxFileSize());
                } else {
                    throw new MaxLengthExceededException("Request exceeds maximum length ("
                            + RpcConfig.getInstance().getMaxRequestSize() + " bytes)",
                            RpcConfig.getInstance().getMaxRequestSize());
                }
            }
            map.put(item.getFieldName(), new MetaDataInputStream(new FileInputStream(file), item.getName(),
                    item.getContentType(), file.length(), null));
            availableLength -= file.length();
        } else if (count == streamsNumber) {
            map.put(item.getFieldName(),
                    new MetaDataInputStream(new LimitedLengthInputStream(item.openStream(), maxLength),
                            item.getName(), item.getContentType(), null, null));
            break;
        }
        req.setAttribute(REQ_ATT_MULTIPART_CURRENT_ITEM, item);
        if (iter.hasNext()) {
            item = iter.next();
        } else {
            item = null;
        }
    }
    if (count != streamsNumber) {
        throw new IllegalArgumentException("Invalid multipart request received. Number of uploaded files ("
                + count + ") does not match expected (" + streamsNumber + ")");
    }
    return map;
}