List of usage examples for org.apache.commons.fileupload FileItemStream getFieldName
String getFieldName();
From source file:org.codelabor.system.file.web.servlet.FileUploadStreamServlet.java
/**
 * Receives a multipart upload request, stores each uploaded file through
 * {@code UploadUtils}/{@code FileManager}, collects plain form fields into the
 * parameter map, then forwards to the configured upload view.
 *
 * @param request  the servlet request (may or may not be multipart)
 * @param response the servlet response, used for the final dispatch
 * @throws Exception propagated from parameter processing or dispatching
 */
@Override
protected void upload(HttpServletRequest request, HttpServletResponse response) throws Exception {
    // Resolve the FileManager bean from the Spring web application context.
    WebApplicationContext ctx = WebApplicationContextUtils
            .getRequiredWebApplicationContext(this.getServletContext());
    FileManager fileManager = (FileManager) ctx.getBean("fileManager");
    boolean isMultipart = ServletFileUpload.isMultipartContent(request);
    Map<String, Object> paramMap = RequestUtils.getParameterMap(request);
    if (logger.isDebugEnabled()) {
        logger.debug(paramMap.toString());
    }
    String mapId = (String) paramMap.get("mapId");
    // Repository type defaults to the servlet's configured value but may be
    // overridden per-request via the "repositoryType" parameter.
    RepositoryType acceptedRepositoryType = repositoryType;
    String requestedRepositoryType = (String) paramMap.get("repositoryType");
    if (StringUtils.isNotEmpty(requestedRepositoryType)) {
        acceptedRepositoryType = RepositoryType.valueOf(requestedRepositoryType);
    }
    if (isMultipart) {
        ServletFileUpload upload = new ServletFileUpload();
        upload.setFileSizeMax(fileSizeMax);
        upload.setSizeMax(requestSizeMax);
        upload.setHeaderEncoding(characterEncoding);
        upload.setProgressListener(new FileUploadProgressListener());
        try {
            FileItemIterator iter = upload.getItemIterator(request);
            while (iter.hasNext()) {
                // FIX: renamed from the original typo "fileItemSteam".
                FileItemStream fileItemStream = iter.next();
                if (logger.isDebugEnabled()) {
                    logger.debug(fileItemStream.toString());
                }
                FileDTO fileDTO = null;
                if (fileItemStream.isFormField()) {
                    // Plain form field: merge it into the request parameters.
                    paramMap.put(fileItemStream.getFieldName(),
                            Streams.asString(fileItemStream.openStream(), characterEncoding));
                } else {
                    // File field: skip empty file inputs (no file selected).
                    if (fileItemStream.getName() == null || fileItemStream.getName().length() == 0) {
                        continue;
                    }
                    // Describe the upload for persistence.
                    fileDTO = new FileDTO();
                    fileDTO.setMapId(mapId);
                    fileDTO.setRealFilename(FilenameUtils.getName(fileItemStream.getName()));
                    if (acceptedRepositoryType == RepositoryType.FILE_SYSTEM) {
                        fileDTO.setUniqueFilename(getUniqueFilename());
                    }
                    fileDTO.setContentType(fileItemStream.getContentType());
                    fileDTO.setRepositoryPath(realRepositoryPath);
                    if (logger.isDebugEnabled()) {
                        logger.debug(fileDTO.toString());
                    }
                    UploadUtils.processFile(acceptedRepositoryType, fileItemStream.openStream(), fileDTO);
                }
                if (fileDTO != null)
                    fileManager.insertFile(fileDTO);
            }
        } catch (Exception e) {
            // FIX: the original had two identical catch blocks (FileUploadException
            // and Exception) that called printStackTrace() and logged only the
            // message; log once with the throwable so the stack trace is kept.
            logger.error(e.getMessage(), e);
        }
    }
    // NOTE: paramMap was already populated above, so the original's redundant
    // re-fetch in a non-multipart else-branch was removed (same call, same result).
    try {
        processParameters(paramMap);
    } catch (Exception e) {
        logger.error(e.getMessage(), e);
    }
    dispatch(request, response, forwardPathUpload);
}
From source file:org.collectionspace.chain.controller.WebUIRequest.java
private void initRequest(UIUmbrella umbrella, HttpServletRequest request, HttpServletResponse response, List<String> p) throws IOException, UIException { this.request = request; this.response = response; boolean isMultipart = ServletFileUpload.isMultipartContent(request); if (isMultipart) { // Create a new file upload handler ServletFileUpload upload = new ServletFileUpload(); // Parse the request FileItemIterator iter;/* www. j a v a2 s . c om*/ try { iter = upload.getItemIterator(request); while (iter.hasNext()) { FileItemStream item = iter.next(); String name = item.getFieldName(); //InputStream stream = item.openStream(); if (item.isFormField()) { // System.out.println("Form field " + name + " with value " // + Streams.asString(stream) + " detected."); } else { // System.out.println("File field " + name + " with file name " // + item.getName() + " detected."); // Process the input stream contentHeaders = item.getHeaders(); uploadName = item.getName(); ByteArrayOutputStream byteOut = new ByteArrayOutputStream(); if (item != null) { InputStream stream = item.openStream(); IOUtils.copy(stream, byteOut); new TeeInputStream(stream, byteOut); } bytebody = byteOut.toByteArray(); } } } catch (FileUploadException e) { // TODO Auto-generated catch block e.printStackTrace(); } } else { body = IOUtils.toString(request.getInputStream(), "UTF-8"); } this.ppath = p.toArray(new String[0]); if (!(umbrella instanceof WebUIUmbrella)) throw new UIException("Bad umbrella"); this.umbrella = (WebUIUmbrella) umbrella; session = calculateSessionId(); }
From source file:org.csi.yucca.storage.datamanagementapi.service.MetadataService.java
/**
 * Creates a bulk dataset for the given tenant from a multipart request carrying
 * the dataset metadata (JSON) plus optional CSV data and import options, then
 * creates the matching API record and publishes/subscribes it in the store.
 *
 * @param tenant  the tenant code from the URL path
 * @param request the multipart HTTP request
 * @return a CreateDatasetResponse serialized as JSON (including any errors)
 */
@POST
@Path("/{tenant}")
@Produces(MediaType.APPLICATION_JSON)
public String createMetadata(@PathParam("tenant") String tenant, @Context HttpServletRequest request)
        throws NumberFormatException, UnknownHostException {
    log.debug("[MetadataService::createMetadata] - START");
    String datasetMetadata = null;
    String encoding = null;
    String formatType = null;
    String csvSeparator = null;
    boolean skipFirstRow = false;
    String csvData = null;
    String fileName = null;
    try {
        // Streaming multipart parse: parts are dispatched on their field names.
        ServletFileUpload upload = new ServletFileUpload();
        FileItemIterator iterator = upload.getItemIterator(request);
        while (iterator.hasNext()) {
            FileItemStream item = iterator.next();
            if (IMPORT_BULKDATASET_METADATA_REQ_KEY.equals(item.getFieldName()))
                datasetMetadata = read(item.openStream());
            else if (IMPORT_BULKDATASET_ENCODING_REQ_KEY.equals(item.getFieldName()))
                encoding = read(item.openStream());
            else if (IMPORT_BULKDATASET_FORMAT_TYPE_REQ_KEY.equals(item.getFieldName()))
                formatType = read(item.openStream());
            else if (IMPORT_BULKDATASET_CSV_SEP_REQ_KEY.equals(item.getFieldName()))
                csvSeparator = read(item.openStream());
            else if (IMPORT_BULKDATASET_CSV_SKIP_FIRS_ROW_REQ_KEY.equals(item.getFieldName()))
                skipFirstRow = new Boolean(read(item.openStream()));
            else if (IMPORT_BULKDATASET_FILE_REQ_KEY.equals(item.getFieldName())) {
                // NOTE(review): assumes the encoding part arrives before the file part.
                csvData = readFileRows(item.openStream(), encoding);
                fileName = item.getName();
            }
        }
    } catch (Exception e) {
        // NOTE(review): parse failures are swallowed; the null checks below
        // determine what actually happens.
        e.printStackTrace();
    }
    log.debug("[MetadataService::createMetadata] - encoding: " + encoding + ", formatType: " + formatType
            + ", csvSeparator: " + csvSeparator);
    Metadata metadata = Metadata.fromJson(datasetMetadata);
    if (fileName != null)
        metadata.getInfo().addFilename(fileName);
    CreateDatasetResponse createDatasetResponse = new CreateDatasetResponse();
    // New datasets always start at version 1 and are marked current.
    metadata.setDatasetVersion(1);
    if (metadata.getConfigData() == null)
        metadata.setConfigData(new ConfigData());
    metadata.getConfigData().setType(Metadata.CONFIG_DATA_TYPE_DATASET);
    metadata.getConfigData().setSubtype(Metadata.CONFIG_DATA_SUBTYPE_BULK_DATASET);
    metadata.getConfigData().setCurrent(1);
    if (metadata.getInfo() == null)
        metadata.setInfo(new Info());
    if (metadata.getInfo().getFields() != null) {
        // Normalize field names; default missing data types to "string".
        for (Field field : metadata.getInfo().getFields()) {
            field.setFieldName(Util.cleanStringCamelCase(field.getFieldName()));
            if (field != null && field.getDataType() == null)
                field.setDataType("string");
        }
    }
    metadata.getInfo().setRegistrationDate(new Date());
    try {
        List<SDPBulkInsertException> checkFileToWriteErrors = null;
        MongoDBDataUpload dataUpload = new MongoDBDataUpload();
        if (csvData != null) {
            checkFileToWriteErrors = dataUpload.checkFileToWrite(csvData, csvSeparator, metadata, skipFirstRow);
        }
        if (checkFileToWriteErrors != null && checkFileToWriteErrors.size() > 0) {
            // CSV validation failed: report the errors and create nothing.
            for (SDPBulkInsertException error : checkFileToWriteErrors) {
                createDatasetResponse.addErrorMessage(
                        new ErrorMessage(error.getErrorCode(), error.getErrorMessage(), error.getErrorDetail()));
            }
        } else {
            MongoClient mongo = MongoSingleton.getMongoClient();
            String supportDb = Config.getInstance().getDbSupport();
            String supportDatasetCollection = Config.getInstance().getCollectionSupportDataset();
            MongoDBMetadataDAO metadataDAO = new MongoDBMetadataDAO(mongo, supportDb, supportDatasetCollection);
            String supportApiCollection = Config.getInstance().getCollectionSupportApi();
            MongoDBApiDAO apiDAO = new MongoDBApiDAO(mongo, supportDb, supportApiCollection);
            // Look up the tenant record to get its id and dataset quota.
            BasicDBObject searchTenantQuery = new BasicDBObject();
            searchTenantQuery.put("tenantCode", tenant);
            DBCollection tenantCollection = mongo.getDB(supportDb).getCollection("tenant");
            DBObject tenantData = tenantCollection.find(searchTenantQuery).one();
            Long idTenant = ((Number) tenantData.get("idTenant")).longValue();
            int maxDatasetNum = ((Number) tenantData.get("maxDatasetNum")).intValue();
            if (maxDatasetNum > 0) {
                // Enforce the per-tenant dataset quota (<= 0 means no limit here).
                int numCurrentDataset = metadataDAO.countAllMetadata(tenant, true);
                log.info("[MetadataService::createMetadata] - tenant=" + tenant + " maxDatasetNum=" + maxDatasetNum
                        + " numCurrentDataset=" + numCurrentDataset);
                //TODO
                if (numCurrentDataset >= maxDatasetNum)
                    throw new MaxDatasetNumException("too many dataset");
            }
            metadata.getConfigData().setIdTenant(idTenant);
            // binary metadata: create a metadata record specific for attachment
            Metadata binaryMetadata = null;
            if (metadata.getInfo().getFields() != null) {
                for (Field field : metadata.getInfo().getFields()) {
                    if (field.getDataType().equals("binary")) {
                        binaryMetadata = Metadata.createBinaryMetadata(metadata);
                        break;
                    }
                }
            }
            if (binaryMetadata != null) {
                Metadata binaryMetadataCreated = metadataDAO.createMetadata(binaryMetadata, null);
                metadata.getInfo().setBinaryDatasetVersion(binaryMetadataCreated.getDatasetVersion());
                metadata.getInfo().setBinaryIdDataset(binaryMetadataCreated.getIdDataset());
            }
            // Build the tenant-sharing list: keep non-owner entries for other
            // tenants, then append this tenant as the owner entry.
            List<Tenantsharing> lista = new ArrayList<Tenantsharing>();
            if (metadata.getInfo().getTenantssharing() != null) {
                Set<String> tenantSet = new TreeSet<String>();
                for (Tenantsharing tenantInList : metadata.getInfo().getTenantssharing().getTenantsharing()) {
                    if (!tenantInList.getTenantCode().equals(metadata.getConfigData().getTenantCode())
                            && !tenantSet.contains(metadata.getConfigData().getTenantCode())
                            && tenantInList.getIsOwner() != 1) {
                        lista.add(tenantInList);
                        tenantSet.add(tenantInList.getTenantCode());
                    }
                }
            }
            Tenantsharing owner = new Tenantsharing();
            owner.setIdTenant(metadata.getConfigData().getIdTenant());
            owner.setIsOwner(1);
            owner.setTenantCode(metadata.getConfigData().getTenantCode());
            owner.setTenantName(metadata.getConfigData().getTenantCode());
            lista.add(owner);
            Tenantsharing arrayTenant[] = new Tenantsharing[lista.size()];
            arrayTenant = lista.toArray(arrayTenant);
            if (metadata.getInfo().getTenantssharing() == null) {
                Tenantssharing tenantssharing = new Tenantssharing();
                metadata.getInfo().setTenantssharing(tenantssharing);
            }
            metadata.getInfo().getTenantssharing().setTenantsharing(arrayTenant);
            // opendata: only public datasets keep their opendata section.
            if (!"public".equals(metadata.getInfo().getVisibility())) {
                metadata.setOpendata(null);
            }
            Metadata metadataCreated = metadataDAO.createMetadata(metadata, null);
            MyApi api = MyApi.createFromMetadataDataset(metadataCreated);
            api.getConfigData().setType(Metadata.CONFIG_DATA_TYPE_API);
            api.getConfigData().setSubtype(Metadata.CONFIG_DATA_SUBTYPE_API_MULTI_BULK);
            MyApi apiCreated = apiDAO.createApi(api);
            createDatasetResponse.setMetadata(metadataCreated);
            createDatasetResponse.setApi(apiCreated);
            /*
             * Create api in the store
             */
            String apiName = "";
            try {
                apiName = StoreService.createApiforBulk(metadata, false, datasetMetadata);
            } catch (Exception duplicate) {
                // On a duplicate-name failure, retry once in "update" mode.
                if (duplicate.getMessage().toLowerCase().contains("duplicate")) {
                    try {
                        apiName = StoreService.createApiforBulk(metadata, true, datasetMetadata);
                    } catch (Exception e) {
                        log.error(
                                "[MetadataService::createMetadata] - ERROR to update API in Store for Bulk. Message: "
                                        + duplicate.getMessage());
                    }
                } else {
                    log.error(
                            "[MetadataService::createMetadata] - ERROR in create or update API in Store for Bulk. Message: "
                                    + duplicate.getMessage());
                }
            }
            try {
                // Publish the API and subscribe every sharing tenant, plus the owner.
                StoreService.publishStore("1.0", apiName, "admin");
                Set<String> tenantSet = new TreeSet<String>();
                if (metadata.getInfo().getTenantssharing() != null) {
                    for (Tenantsharing tenantSh : metadata.getInfo().getTenantssharing().getTenantsharing()) {
                        tenantSet.add(tenantSh.getTenantCode());
                        String appName = "userportal_" + tenantSh.getTenantCode();
                        StoreService.addSubscriptionForTenant(apiName, appName);
                    }
                }
                if (!tenantSet.contains(metadata.getConfigData().getTenantCode())) {
                    String appName = "userportal_" + metadata.getConfigData().getTenantCode();
                    StoreService.addSubscriptionForTenant(apiName, appName);
                }
            } catch (Exception e) {
                log.error("[MetadataService::createMetadata] - ERROR in publish Api in store - message: "
                        + e.getMessage());
            }
            if (csvData != null) {
                // Finally write the validated CSV rows into the tenant's data DB.
                try {
                    dataUpload.writeFileToMongo(mongo, "DB_" + tenant, "data", metadataCreated);
                } catch (Exception e) {
                    log.error("[MetadataService::createMetadata] - writeFileToMongo ERROR: " + e.getMessage());
                    createDatasetResponse.addErrorMessage(new ErrorMessage(e));
                    e.printStackTrace();
                }
            }
        }
    } catch (MaxDatasetNumException ex) {
        log.error("[MetadataService::createMetadata] - MaxDatasetNumException ERROR: ", ex);
        createDatasetResponse.addErrorMessage(new ErrorMessage(ex));
    }
    return createDatasetResponse.toJson();
}
From source file:org.csi.yucca.storage.datamanagementapi.service.MetadataService.java
@POST @Path("/add/{tenant}/{datasetCode}") @Produces(MediaType.APPLICATION_JSON)//from w w w.jav a 2s. c o m public String addData(@PathParam("tenant") String tenant, @PathParam("datasetCode") String datasetCode, @Context HttpServletRequest request) throws NumberFormatException, UnknownHostException { log.debug("[MetadataService::addData] - START"); String encoding = null; String formatType = null; String csvSeparator = null; boolean skipFirstRow = false; String csvData = null; String fileName = null; try { ServletFileUpload upload = new ServletFileUpload(); FileItemIterator iterator = upload.getItemIterator(request); while (iterator.hasNext()) { FileItemStream item = iterator.next(); if (IMPORT_BULKDATASET_ENCODING_REQ_KEY.equals(item.getFieldName())) encoding = read(item.openStream()); else if (IMPORT_BULKDATASET_FORMAT_TYPE_REQ_KEY.equals(item.getFieldName())) formatType = read(item.openStream()); else if (IMPORT_BULKDATASET_CSV_SEP_REQ_KEY.equals(item.getFieldName())) csvSeparator = read(item.openStream()); else if (IMPORT_BULKDATASET_CSV_SKIP_FIRS_ROW_REQ_KEY.equals(item.getFieldName())) skipFirstRow = new Boolean(read(item.openStream())); else if (IMPORT_BULKDATASET_FILE_REQ_KEY.equals(item.getFieldName())) { csvData = readFileRows(item.openStream(), encoding); fileName = item.getName(); } } } catch (Exception e) { e.printStackTrace(); } log.debug("[MetadataService::addData] - encoding: " + encoding + ", formatType: " + formatType + ", csvSeparator: " + csvSeparator); MongoClient mongo = MongoSingleton.getMongoClient(); String supportDb = Config.getInstance().getDbSupport(); String supportDatasetCollection = Config.getInstance().getCollectionSupportDataset(); MongoDBMetadataDAO metadataDAO = new MongoDBMetadataDAO(mongo, supportDb, supportDatasetCollection); Metadata existingMetadata = metadataDAO.readCurrentMetadataByCode(datasetCode); existingMetadata.getInfo().addFilename(fileName); metadataDAO.updateMetadata(existingMetadata); UpdateDatasetResponse 
updateDatasetResponse = new UpdateDatasetResponse(); MongoDBDataUpload dataUpload = new MongoDBDataUpload(); List<SDPBulkInsertException> checkFileToWriteErrors = dataUpload.checkFileToWrite(csvData, csvSeparator, existingMetadata, skipFirstRow); if (checkFileToWriteErrors != null && checkFileToWriteErrors.size() > 0) { for (SDPBulkInsertException error : checkFileToWriteErrors) { updateDatasetResponse.addErrorMessage( new ErrorMessage(error.getErrorCode(), error.getErrorMessage(), error.getErrorDetail())); } } else { try { dataUpload.writeFileToMongo(mongo, "DB_" + tenant, "data", existingMetadata); } catch (Exception e) { log.error("[MetadataService::addData] - writeFileToMongo ERROR: " + e.getMessage()); updateDatasetResponse.addErrorMessage(new ErrorMessage(e)); e.printStackTrace(); } } return updateDatasetResponse.toJson(); }
From source file:org.cvit.cabig.dmr.cmef.server.SubmitJobResource.java
/**
 * Handles the multipart "submit job" form. The parts are consumed positionally:
 * the first three are the job title, description and comment; every remaining
 * part is either the special "iframe" flag or a job parameter (form value or
 * uploaded file). The job is then stored, started, and the client is answered
 * with either an iframe HTML response or a redirect to the job URL.
 */
@Post("multi")
public Representation submitJob(Representation formRep) {
    RestletFileUpload upload = new RestletFileUpload();
    ComputationJob job = new ComputationJob();
    boolean inIframe = false;
    try {
        FileItemIterator items = upload.getItemIterator(formRep);
        List<ParameterValue> values = new ArrayList<ParameterValue>();
        job.setParameterValues(values);
        // State machine over the fixed part order: TITLE -> DESC -> COMMENTS -> PARAMS.
        State state = State.TITLE;
        while (items.hasNext()) {
            FileItemStream item = items.next();
            InputStream itemStream = item.openStream();
            switch (state) {
            case TITLE:
                job.setTitle(Streams.asString(itemStream));
                state = State.DESC;
                break;
            case DESC:
                job.setDescription(Streams.asString(itemStream));
                state = State.COMMENTS;
                break;
            case COMMENTS:
                job.setComment(Streams.asString(itemStream));
                state = State.PARAMS;
                break;
            case PARAMS:
                // Remaining parts: either the "iframe" flag or a job parameter.
                if (item.getFieldName().equals("iframe")) {
                    inIframe = Boolean.parseBoolean(Streams.asString(itemStream));
                } else {
                    Parameter param = new Parameter();
                    param.setName(parseParamName(item.getFieldName()));
                    ParameterValue value = new ParameterValue();
                    if (item.isFormField()) {
                        value.setValue(Streams.asString(itemStream));
                    } else {
                        // File parameter: persist the upload and reference its location.
                        value.setValue(storeFile(item.getName(), itemStream).getSource());
                    }
                    value.setJob(job);
                    value.setParameter(param);
                    param.setValue(value);
                    values.add(value);
                }
                break;
            }
        }
    } catch (Exception e) {
        throw new ResourceException(Status.SERVER_ERROR_INTERNAL,
                "Exception processing submit job form: " + e.getMessage(), e);
    }
    job = addNewJob(job);
    ComputationJob startedJob = startJob(job);
    if (inIframe) {
        // Inside an iframe the client needs an HTML document, not a redirect.
        return new StringRepresentation(buildIframeResponse(job), MediaType.TEXT_HTML);
    } else {
        Reference jobRef = getNamespace().jobRef(entryName, modelName,
                getResolver().getJobName(startedJob.getId()), true);
        redirectSeeOther(jobRef);
        return new StringRepresentation("Job submitted, URL: " + jobRef.toString() + ".");
    }
}
From source file:org.daxplore.presenter.server.servlets.AdminUploadServlet.java
@Override public void doPost(HttpServletRequest request, HttpServletResponse response) { try {/*from www .jav a2 s.c om*/ long time = System.nanoTime(); int statusCode = HttpServletResponse.SC_OK; response.setContentType("text/html; charset=UTF-8"); ServletFileUpload upload = new ServletFileUpload(); PersistenceManager pm = null; String prefix = null; try { FileItemIterator fileIterator = upload.getItemIterator(request); String fileName = ""; byte[] fileData = null; while (fileIterator.hasNext()) { FileItemStream item = fileIterator.next(); try (InputStream stream = item.openStream()) { if (item.isFormField()) { if (item.getFieldName().equals("prefix")) { prefix = Streams.asString(stream); } else { throw new BadRequestException("Form contains extra fields"); } } else { fileName = item.getName(); fileData = IOUtils.toByteArray(stream); } } } if (SharedResourceTools.isSyntacticallyValidPrefix(prefix)) { if (fileData != null && !fileName.equals("")) { pm = PMF.get().getPersistenceManager(); unzipAll(pm, prefix, fileData); } else { throw new BadRequestException("No file uploaded"); } } else { throw new BadRequestException("Request made with invalid prefix: '" + prefix + "'"); } logger.log(Level.INFO, "Unpacked new data for prefix '" + prefix + "' in " + ((System.nanoTime() - time) / 1000000000.0) + " seconds"); } catch (FileUploadException | IOException | BadRequestException e) { logger.log(Level.WARNING, e.getMessage(), e); statusCode = HttpServletResponse.SC_BAD_REQUEST; } catch (InternalServerException e) { logger.log(Level.SEVERE, e.getMessage(), e); statusCode = HttpServletResponse.SC_INTERNAL_SERVER_ERROR; } catch (DeadlineExceededException e) { logger.log(Level.SEVERE, "Timeout when uploading new data for prefix '" + prefix + "'", e); // the server is currently unavailable because it is overloaded (hopefully) statusCode = HttpServletResponse.SC_SERVICE_UNAVAILABLE; } finally { if (pm != null) { pm.close(); } } response.setStatus(statusCode); try (PrintWriter 
resWriter = response.getWriter()) { if (resWriter != null) { resWriter.write(Integer.toString(statusCode)); resWriter.close(); } } } catch (IOException | RuntimeException e) { logger.log(Level.SEVERE, "Unexpected exception: " + e.getMessage(), e); response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); } }
From source file:org.duracloud.duradmin.spaces.controller.ContentItemUploadController.java
@RequestMapping(value = "/spaces/content/upload", method = RequestMethod.POST) public ModelAndView handleRequest(HttpServletRequest request, HttpServletResponse response) throws Exception { try {//from w ww . j av a 2 s. c o m log.debug("handling request..."); ServletFileUpload upload = new ServletFileUpload(); FileItemIterator iter = upload.getItemIterator(request); String spaceId = null; String storeId = null; String contentId = null; List<ContentItem> results = new ArrayList<ContentItem>(); while (iter.hasNext()) { FileItemStream item = iter.next(); if (item.isFormField()) { String value = Streams.asString(item.openStream(), "UTF-8"); if (item.getFieldName().equals("spaceId")) { log.debug("setting spaceId: {}", value); spaceId = value; } else if (item.getFieldName().equals("storeId")) { storeId = value; } else if (item.getFieldName().equals("contentId")) { contentId = value; } } else { log.debug("setting fileStream: {}", item); if (StringUtils.isBlank(spaceId)) { throw new IllegalArgumentException("space id required."); } ContentItem ci = new ContentItem(); if (StringUtils.isBlank(contentId)) { contentId = item.getName(); } ci.setContentId(contentId); ci.setSpaceId(spaceId); ci.setStoreId(storeId); ci.setContentMimetype(item.getContentType()); ContentStore contentStore = contentStoreManager.getContentStore(ci.getStoreId()); ContentItemUploadTask task = new ContentItemUploadTask(ci, contentStore, item.openStream(), request.getUserPrincipal().getName()); task.execute(); ContentItem result = new ContentItem(); Authentication auth = (Authentication) SecurityContextHolder.getContext().getAuthentication(); SpaceUtil.populateContentItem(ContentItemController.getBaseURL(request), result, ci.getSpaceId(), ci.getContentId(), contentStore, auth); results.add(result); contentId = null; } } return new ModelAndView("javascriptJsonView", "results", results); } catch (Exception ex) { ex.printStackTrace(); throw ex; } }
From source file:org.eclipse.rdf4j.workbench.util.WorkbenchRequest.java
/**
 * Walks the multipart stream of this request: the part named "content" becomes
 * the request body (its stream and file name are stored on the instance and
 * parsing stops there); every other part is recorded as a simple parameter,
 * keeping only its first line.
 *
 * @return the non-content parts as a name-to-first-line map
 */
private Map<String, String> getMultipartParameterMap()
        throws RepositoryException, IOException, FileUploadException {
    Map<String, String> result = new HashMap<String, String>();
    ServletFileUpload parser = new ServletFileUpload();
    FileItemIterator parts = parser.getItemIterator(this);
    while (parts.hasNext()) {
        FileItemStream part = parts.next();
        String fieldName = part.getFieldName();
        if (!"content".equals(fieldName)) {
            result.put(fieldName, firstLine(part));
            continue;
        }
        // The content part ends parameter parsing; its stream is consumed later.
        content = part.openStream();
        contentFileName = part.getName();
        break;
    }
    return result;
}
From source file:org.eclipse.scout.rt.ui.html.json.UploadRequestHandler.java
protected void readUploadData(HttpServletRequest httpReq, long maxSize, Map<String, String> uploadProperties, List<BinaryResource> uploadResources) throws FileUploadException, IOException { ServletFileUpload upload = new ServletFileUpload(); upload.setHeaderEncoding(StandardCharsets.UTF_8.name()); upload.setSizeMax(maxSize);//from www . j a va 2 s . c om for (FileItemIterator it = upload.getItemIterator(httpReq); it.hasNext();) { FileItemStream item = it.next(); String name = item.getFieldName(); InputStream stream = item.openStream(); if (item.isFormField()) { // Handle non-file fields (interpreted as properties) uploadProperties.put(name, Streams.asString(stream, StandardCharsets.UTF_8.name())); } else { // Handle files String filename = item.getName(); if (StringUtility.hasText(filename)) { String[] parts = StringUtility.split(filename, "[/\\\\]"); filename = parts[parts.length - 1]; } String contentType = item.getContentType(); byte[] content = IOUtility.getContent(stream); // Info: we cannot set the charset property for uploaded files here, because we simply don't know it. // the only thing we could do is to guess the charset (encoding) by reading the byte contents of // uploaded text files (for binary file types the encoding is not relevant). However: currently we // do not set the charset at all. uploadResources.add(new BinaryResource(filename, contentType, content)); } } }
From source file:org.ejbca.ui.web.HttpUpload.java
/**
 * Creates a new upload state and receives all file and parameter data.
 * This constructor can only be called once per request.
 *
 * Use getParameterMap() and getFileMap() on the new object to access the data.
 *
 * @param request The servlet request object.
 * @param fileFields The names of the file fields to receive uploaded data from.
 * @param maxbytes Maximum file size.
 * @throws IOException if there are network problems, etc.
 * @throws FileUploadException if the request is invalid.
 */
@SuppressWarnings("unchecked") // Needed in some environments, and detected as unnecessary in others. Do not remove!
public HttpUpload(HttpServletRequest request, String[] fileFields, int maxbytes)
        throws IOException, FileUploadException {
    if (ServletFileUpload.isMultipartContent(request)) {
        // Multi-valued parameters are collected first, then converted to the
        // String -> String[] form expected by ParameterMap below.
        final Map<String, ArrayList<String>> paramTemp = new HashMap<String, ArrayList<String>>();
        fileMap = new HashMap<String, byte[]>();
        final ServletFileUpload upload = new ServletFileUpload();
        final FileItemIterator iter = upload.getItemIterator(request);
        while (iter.hasNext()) {
            final FileItemStream item = iter.next();
            final String name = item.getFieldName();
            if (item.isFormField()) {
                // Regular form field: append to the value list for this name,
                // decoded with the request's character encoding.
                ArrayList<String> values = paramTemp.get(name);
                if (values == null) {
                    values = new ArrayList<String>();
                    paramTemp.put(name, values);
                }
                values.add(Streams.asString(item.openStream(), request.getCharacterEncoding()));
            } else if (ArrayUtils.contains(fileFields, name)) {
                // Only file fields listed in fileFields are stored; any other
                // file field is skipped. Empty uploads are not recorded.
                byte[] data = getFileBytes(item, maxbytes);
                if (data != null && data.length > 0) {
                    fileMap.put(name, data);
                }
            }
        }
        // Convert to String,String[] map
        parameterMap = new ParameterMap();
        for (Entry<String, ArrayList<String>> entry : paramTemp.entrySet()) {
            final ArrayList<String> values = entry.getValue();
            final String[] valuesArray = new String[values.size()];
            parameterMap.put(entry.getKey(), values.toArray(valuesArray));
        }
    } else {
        // Not multipart: fall back to the container-parsed parameters.
        parameterMap = new ParameterMap(request.getParameterMap());
        fileMap = new HashMap<String, byte[]>();
    }
}