Usage examples for `org.apache.commons.fileupload.FileItemStream#getName()`, which returns the original filename supplied by the client for an uploaded file part.
String getName();
From source file:org.bimserver.servlets.UploadServlet.java
/**
 * Handles BimServer upload requests. Rejects requests from disallowed Origins (403), echoes
 * CORS headers, then parses a multipart body whose form fields (action, token, poid, comment,
 * topicId, sync, merge, compression, deserializerOid) configure how the file part is handled:
 * either stored via ServiceInterface.uploadFile (action == "file") or checked in against
 * project {@code poid} (optionally gzip/deflate-wrapped). Writes a JSON object with either
 * "fileId"/"topicId" or an error to the response.
 *
 * NOTE(review): form fields only take effect if they precede the file part in the multipart
 * body — the file branch uses the flags collected so far. Confirm clients send fields first.
 * NOTE(review): content type "text/json" is non-standard ("application/json" is conventional);
 * left unchanged since existing clients may depend on it.
 */
@Override public void service(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { if (request.getHeader("Origin") != null && !getBimServer().getServerSettingsCache().isHostAllowed(request.getHeader("Origin"))) { response.setStatus(403); return; } response.setHeader("Access-Control-Allow-Origin", request.getHeader("Origin")); response.setHeader("Access-Control-Allow-Headers", "Content-Type"); String token = (String) request.getSession().getAttribute("token"); ObjectNode result = OBJECT_MAPPER.createObjectNode(); response.setContentType("text/json"); try { boolean isMultipart = ServletFileUpload.isMultipartContent(request); long poid = -1; String comment = null; if (isMultipart) { ServletFileUpload upload = new ServletFileUpload(); FileItemIterator iter = upload.getItemIterator(request); InputStream in = null; String name = ""; long deserializerOid = -1; boolean merge = false; boolean sync = false; String compression = null; String action = null; long topicId = -1; while (iter.hasNext()) { FileItemStream item = iter.next(); if (item.isFormField()) { if ("action".equals(item.getFieldName())) { action = Streams.asString(item.openStream()); } else if ("token".equals(item.getFieldName())) { token = Streams.asString(item.openStream()); } else if ("poid".equals(item.getFieldName())) { poid = Long.parseLong(Streams.asString(item.openStream())); } else if ("comment".equals(item.getFieldName())) { comment = Streams.asString(item.openStream()); } else if ("topicId".equals(item.getFieldName())) { topicId = Long.parseLong(Streams.asString(item.openStream())); } else if ("sync".equals(item.getFieldName())) { sync = Streams.asString(item.openStream()).equals("true"); } else if ("merge".equals(item.getFieldName())) { merge = Streams.asString(item.openStream()).equals("true"); } else if ("compression".equals(item.getFieldName())) { compression = Streams.asString(item.openStream()); } else if 
("deserializerOid".equals(item.getFieldName())) { deserializerOid = Long.parseLong(Streams.asString(item.openStream())); } } else { name = item.getName(); in = item.openStream(); if ("file".equals(action)) { ServiceInterface serviceInterface = getBimServer().getServiceFactory() .get(token, AccessMethod.INTERNAL).get(ServiceInterface.class); SFile file = new SFile(); byte[] data = IOUtils.toByteArray(in); file.setData(data); file.setSize(data.length); file.setFilename(name); file.setMime(item.getContentType()); result.put("fileId", serviceInterface.uploadFile(file)); } else if (poid != -1) { InputStream realStream = null; if ("gzip".equals(compression)) { realStream = new GZIPInputStream(in); } else if ("deflate".equals(compression)) { realStream = new InflaterInputStream(in); } else { realStream = in; } InputStreamDataSource inputStreamDataSource = new InputStreamDataSource(realStream); inputStreamDataSource.setName(name); DataHandler ifcFile = new DataHandler(inputStreamDataSource); if (token != null) { if (topicId == -1) { ServiceInterface service = getBimServer().getServiceFactory() .get(token, AccessMethod.INTERNAL).get(ServiceInterface.class); long newTopicId = service.checkin(poid, comment, deserializerOid, -1L, name, ifcFile, merge, sync); result.put("topicId", newTopicId); } else { ServiceInterface service = getBimServer().getServiceFactory() .get(token, AccessMethod.INTERNAL).get(ServiceInterface.class); long newTopicId = service.checkinInitiated(topicId, poid, comment, deserializerOid, -1L, name, ifcFile, merge, true); result.put("topicId", newTopicId); } } } else { result.put("exception", "No poid"); } } } } } catch (Exception e) { LOGGER.error("", e); sendException(response, e); return; } response.getWriter().write(result.toString()); }
From source file:org.brutusin.rpc.http.RpcServlet.java
/** * * @param req//from www.jav a 2s . c o m * @param rpcRequest * @param service * @return * @throws Exception */ private Map<String, InputStream> getStreams(HttpServletRequest req, RpcRequest rpcRequest, HttpAction service) throws Exception { if (!FileUploadBase.isMultipartContent(new ServletRequestContext(req))) { return null; } int streamsNumber = getInputStreamsNumber(rpcRequest, service); boolean isResponseStreamed = service.isBinaryResponse(); FileItemIterator iter = (FileItemIterator) req.getAttribute(REQ_ATT_MULTIPART_ITERATOR); int count = 0; final Map<String, InputStream> map = new HashMap(); final File tempDirectory; if (streamsNumber > 1 || streamsNumber == 1 && isResponseStreamed) { tempDirectory = createTempUploadDirectory(); req.setAttribute(REQ_ATT_TEMPORARY_FOLDER, tempDirectory); } else { tempDirectory = null; } FileItemStream item = (FileItemStream) req.getAttribute(REQ_ATT_MULTIPART_CURRENT_ITEM); long availableLength = RpcConfig.getInstance().getMaxRequestSize(); while (item != null) { count++; long maxLength = Math.min(availableLength, RpcConfig.getInstance().getMaxFileSize()); if (count < streamsNumber || isResponseStreamed) { // if response is streamed all inputstreams have to be readed first File file = new File(tempDirectory, item.getFieldName()); FileOutputStream fos = new FileOutputStream(file); try { Miscellaneous.pipeSynchronously(new LimitedLengthInputStream(item.openStream(), maxLength), fos); } catch (MaxLengthExceededException ex) { if (maxLength == RpcConfig.getInstance().getMaxFileSize()) { throw new MaxLengthExceededException( "Upload part '" + item.getFieldName() + "' exceeds maximum length (" + RpcConfig.getInstance().getMaxFileSize() + " bytes)", RpcConfig.getInstance().getMaxFileSize()); } else { throw new MaxLengthExceededException("Request exceeds maximum length (" + RpcConfig.getInstance().getMaxRequestSize() + " bytes)", RpcConfig.getInstance().getMaxRequestSize()); } } map.put(item.getFieldName(), new 
MetaDataInputStream(new FileInputStream(file), item.getName(), item.getContentType(), file.length(), null)); availableLength -= file.length(); } else if (count == streamsNumber) { map.put(item.getFieldName(), new MetaDataInputStream(new LimitedLengthInputStream(item.openStream(), maxLength), item.getName(), item.getContentType(), null, null)); break; } req.setAttribute(REQ_ATT_MULTIPART_CURRENT_ITEM, item); if (iter.hasNext()) { item = iter.next(); } else { item = null; } } if (count != streamsNumber) { throw new IllegalArgumentException("Invalid multipart request received. Number of uploaded files (" + count + ") does not match expected (" + streamsNumber + ")"); } return map; }
From source file:org.celstec.arlearn2.upload.BlobStoreServlet.java
protected void doPost(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException { try {// w w w. j ava 2s . com Long runId = null; String account = null; ServletFileUpload upload = new ServletFileUpload(); res.setContentType("text/plain"); FileItemIterator iterator = upload.getItemIterator(req); while (iterator.hasNext()) { FileItemStream item = iterator.next(); InputStream stream = item.openStream(); if (item.isFormField()) { if ("runId".equals(item.getFieldName())) { runId = Long.parseLong(Streams.asString(stream)); } if ("account".equals(item.getFieldName())) { account = Streams.asString(stream); } } else { BlobKey blobkey = storeBlob(item.getContentType(), item.getName(), stream); if (blobkey != null) { System.out.println(blobkey); // File exists BlobKey oldkey = FilePathManager.getBlobKey(account, runId, item.getName()); if (oldkey != null) { FilePathManager.delete(oldkey); blobstoreService.delete(oldkey); } FilePathManager.addFile(runId, account, item.getName(), blobkey); } else { blobkey.toString(); } } } } catch (Exception ex) { throw new ServletException(ex); } }
From source file:org.celstec.arlearn2.upload.BlobStoreServletIncremental.java
protected void doPost(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException { try {/*w w w. ja v a 2 s. co m*/ Long runId = null; String account = null; String serverPath = null; boolean last = false; ServletFileUpload upload = new ServletFileUpload(); res.setContentType("text/plain"); FileItemIterator iterator = upload.getItemIterator(req); System.out.println("before while"); while (iterator.hasNext()) { System.out.println("in while"); FileItemStream item = iterator.next(); InputStream stream = item.openStream(); if (item.isFormField()) { if ("runId".equals(item.getFieldName())) { runId = Long.parseLong(Streams.asString(stream)); System.out.println("runid is " + runId); } if ("account".equals(item.getFieldName())) { account = Streams.asString(stream); System.out.println("account is " + account); } if ("last".equals(item.getFieldName())) { last = Boolean.parseBoolean(Streams.asString(stream)); System.out.println("last is " + last); } if ("serverPath".equals(item.getFieldName())) { serverPath = Streams.asString(stream); System.out.println("serverPath is " + serverPath); } } else { log.warning("Got an uploaded file: " + item.getFieldName() + ", name = " + item.getName()); AppEngineFile file = storeBlob(item.getContentType(), item.getName(), stream, last, serverPath); BlobKey blobkey = fileService.getBlobKey(file); if (blobkey != null) { // File exists BlobKey oldkey = FilePathManager.getBlobKey(account, runId, item.getName()); if (oldkey != null) { FilePathManager.delete(oldkey); blobstoreService.delete(oldkey); } FilePathManager.addFile(runId, account, item.getName(), blobkey); System.out.println(blobkey.toString()); } res.getWriter().write(file.getFullPath()); // else { // blobkey.toString(); // } } } } catch (Exception ex) { throw new ServletException(ex); } }
From source file:org.codelabor.system.file.web.servlet.FileUploadStreamServlet.java
@Override protected void upload(HttpServletRequest request, HttpServletResponse response) throws Exception { WebApplicationContext ctx = WebApplicationContextUtils .getRequiredWebApplicationContext(this.getServletContext()); FileManager fileManager = (FileManager) ctx.getBean("fileManager"); boolean isMultipart = ServletFileUpload.isMultipartContent(request); Map<String, Object> paramMap = RequestUtils.getParameterMap(request); if (logger.isDebugEnabled()) { logger.debug(paramMap.toString()); }// www .ja v a2 s.c om String mapId = (String) paramMap.get("mapId"); RepositoryType acceptedRepositoryType = repositoryType; String requestedRepositoryType = (String) paramMap.get("repositoryType"); if (StringUtils.isNotEmpty(requestedRepositoryType)) { acceptedRepositoryType = RepositoryType.valueOf(requestedRepositoryType); } if (isMultipart) { ServletFileUpload upload = new ServletFileUpload(); upload.setFileSizeMax(fileSizeMax); upload.setSizeMax(requestSizeMax); upload.setHeaderEncoding(characterEncoding); upload.setProgressListener(new FileUploadProgressListener()); try { FileItemIterator iter = upload.getItemIterator(request); while (iter.hasNext()) { FileItemStream fileItemSteam = iter.next(); if (logger.isDebugEnabled()) { logger.debug(fileItemSteam.toString()); } FileDTO fileDTO = null; if (fileItemSteam.isFormField()) { paramMap.put(fileItemSteam.getFieldName(), Streams.asString(fileItemSteam.openStream(), characterEncoding)); } else { if (fileItemSteam.getName() == null || fileItemSteam.getName().length() == 0) continue; // set DTO fileDTO = new FileDTO(); fileDTO.setMapId(mapId); fileDTO.setRealFilename(FilenameUtils.getName(fileItemSteam.getName())); if (acceptedRepositoryType == RepositoryType.FILE_SYSTEM) { fileDTO.setUniqueFilename(getUniqueFilename()); } fileDTO.setContentType(fileItemSteam.getContentType()); fileDTO.setRepositoryPath(realRepositoryPath); if (logger.isDebugEnabled()) { logger.debug(fileDTO.toString()); } 
UploadUtils.processFile(acceptedRepositoryType, fileItemSteam.openStream(), fileDTO); } if (fileDTO != null) fileManager.insertFile(fileDTO); } } catch (FileUploadException e) { e.printStackTrace(); logger.error(e.getMessage()); } catch (Exception e) { e.printStackTrace(); logger.error(e.getMessage()); } } else { paramMap = RequestUtils.getParameterMap(request); } try { processParameters(paramMap); } catch (Exception e) { e.printStackTrace(); logger.error(e.getMessage()); } dispatch(request, response, forwardPathUpload); }
From source file:org.collectionspace.chain.controller.WebUIRequest.java
private void initRequest(UIUmbrella umbrella, HttpServletRequest request, HttpServletResponse response, List<String> p) throws IOException, UIException { this.request = request; this.response = response; boolean isMultipart = ServletFileUpload.isMultipartContent(request); if (isMultipart) { // Create a new file upload handler ServletFileUpload upload = new ServletFileUpload(); // Parse the request FileItemIterator iter;//from w ww. ja v a 2 s . com try { iter = upload.getItemIterator(request); while (iter.hasNext()) { FileItemStream item = iter.next(); String name = item.getFieldName(); //InputStream stream = item.openStream(); if (item.isFormField()) { // System.out.println("Form field " + name + " with value " // + Streams.asString(stream) + " detected."); } else { // System.out.println("File field " + name + " with file name " // + item.getName() + " detected."); // Process the input stream contentHeaders = item.getHeaders(); uploadName = item.getName(); ByteArrayOutputStream byteOut = new ByteArrayOutputStream(); if (item != null) { InputStream stream = item.openStream(); IOUtils.copy(stream, byteOut); new TeeInputStream(stream, byteOut); } bytebody = byteOut.toByteArray(); } } } catch (FileUploadException e) { // TODO Auto-generated catch block e.printStackTrace(); } } else { body = IOUtils.toString(request.getInputStream(), "UTF-8"); } this.ppath = p.toArray(new String[0]); if (!(umbrella instanceof WebUIUmbrella)) throw new UIException("Bad umbrella"); this.umbrella = (WebUIUmbrella) umbrella; session = calculateSessionId(); }
From source file:org.csi.yucca.storage.datamanagementapi.service.MetadataService.java
/**
 * REST endpoint (POST /{tenant}) that creates a new bulk-dataset metadata record from a
 * multipart request: a JSON metadata part plus optional CSV data with encoding/separator/
 * skip-first-row options. Normalizes field names, validates the CSV, enforces the tenant's
 * dataset quota, stores metadata (plus a companion binary-attachment metadata record when a
 * "binary" field is present) and an API record in MongoDB, creates/publishes the API in the
 * store with per-tenant subscriptions, and finally writes the CSV rows into "DB_" + tenant.
 * Returns a CreateDatasetResponse serialized as JSON; errors are accumulated in the response
 * rather than thrown.
 *
 * NOTE(review): the "encoding" field must arrive before the file part in the multipart body,
 * since the file rows are read with that encoding — confirm client ordering.
 * NOTE(review): multipart parse failures are only printStackTrace'd, so a null
 * datasetMetadata would NPE at Metadata.fromJson below — consider failing fast.
 */
@POST @Path("/{tenant}") @Produces(MediaType.APPLICATION_JSON) public String createMetadata(@PathParam("tenant") String tenant, @Context HttpServletRequest request) throws NumberFormatException, UnknownHostException { log.debug("[MetadataService::createMetadata] - START"); String datasetMetadata = null; String encoding = null; String formatType = null; String csvSeparator = null; boolean skipFirstRow = false; String csvData = null; String fileName = null; try { ServletFileUpload upload = new ServletFileUpload(); FileItemIterator iterator = upload.getItemIterator(request); while (iterator.hasNext()) { FileItemStream item = iterator.next(); if (IMPORT_BULKDATASET_METADATA_REQ_KEY.equals(item.getFieldName())) datasetMetadata = read(item.openStream()); else if (IMPORT_BULKDATASET_ENCODING_REQ_KEY.equals(item.getFieldName())) encoding = read(item.openStream()); else if (IMPORT_BULKDATASET_FORMAT_TYPE_REQ_KEY.equals(item.getFieldName())) formatType = read(item.openStream()); else if (IMPORT_BULKDATASET_CSV_SEP_REQ_KEY.equals(item.getFieldName())) csvSeparator = read(item.openStream()); else if (IMPORT_BULKDATASET_CSV_SKIP_FIRS_ROW_REQ_KEY.equals(item.getFieldName())) skipFirstRow = new Boolean(read(item.openStream())); else if (IMPORT_BULKDATASET_FILE_REQ_KEY.equals(item.getFieldName())) { csvData = readFileRows(item.openStream(), encoding); fileName = item.getName(); } } } catch (Exception e) { e.printStackTrace(); } log.debug("[MetadataService::createMetadata] - encoding: " + encoding + ", formatType: " + formatType + ", csvSeparator: " + csvSeparator); Metadata metadata = Metadata.fromJson(datasetMetadata); if (fileName != null) metadata.getInfo().addFilename(fileName); CreateDatasetResponse createDatasetResponse = new CreateDatasetResponse(); metadata.setDatasetVersion(1); if (metadata.getConfigData() == null) metadata.setConfigData(new ConfigData()); metadata.getConfigData().setType(Metadata.CONFIG_DATA_TYPE_DATASET); 
metadata.getConfigData().setSubtype(Metadata.CONFIG_DATA_SUBTYPE_BULK_DATASET); metadata.getConfigData().setCurrent(1); if (metadata.getInfo() == null) metadata.setInfo(new Info()); if (metadata.getInfo().getFields() != null) { for (Field field : metadata.getInfo().getFields()) { field.setFieldName(Util.cleanStringCamelCase(field.getFieldName())); if (field != null && field.getDataType() == null) field.setDataType("string"); } } metadata.getInfo().setRegistrationDate(new Date()); try { List<SDPBulkInsertException> checkFileToWriteErrors = null; MongoDBDataUpload dataUpload = new MongoDBDataUpload(); if (csvData != null) { checkFileToWriteErrors = dataUpload.checkFileToWrite(csvData, csvSeparator, metadata, skipFirstRow); } if (checkFileToWriteErrors != null && checkFileToWriteErrors.size() > 0) { for (SDPBulkInsertException error : checkFileToWriteErrors) { createDatasetResponse.addErrorMessage(new ErrorMessage(error.getErrorCode(), error.getErrorMessage(), error.getErrorDetail())); } } else { MongoClient mongo = MongoSingleton.getMongoClient(); String supportDb = Config.getInstance().getDbSupport(); String supportDatasetCollection = Config.getInstance().getCollectionSupportDataset(); MongoDBMetadataDAO metadataDAO = new MongoDBMetadataDAO(mongo, supportDb, supportDatasetCollection); String supportApiCollection = Config.getInstance().getCollectionSupportApi(); MongoDBApiDAO apiDAO = new MongoDBApiDAO(mongo, supportDb, supportApiCollection); BasicDBObject searchTenantQuery = new BasicDBObject(); searchTenantQuery.put("tenantCode", tenant); DBCollection tenantCollection = mongo.getDB(supportDb).getCollection("tenant"); DBObject tenantData = tenantCollection.find(searchTenantQuery).one(); Long idTenant = ((Number) tenantData.get("idTenant")).longValue(); int maxDatasetNum = ((Number) tenantData.get("maxDatasetNum")).intValue(); if (maxDatasetNum > 0) { int numCurrentDataset = metadataDAO.countAllMetadata(tenant, true); log.info("[MetadataService::createMetadata] - 
tenant=" + tenant + " maxDatasetNum=" + maxDatasetNum + " numCurrentDataset=" + numCurrentDataset); //TODO if (numCurrentDataset >= maxDatasetNum) throw new MaxDatasetNumException("too many dataset"); } metadata.getConfigData().setIdTenant(idTenant); // binary metadata: create a metadata record specific for attachment Metadata binaryMetadata = null; if (metadata.getInfo().getFields() != null) { for (Field field : metadata.getInfo().getFields()) { if (field.getDataType().equals("binary")) { binaryMetadata = Metadata.createBinaryMetadata(metadata); break; } } } if (binaryMetadata != null) { Metadata binaryMetadataCreated = metadataDAO.createMetadata(binaryMetadata, null); metadata.getInfo().setBinaryDatasetVersion(binaryMetadataCreated.getDatasetVersion()); metadata.getInfo().setBinaryIdDataset(binaryMetadataCreated.getIdDataset()); } List<Tenantsharing> lista = new ArrayList<Tenantsharing>(); if (metadata.getInfo().getTenantssharing() != null) { Set<String> tenantSet = new TreeSet<String>(); for (Tenantsharing tenantInList : metadata.getInfo().getTenantssharing().getTenantsharing()) { if (!tenantInList.getTenantCode().equals(metadata.getConfigData().getTenantCode()) && !tenantSet.contains(metadata.getConfigData().getTenantCode()) && tenantInList.getIsOwner() != 1) { lista.add(tenantInList); tenantSet.add(tenantInList.getTenantCode()); } } } Tenantsharing owner = new Tenantsharing(); owner.setIdTenant(metadata.getConfigData().getIdTenant()); owner.setIsOwner(1); owner.setTenantCode(metadata.getConfigData().getTenantCode()); owner.setTenantName(metadata.getConfigData().getTenantCode()); // owner.setTenantDescription(metadata.getConfigData().get); lista.add(owner); Tenantsharing arrayTenant[] = new Tenantsharing[lista.size()]; arrayTenant = lista.toArray(arrayTenant); if (metadata.getInfo().getTenantssharing() == null) { Tenantssharing tenantssharing = new Tenantssharing(); metadata.getInfo().setTenantssharing(tenantssharing); } 
metadata.getInfo().getTenantssharing().setTenantsharing(arrayTenant); // opendata if (!"public".equals(metadata.getInfo().getVisibility())) { metadata.setOpendata(null); } Metadata metadataCreated = metadataDAO.createMetadata(metadata, null); MyApi api = MyApi.createFromMetadataDataset(metadataCreated); api.getConfigData().setType(Metadata.CONFIG_DATA_TYPE_API); api.getConfigData().setSubtype(Metadata.CONFIG_DATA_SUBTYPE_API_MULTI_BULK); MyApi apiCreated = apiDAO.createApi(api); createDatasetResponse.setMetadata(metadataCreated); createDatasetResponse.setApi(apiCreated); /* * Create api in the store */ String apiName = ""; try { apiName = StoreService.createApiforBulk(metadata, false, datasetMetadata); } catch (Exception duplicate) { if (duplicate.getMessage().toLowerCase().contains("duplicate")) { try { apiName = StoreService.createApiforBulk(metadata, true, datasetMetadata); } catch (Exception e) { log.error( "[MetadataService::createMetadata] - ERROR to update API in Store for Bulk. Message: " + duplicate.getMessage()); } } else { log.error( "[MetadataService::createMetadata] - ERROR in create or update API in Store for Bulk. 
Message: " + duplicate.getMessage()); } } try { StoreService.publishStore("1.0", apiName, "admin"); Set<String> tenantSet = new TreeSet<String>(); if (metadata.getInfo().getTenantssharing() != null) { for (Tenantsharing tenantSh : metadata.getInfo().getTenantssharing().getTenantsharing()) { tenantSet.add(tenantSh.getTenantCode()); String appName = "userportal_" + tenantSh.getTenantCode(); StoreService.addSubscriptionForTenant(apiName, appName); } } if (!tenantSet.contains(metadata.getConfigData().getTenantCode())) { String appName = "userportal_" + metadata.getConfigData().getTenantCode(); StoreService.addSubscriptionForTenant(apiName, appName); } } catch (Exception e) { log.error("[MetadataService::createMetadata] - ERROR in publish Api in store - message: " + e.getMessage()); } if (csvData != null) { try { dataUpload.writeFileToMongo(mongo, "DB_" + tenant, "data", metadataCreated); } catch (Exception e) { log.error("[MetadataService::createMetadata] - writeFileToMongo ERROR: " + e.getMessage()); createDatasetResponse.addErrorMessage(new ErrorMessage(e)); e.printStackTrace(); } } } } catch (MaxDatasetNumException ex) { log.error("[MetadataService::createMetadata] - MaxDatasetNumException ERROR: ", ex); createDatasetResponse.addErrorMessage(new ErrorMessage(ex)); } return createDatasetResponse.toJson(); }
From source file:org.csi.yucca.storage.datamanagementapi.service.MetadataService.java
/**
 * REST endpoint (POST /add/{tenant}/{datasetCode}) that appends uploaded CSV rows to an
 * existing bulk dataset: reads multipart fields (encoding, format type, separator,
 * skip-first-row flag, file), records the filename on the dataset's current metadata,
 * validates the CSV against that metadata and writes the rows into "DB_" + tenant.
 * Returns an UpdateDatasetResponse serialized as JSON; errors are accumulated, not thrown.
 *
 * NOTE(review): readCurrentMetadataByCode may return null for an unknown datasetCode, which
 * would NPE here — verify callers guarantee the dataset exists.
 * NOTE(review): the filename is persisted on the metadata BEFORE the CSV is validated, so a
 * rejected upload still records its filename; confirm this is intended.
 * NOTE(review): the "encoding" field must precede the file part in the multipart body.
 */
@POST @Path("/add/{tenant}/{datasetCode}") @Produces(MediaType.APPLICATION_JSON) public String addData(@PathParam("tenant") String tenant, @PathParam("datasetCode") String datasetCode, @Context HttpServletRequest request) throws NumberFormatException, UnknownHostException { log.debug("[MetadataService::addData] - START"); String encoding = null; String formatType = null; String csvSeparator = null; boolean skipFirstRow = false; String csvData = null; String fileName = null; try { ServletFileUpload upload = new ServletFileUpload(); FileItemIterator iterator = upload.getItemIterator(request); while (iterator.hasNext()) { FileItemStream item = iterator.next(); if (IMPORT_BULKDATASET_ENCODING_REQ_KEY.equals(item.getFieldName())) encoding = read(item.openStream()); else if (IMPORT_BULKDATASET_FORMAT_TYPE_REQ_KEY.equals(item.getFieldName())) formatType = read(item.openStream()); else if (IMPORT_BULKDATASET_CSV_SEP_REQ_KEY.equals(item.getFieldName())) csvSeparator = read(item.openStream()); else if (IMPORT_BULKDATASET_CSV_SKIP_FIRS_ROW_REQ_KEY.equals(item.getFieldName())) skipFirstRow = new Boolean(read(item.openStream())); else if (IMPORT_BULKDATASET_FILE_REQ_KEY.equals(item.getFieldName())) { csvData = readFileRows(item.openStream(), encoding); fileName = item.getName(); } } } catch (Exception e) { e.printStackTrace(); } log.debug("[MetadataService::addData] - encoding: " + encoding + ", formatType: " + formatType + ", csvSeparator: " + csvSeparator); MongoClient mongo = MongoSingleton.getMongoClient(); String supportDb = Config.getInstance().getDbSupport(); String supportDatasetCollection = Config.getInstance().getCollectionSupportDataset(); MongoDBMetadataDAO metadataDAO = new MongoDBMetadataDAO(mongo, supportDb, supportDatasetCollection); Metadata existingMetadata = metadataDAO.readCurrentMetadataByCode(datasetCode); existingMetadata.getInfo().addFilename(fileName); metadataDAO.updateMetadata(existingMetadata); UpdateDatasetResponse 
updateDatasetResponse = new UpdateDatasetResponse(); MongoDBDataUpload dataUpload = new MongoDBDataUpload(); List<SDPBulkInsertException> checkFileToWriteErrors = dataUpload.checkFileToWrite(csvData, csvSeparator, existingMetadata, skipFirstRow); if (checkFileToWriteErrors != null && checkFileToWriteErrors.size() > 0) { for (SDPBulkInsertException error : checkFileToWriteErrors) { updateDatasetResponse.addErrorMessage( new ErrorMessage(error.getErrorCode(), error.getErrorMessage(), error.getErrorDetail())); } } else { try { dataUpload.writeFileToMongo(mongo, "DB_" + tenant, "data", existingMetadata); } catch (Exception e) { log.error("[MetadataService::addData] - writeFileToMongo ERROR: " + e.getMessage()); updateDatasetResponse.addErrorMessage(new ErrorMessage(e)); e.printStackTrace(); } } return updateDatasetResponse.toJson(); }
From source file:org.cvit.cabig.dmr.cmef.server.SubmitJobResource.java
/**
 * Accepts a multipart job-submission form and builds a ComputationJob from it. The first
 * three parts are consumed positionally — title, description, comments — tracked by the
 * State enum; every subsequent part is either the "iframe" flag or a job parameter, where
 * file parts are stored via storeFile (their stored source becomes the value) and plain form
 * fields become literal parameter values. The job is persisted and started; the client gets
 * either an iframe HTML fragment or a see-other redirect to the new job's URL.
 *
 * NOTE(review): correctness depends on the browser submitting the form parts in document
 * order (title, description, comments first) — parts are not matched by field name until the
 * PARAMS state. Confirm all client forms preserve this ordering.
 */
@Post("multi") public Representation submitJob(Representation formRep) { RestletFileUpload upload = new RestletFileUpload(); ComputationJob job = new ComputationJob(); boolean inIframe = false; try { FileItemIterator items = upload.getItemIterator(formRep); List<ParameterValue> values = new ArrayList<ParameterValue>(); job.setParameterValues(values); State state = State.TITLE; while (items.hasNext()) { FileItemStream item = items.next(); InputStream itemStream = item.openStream(); switch (state) { case TITLE: job.setTitle(Streams.asString(itemStream)); state = State.DESC; break; case DESC: job.setDescription(Streams.asString(itemStream)); state = State.COMMENTS; break; case COMMENTS: job.setComment(Streams.asString(itemStream)); state = State.PARAMS; break; case PARAMS: if (item.getFieldName().equals("iframe")) { inIframe = Boolean.parseBoolean(Streams.asString(itemStream)); } else { Parameter param = new Parameter(); param.setName(parseParamName(item.getFieldName())); ParameterValue value = new ParameterValue(); if (item.isFormField()) { value.setValue(Streams.asString(itemStream)); } else { value.setValue(storeFile(item.getName(), itemStream).getSource()); } value.setJob(job); value.setParameter(param); param.setValue(value); values.add(value); } break; } } } catch (Exception e) { throw new ResourceException(Status.SERVER_ERROR_INTERNAL, "Exception processing submit job form: " + e.getMessage(), e); } job = addNewJob(job); ComputationJob startedJob = startJob(job); if (inIframe) { return new StringRepresentation(buildIframeResponse(job), MediaType.TEXT_HTML); } else { Reference jobRef = getNamespace().jobRef(entryName, modelName, getResolver().getJobName(startedJob.getId()), true); redirectSeeOther(jobRef); return new StringRepresentation("Job submitted, URL: " + jobRef.toString() + "."); } }
From source file:org.daxplore.presenter.server.servlets.AdminUploadServlet.java
@Override public void doPost(HttpServletRequest request, HttpServletResponse response) { try {/*from w w w. j a v a 2s . com*/ long time = System.nanoTime(); int statusCode = HttpServletResponse.SC_OK; response.setContentType("text/html; charset=UTF-8"); ServletFileUpload upload = new ServletFileUpload(); PersistenceManager pm = null; String prefix = null; try { FileItemIterator fileIterator = upload.getItemIterator(request); String fileName = ""; byte[] fileData = null; while (fileIterator.hasNext()) { FileItemStream item = fileIterator.next(); try (InputStream stream = item.openStream()) { if (item.isFormField()) { if (item.getFieldName().equals("prefix")) { prefix = Streams.asString(stream); } else { throw new BadRequestException("Form contains extra fields"); } } else { fileName = item.getName(); fileData = IOUtils.toByteArray(stream); } } } if (SharedResourceTools.isSyntacticallyValidPrefix(prefix)) { if (fileData != null && !fileName.equals("")) { pm = PMF.get().getPersistenceManager(); unzipAll(pm, prefix, fileData); } else { throw new BadRequestException("No file uploaded"); } } else { throw new BadRequestException("Request made with invalid prefix: '" + prefix + "'"); } logger.log(Level.INFO, "Unpacked new data for prefix '" + prefix + "' in " + ((System.nanoTime() - time) / 1000000000.0) + " seconds"); } catch (FileUploadException | IOException | BadRequestException e) { logger.log(Level.WARNING, e.getMessage(), e); statusCode = HttpServletResponse.SC_BAD_REQUEST; } catch (InternalServerException e) { logger.log(Level.SEVERE, e.getMessage(), e); statusCode = HttpServletResponse.SC_INTERNAL_SERVER_ERROR; } catch (DeadlineExceededException e) { logger.log(Level.SEVERE, "Timeout when uploading new data for prefix '" + prefix + "'", e); // the server is currently unavailable because it is overloaded (hopefully) statusCode = HttpServletResponse.SC_SERVICE_UNAVAILABLE; } finally { if (pm != null) { pm.close(); } } response.setStatus(statusCode); try 
(PrintWriter resWriter = response.getWriter()) { if (resWriter != null) { resWriter.write(Integer.toString(statusCode)); resWriter.close(); } } } catch (IOException | RuntimeException e) { logger.log(Level.SEVERE, "Unexpected exception: " + e.getMessage(), e); response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); } }