List of usage examples for org.springframework.web.multipart.support DefaultMultipartHttpServletRequest getParameter
@Override
@Nullable
public String getParameter(String name)
From source file:com.example.domain.menu.PlaceMenu.java
public PlaceMenu(DefaultMultipartHttpServletRequest dmhsRequest) { this.menuName = dmhsRequest.getParameter("menuName"); this.menuDescription = dmhsRequest.getParameter("menuDescription"); this.menuPrice = Integer.valueOf(dmhsRequest.getParameter("menuPrice")); }
From source file:org.exem.flamingo.web.filesystem.s3.S3BrowserController.java
@RequestMapping(value = "upload", method = RequestMethod.POST, consumes = MediaType.MULTIPART_FORM_DATA_VALUE) @ResponseStatus(HttpStatus.OK)//ww w .j a v a 2 s . c o m public Response uploadObject(HttpServletRequest req) throws IOException { DefaultMultipartHttpServletRequest request = (DefaultMultipartHttpServletRequest) req; String bucketName = request.getParameter(S3Constansts.BUCKETNAME); String key = request.getParameter(S3Constansts.KEY); MultipartFile file = request.getFile(S3Constansts.FILE); s3BrowserService.upload(bucketName, key, file); Response response = new Response(); response.setSuccess(true); return response; }
From source file:org.exem.flamingo.web.filesystem.hdfs.HdfsBrowserController.java
/** * ? ?? HDFS ? .// w w w . j ava2 s .c o m * * @return REST Response JAXB Object */ @RequestMapping(value = "upload", method = RequestMethod.POST, consumes = { "multipart/form-data" }) @ResponseStatus(HttpStatus.OK) public ResponseEntity<String> upload(HttpServletRequest req) throws IOException { Response response = new Response(); if (!(req instanceof DefaultMultipartHttpServletRequest)) { response.setSuccess(false); response.getError().setCause("Request is not a file upload."); response.getError().setMessage("Failed to upload a file."); String json = new ObjectMapper().writeValueAsString(response); return new ResponseEntity(json, HttpStatus.BAD_REQUEST); } InputStream inputStream; DefaultMultipartHttpServletRequest request = (DefaultMultipartHttpServletRequest) req; String username = SessionUtils.getUsername(); String dstPath = request.getParameter("dstPath"); MultipartFile uploadedFile = request.getFile("fileName"); String originalFilename = uploadedFile.getOriginalFilename(); String pathToUpload = getPathFilter(dstPath); String fullyQualifiedPath = pathToUpload.equals("/") ? pathToUpload + originalFilename : pathToUpload + SystemUtils.FILE_SEPARATOR + originalFilename; inputStream = uploadedFile.getInputStream(); //TODO HDFS ? ? /*List<String> paths = hdfsBrowserAuthService.getHdfsBrowserPatternAll(username); String hdfsPathPattern = hdfsBrowserAuthService.validateHdfsPathPattern(pathToUpload, paths); Map fileMap = new HashMap(); fileMap.put("username", username); fileMap.put("hdfsPathPattern", hdfsPathPattern); fileMap.put("condition", "uploadFile"); hdfsBrowserAuthService.getHdfsBrowserUserDirAuth(fileMap);*/ // Engine? Remote? ?. boolean isRemoteEngine = Boolean.parseBoolean(isRemote); // Remote ? ? ?, Remote? 
Store and Forward if (!isRemoteEngine) { fileSystemService.validatePath(pathToUpload); String namenodeAgentUrl = MessageFormatter .arrayFormat("http://{}:{}/remote/agent/transfer/upload?fullyQualifiedPath={}&username={}", new Object[] { namenodeAgentAddress, namenodeAgentPort, URLEncoder.encode(fullyQualifiedPath, "UTF-8"), username }) .getMessage(); HttpPost httpPost = new HttpPost(namenodeAgentUrl); HttpEntity reqEntity = new InputStreamEntity(inputStream); httpPost.setEntity(reqEntity); HttpResponse execute = httpClient.execute(httpPost); if (execute.getStatusLine().getStatusCode() == 500) { response.setSuccess(false); response.getError().setMessage("?? ?? ."); } else if (execute.getStatusLine().getStatusCode() == 600) { response.setSuccess(false); response.getError().setMessage("(/) ? ."); } else { response.setSuccess(true); } inputStream.close(); httpPost.releaseConnection(); } else { boolean saved; byte[] bytes = FileCopyUtils.copyToByteArray(inputStream); fileSystemService.validateBeforeUpload(pathToUpload, fullyQualifiedPath, bytes, username); saved = fileSystemService.save(pathToUpload, fullyQualifiedPath, bytes, username); response.setSuccess(saved); } response.getMap().put("directory", pathToUpload); String json = new ObjectMapper().writeValueAsString(response); HttpStatus statusCode = HttpStatus.OK; return new ResponseEntity(json, statusCode); }
From source file:com.ephesoft.dcma.workflow.service.webservices.EphesoftWebServiceAPI.java
@RequestMapping(value = "/extractFieldFromHocr", method = RequestMethod.POST) @ResponseBody//from ww w .j a v a 2 s. c o m public void extractFieldFromHocr(final HttpServletRequest req, final HttpServletResponse resp) { logger.info("Start processing web service for extractFieldFromHocr."); String respStr = WebServiceUtil.EMPTY_STRING; String workingDir = WebServiceUtil.EMPTY_STRING; if (req instanceof DefaultMultipartHttpServletRequest) { try { final String webServiceFolderPath = bsService.getWebServicesFolderPath(); workingDir = WebServiceUtil.createWebServiceWorkingDir(webServiceFolderPath); InputStream instream = null; OutputStream outStream = null; final DefaultMultipartHttpServletRequest multipartReq = (DefaultMultipartHttpServletRequest) req; String fieldValue = WebServiceUtil.EMPTY_STRING; for (final Enumeration<String> params = multipartReq.getParameterNames(); params .hasMoreElements();) { final String paramName = params.nextElement(); if (paramName.equalsIgnoreCase("fieldValue")) { fieldValue = multipartReq.getParameter(paramName); break; } } if (fieldValue == null || fieldValue.isEmpty()) { respStr = "Field Value not specified."; } if (respStr.isEmpty()) { final MultiValueMap<String, MultipartFile> fileMap = multipartReq.getMultiFileMap(); if (fileMap.size() == 1) { String hocrFileName = ""; for (final String fileName : fileMap.keySet()) { // only single html file is expected as input if (fileName.toLowerCase().indexOf(FileType.HTML.getExtension()) > -1) { // only HTML file is expected hocrFileName = fileName; final MultipartFile f = multipartReq.getFile(fileName); instream = f.getInputStream(); final File file = new File(workingDir + File.separator + fileName); outStream = new FileOutputStream(file); final byte buf[] = new byte[1024]; int len; while ((len = instream.read(buf)) > 0) { outStream.write(buf, 0, len); } if (instream != null) { instream.close(); } if (outStream != null) { outStream.close(); } break; } else { respStr = "Improper input to 
server. Expected only one html file. Returning without processing the results."; } } if (respStr.isEmpty()) { String fileName = workingDir + File.separator + hocrFileName; // generate hocr file from html file. HocrPages hocrPages = new HocrPages(); List<HocrPage> hocrPageList = hocrPages.getHocrPage(); HocrPage hocrPage = new HocrPage(); String pageID = "PG0"; hocrPage.setPageID(pageID); hocrPageList.add(hocrPage); bsService.hocrGenerationAPI(workingDir, "PG0", fileName, hocrPage); List<KVExtraction> kvExtractionList = kvFieldService.createKeyValueFieldAPI(fieldValue, hocrPage); final KVExtractionFieldPatterns patterns = new KVExtractionFieldPatterns(); final List<KVExtractionFieldPattern> pattern = patterns.getKVExtractionFieldPattern(); for (final KVExtraction eachKVExtraction : kvExtractionList) { final KVExtractionFieldPattern kvField = new KVExtractionFieldPattern(); kvField.setDistance(eachKVExtraction.getDistance()); kvField.setFetchValue(eachKVExtraction.getFetchValue().name()); kvField.setKeyPattern(eachKVExtraction.getKeyPattern()); kvField.setLength(eachKVExtraction.getLength()); kvField.setLocation(eachKVExtraction.getLocationType().name()); kvField.setMultiplier(eachKVExtraction.getMultiplier()); kvField.setNoOfWords(eachKVExtraction.getNoOfWords() == null ? 0 : eachKVExtraction.getNoOfWords()); kvField.setValuePattern(eachKVExtraction.getValuePattern()); kvField.setWidth(eachKVExtraction.getWidth()); kvField.setXOffset(eachKVExtraction.getXoffset()); kvField.setYOffset(eachKVExtraction.getYoffset()); pattern.add(kvField); } StreamResult result; try { result = new StreamResult(resp.getOutputStream()); resp.setStatus(HttpServletResponse.SC_OK); batchSchemaDao.getJAXB2Template().getJaxb2Marshaller().marshal(patterns, result); } catch (final IOException e) { try { resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "Internal Server error.Please check logs for further details." 
+ e.getMessage()); } catch (final IOException ioe) { } } } } else { respStr = "Improper input to server. Expected only one html file. Returning without processing the results."; } } } catch (final DCMAException dcmae) { respStr = "Error in processing request. Detailed exception is " + dcmae; } catch (final Exception e) { respStr = "Internal Server error.Please check logs for further details." + e; if (!workingDir.isEmpty()) { FileUtils.deleteDirectoryAndContentsRecursive(new File(workingDir).getParentFile()); } } } else { respStr = "Improper input to server. Expected multipart request. Returing without processing the results."; } if (!workingDir.isEmpty()) { FileUtils.deleteDirectoryAndContentsRecursive(new File(workingDir).getParentFile()); } if (!respStr.isEmpty()) { try { resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, respStr); } catch (final IOException ioe) { } } }
From source file:com.ephesoft.dcma.workflow.service.webservices.EphesoftWebServiceAPI.java
@RequestMapping(value = "/classifyBarcodeImage", method = RequestMethod.POST) @ResponseBody//from w w w . j a v a 2 s .c o m public void classifyBarcodeImage(final HttpServletRequest req, final HttpServletResponse resp) { logger.info("Start processing web service for classifyBarcode."); String respStr = WebServiceUtil.EMPTY_STRING; String workingDir = WebServiceUtil.EMPTY_STRING; if (req instanceof DefaultMultipartHttpServletRequest) { try { final String webServiceFolderPath = bsService.getWebServicesFolderPath(); workingDir = WebServiceUtil.createWebServiceWorkingDir(webServiceFolderPath); InputStream instream = null; OutputStream outStream = null; final DefaultMultipartHttpServletRequest multipartReq = (DefaultMultipartHttpServletRequest) req; String batchClassId = WebServiceUtil.EMPTY_STRING; for (final Enumeration<String> params = multipartReq.getParameterNames(); params .hasMoreElements();) { final String paramName = params.nextElement(); if (paramName.equalsIgnoreCase("batchClassId")) { batchClassId = multipartReq.getParameter(paramName); break; } } Map<BarcodeProperties, String> batchClassConfigMap = new HashMap<BarcodeProperties, String>(); if (batchClassId == null || batchClassId.isEmpty()) { respStr = "Batch Class identifier not specified."; } else { BatchClass bc = bcService.getBatchClassByIdentifier(batchClassId); if (bc == null) { respStr = "Batch class with the specified identifier does not exist."; } else { BatchPlugin barcodeReader = batchClassPPService.getPluginProperties(batchClassId, ICommonConstants.BARCODE_READER_PLUGIN); BatchPlugin docAssemblyPlugin = batchClassPPService.getPluginProperties(batchClassId, DocumentAssemblerConstants.DOCUMENT_ASSEMBLER_PLUGIN); if (barcodeReader == null || docAssemblyPlugin == null) { respStr = "Either Barcode Reader plugin or document assembly plugin does not exist for the specified batch id."; } else if (docAssemblyPlugin.getPluginConfigurations( DocumentAssemblerProperties.DA_BARCODE_CONFIDENCE) == null) { 
respStr = "Incomplete properties of the Document assembler plugin for the specified batch id."; } else if (barcodeReader .getPluginConfigurations(BarcodeProperties.BARCODE_VALID_EXTNS) == null || barcodeReader .getPluginConfigurations(BarcodeProperties.BARCODE_READER_TYPES) == null || barcodeReader.getPluginConfigurations(BarcodeProperties.MAX_CONFIDENCE) == null || barcodeReader .getPluginConfigurations(BarcodeProperties.MIN_CONFIDENCE) == null) { respStr = "Incomplete properties of the Barcode reader plugin for the specified batch id."; } } } if (respStr.isEmpty()) { batchClassConfigMap.put(BarcodeProperties.BARCODE_VALID_EXTNS, batchClassPPService.getPropertyValue(batchClassId, ICommonConstants.BARCODE_READER_PLUGIN, BarcodeProperties.BARCODE_VALID_EXTNS)); batchClassConfigMap.put(BarcodeProperties.BARCODE_READER_TYPES, batchClassPPService.getPropertyValue(batchClassId, ICommonConstants.BARCODE_READER_PLUGIN, BarcodeProperties.BARCODE_READER_TYPES)); batchClassConfigMap.put(BarcodeProperties.MAX_CONFIDENCE, batchClassPPService.getPropertyValue(batchClassId, ICommonConstants.BARCODE_READER_PLUGIN, BarcodeProperties.MAX_CONFIDENCE)); batchClassConfigMap.put(BarcodeProperties.MIN_CONFIDENCE, batchClassPPService.getPropertyValue(batchClassId, ICommonConstants.BARCODE_READER_PLUGIN, BarcodeProperties.MIN_CONFIDENCE)); final MultiValueMap<String, MultipartFile> fileMap = multipartReq.getMultiFileMap(); if (fileMap.size() == 1) { String tiffFileName = WebServiceUtil.EMPTY_STRING; for (final String fileName : fileMap.keySet()) { // only single tiff/tif file is expected as input if ((fileName.toLowerCase().indexOf(FileType.TIF.getExtension()) > -1 || fileName.toLowerCase().indexOf(FileType.TIFF.getExtension()) > -1)) { final MultipartFile f = multipartReq.getFile(fileName); instream = f.getInputStream(); final File file = new File(workingDir + File.separator + fileName); outStream = new FileOutputStream(file); final byte buf[] = new byte[1024]; int len; while ((len = 
instream.read(buf)) > 0) { outStream.write(buf, 0, len); } if (instream != null) { instream.close(); } if (outStream != null) { outStream.close(); } if (TIFFUtil.getTIFFPageCount(file.getAbsolutePath()) > 1) { respStr = "Improper input to server. Expected only one single page tiff file. Returning without processing the results."; } tiffFileName = file.getName(); break; } else { respStr = "Improper input to server. Expected only one tiff file. Returning without processing the results."; } } if (respStr.isEmpty()) { ObjectFactory objectFactory = new ObjectFactory(); Pages pages = new Pages(); List<Page> listOfPages = pages.getPage(); List<Document> xmlDocuments = new ArrayList<Document>(); Document doc = objectFactory.createDocument(); xmlDocuments.add(doc); doc.setPages(pages); Page pageType = objectFactory.createPage(); pageType.setIdentifier(EphesoftProperty.PAGE.getProperty() + "0"); pageType.setNewFileName(tiffFileName); listOfPages.add(pageType); String batchInstanceIdentifier = new File(workingDir).getName() + Math.random(); barcodeService.extractPageBarCodeAPI(xmlDocuments, batchInstanceIdentifier, workingDir, batchClassConfigMap); try { // invoke the document assembler plugin xmlDocuments = docAssembler.createDocumentAPI(DocumentClassificationFactory.BARCODE, batchClassId, listOfPages); Documents docs = new Documents(); docs.getDocument().addAll(xmlDocuments); StreamResult result; try { result = new StreamResult(resp.getOutputStream()); resp.setStatus(HttpServletResponse.SC_OK); batchSchemaDao.getJAXB2Template().getJaxb2Marshaller().marshal(docs, result); } catch (final IOException e) { respStr = "Internal Server error.Please check logs for further details." + e; } } catch (final DCMAApplicationException e) { respStr = "Error while executing plugin. Detailed exception is " + e; } } } else { respStr = "Improper input to server. Expected only one html file. 
Returning without processing the results."; } } } catch (final XmlMappingException xmle) { respStr = "Error in mapping input XML in the desired format. Please send it in the specified format. Detailed exception is " + xmle; } catch (final DCMAException dcmae) { respStr = "Error in processing request. Detailed exception is " + dcmae; } catch (final Exception e) { respStr = "Internal Server error.Please check logs for further details." + e; if (!workingDir.isEmpty()) { FileUtils.deleteDirectoryAndContentsRecursive(new File(workingDir).getParentFile()); } } } else { respStr = "Improper input to server. Expected multipart request. Returing without processing the results."; } if (!workingDir.isEmpty()) { FileUtils.deleteDirectoryAndContentsRecursive(new File(workingDir).getParentFile()); } if (!respStr.isEmpty()) { try { resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, respStr); } catch (final IOException ioe) { } } }
From source file:com.ephesoft.dcma.workflow.service.webservices.EphesoftWebServiceAPI.java
@RequestMapping(value = "/classifyImage", method = RequestMethod.POST) @ResponseBody//from ww w. jav a 2 s.c o m public void classifyImage(final HttpServletRequest req, final HttpServletResponse resp) { logger.info("Start processing web service for classifyImage."); String respStr = WebServiceUtil.EMPTY_STRING; String workingDir = WebServiceUtil.EMPTY_STRING; if (req instanceof DefaultMultipartHttpServletRequest) { try { final String webServiceFolderPath = bsService.getWebServicesFolderPath(); workingDir = WebServiceUtil.createWebServiceWorkingDir(webServiceFolderPath); InputStream instream = null; OutputStream outStream = null; final DefaultMultipartHttpServletRequest multipartReq = (DefaultMultipartHttpServletRequest) req; String batchClassId = WebServiceUtil.EMPTY_STRING; for (final Enumeration<String> params = multipartReq.getParameterNames(); params .hasMoreElements();) { final String paramName = params.nextElement(); if (paramName.equalsIgnoreCase("batchClassId")) { batchClassId = multipartReq.getParameter(paramName); break; } } if (batchClassId == null || batchClassId.isEmpty()) { respStr = "Batch Class identifier not specified."; } else { BatchClass bc = bcService.getBatchClassByIdentifier(batchClassId); if (bc == null) { respStr = "Batch class with the specified identifier does not exist."; } else { BatchPlugin createThumbPlugin = batchClassPPService.getPluginProperties(batchClassId, ImageMagicKConstants.CREATE_THUMBNAILS_PLUGIN); BatchPlugin classifyImgPlugin = batchClassPPService.getPluginProperties(batchClassId, ImageMagicKConstants.CLASSIFY_IMAGES_PLUGIN); BatchPlugin docAssemblyPlugin = batchClassPPService.getPluginProperties(batchClassId, DocumentAssemblerConstants.DOCUMENT_ASSEMBLER_PLUGIN); if (createThumbPlugin == null || classifyImgPlugin == null || docAssemblyPlugin == null) { respStr = "Either create Thumbnails plugin or Classify Image plugin or document assembly plugin does not exist for the specified batch id."; } else if 
(createThumbPlugin.getPluginConfigurations( ImageMagicProperties.CREATE_THUMBNAILS_OUTPUT_IMAGE_PARAMETERS) == null || createThumbPlugin.getPluginConfigurations( ImageMagicProperties.CREATE_THUMBNAILS_COMP_THUMB_WIDTH) == null || createThumbPlugin.getPluginConfigurations( ImageMagicProperties.CREATE_THUMBNAILS_COMP_THUMB_HEIGHT) == null) { respStr = "Create Thumbnails Height or width or output image parameters does not exist for the specified batch id."; } else if (classifyImgPlugin .getPluginConfigurations(ImageMagicProperties.CLASSIFY_IMAGES_COMP_METRIC) == null || classifyImgPlugin.getPluginConfigurations( ImageMagicProperties.CLASSIFY_IMAGES_MAX_RESULTS) == null || classifyImgPlugin.getPluginConfigurations( ImageMagicProperties.CLASSIFY_IMAGES_FUZZ_PERCNT) == null) { respStr = "Classify Images comp metric or fuzz percent or max results does not exist for the specified batch id."; } else if (docAssemblyPlugin .getPluginConfigurations(DocumentAssemblerProperties.DA_RULE_FP_MP_LP) == null || docAssemblyPlugin .getPluginConfigurations(DocumentAssemblerProperties.DA_RULE_FP) == null || docAssemblyPlugin .getPluginConfigurations(DocumentAssemblerProperties.DA_RULE_MP) == null || docAssemblyPlugin .getPluginConfigurations(DocumentAssemblerProperties.DA_RULE_LP) == null || docAssemblyPlugin .getPluginConfigurations(DocumentAssemblerProperties.DA_RULE_FP_LP) == null || docAssemblyPlugin .getPluginConfigurations(DocumentAssemblerProperties.DA_RULE_FP_MP) == null || docAssemblyPlugin.getPluginConfigurations( DocumentAssemblerProperties.DA_RULE_MP_LP) == null) { respStr = "Incomplete properties of the Document assembler plugin for the specified batch id."; } } } if (respStr.isEmpty()) { final String outputParams = batchClassPPService.getPropertyValue(batchClassId, ImageMagicKConstants.CREATE_THUMBNAILS_PLUGIN, ImageMagicProperties.CREATE_THUMBNAILS_OUTPUT_IMAGE_PARAMETERS); final MultiValueMap<String, MultipartFile> fileMap = multipartReq.getMultiFileMap(); if 
(fileMap.size() == 1) { String[][] sListOfTiffFiles = new String[fileMap.size()][3]; for (final String fileName : fileMap.keySet()) { // only single tiff file is expected as input if ((fileName.toLowerCase().indexOf(FileType.TIF.getExtension()) > -1 || fileName.toLowerCase().indexOf(FileType.TIFF.getExtension()) > -1)) { final MultipartFile f = multipartReq.getFile(fileName); instream = f.getInputStream(); final File file = new File(workingDir + File.separator + fileName); outStream = new FileOutputStream(file); final byte buf[] = new byte[1024]; int len; while ((len = instream.read(buf)) > 0) { outStream.write(buf, 0, len); } if (instream != null) { instream.close(); } if (outStream != null) { outStream.close(); } if (TIFFUtil.getTIFFPageCount(file.getAbsolutePath()) > 1) { respStr = "Improper input to server. Expected only one single page tiff file. Returning without processing the results."; } break; } else { respStr = "Improper input to server. Expected only one tiff file. Returning without processing the results."; } } if (respStr.isEmpty()) { String compareThumbnailH = batchClassPPService.getPropertyValue(batchClassId, ImageMagicKConstants.CREATE_THUMBNAILS_PLUGIN, ImageMagicProperties.CREATE_THUMBNAILS_COMP_THUMB_HEIGHT); String compareThumbnailW = batchClassPPService.getPropertyValue(batchClassId, ImageMagicKConstants.CREATE_THUMBNAILS_PLUGIN, ImageMagicProperties.CREATE_THUMBNAILS_COMP_THUMB_WIDTH); String batchId = new File(workingDir).getName() + Math.random(); ObjectFactory objectFactory = new ObjectFactory(); Pages pages = new Pages(); List<Page> listOfPages = pages.getPage(); String[] imageFiles = new File(workingDir).list(new CustomFileFilter(false, FileType.TIFF.getExtensionWithDot(), FileType.TIF.getExtensionWithDot())); for (int i = 0; i < imageFiles.length; i++) { String fileName = workingDir + File.separator + imageFiles[i]; String thumbFileName = "th" + FileType.TIF.getExtensionWithDot(); String fileTiffPath = workingDir + File.separator + 
thumbFileName; sListOfTiffFiles[i][0] = fileName; sListOfTiffFiles[i][1] = fileTiffPath; sListOfTiffFiles[i][2] = Integer.toString(i); Page pageType = objectFactory.createPage(); pageType.setIdentifier(EphesoftProperty.PAGE.getProperty() + i); pageType.setNewFileName(fileName); pageType.setOldFileName(fileName); pageType.setDirection(Direction.NORTH); pageType.setIsRotated(false); pageType.setComparisonThumbnailFileName(thumbFileName); listOfPages.add(pageType); } final BatchInstanceThread threadList = imService.createCompThumbForImage(batchId, workingDir, sListOfTiffFiles, outputParams, compareThumbnailH, compareThumbnailW); try { threadList.execute(); // invoke the Classification Image plugin String imMetric = batchClassPPService.getPropertyValue(batchClassId, ImageMagicKConstants.CLASSIFY_IMAGES_PLUGIN, ImageMagicProperties.CLASSIFY_IMAGES_COMP_METRIC); String imFuzz = batchClassPPService.getPropertyValue(batchClassId, ImageMagicKConstants.CLASSIFY_IMAGES_PLUGIN, ImageMagicProperties.CLASSIFY_IMAGES_FUZZ_PERCNT); String maxVal = batchClassPPService.getPropertyValue(batchClassId, ImageMagicKConstants.CLASSIFY_IMAGES_PLUGIN, ImageMagicProperties.CLASSIFY_IMAGES_MAX_RESULTS); imService.classifyImagesAPI(maxVal, imMetric, imFuzz, batchId, batchClassId, workingDir, listOfPages); // invoke the document assembler plugin List<Document> doc = docAssembler.createDocumentAPI( DocumentClassificationFactory.IMAGE, batchClassId, listOfPages); Documents docs = new Documents(); docs.getDocument().addAll(doc); StreamResult result; try { result = new StreamResult(resp.getOutputStream()); batchSchemaDao.getJAXB2Template().getJaxb2Marshaller().marshal(docs, result); } catch (final IOException e) { respStr = "Internal Server error.Please check logs for further details." + e; } } catch (final DCMAApplicationException e) { threadList.remove(); respStr = "Error while executing threadpool. 
Detailed exception is " + e; } catch (final DCMAException e) { threadList.remove(); respStr = "Error while executing threadpool. Detailed exception is " + e; } } } else { respStr = "Improper input to server. Expected only one tiff file. Returning without processing the results."; } } } catch (final Exception e) { respStr = "Internal Server error.Please check logs for further details." + e; if (!workingDir.isEmpty()) { FileUtils.deleteDirectoryAndContentsRecursive(new File(workingDir).getParentFile()); } } } else { respStr = "Improper input to server. Expected multipart request. Returning without processing the results."; } if (!workingDir.isEmpty()) { FileUtils.deleteDirectoryAndContentsRecursive(new File(workingDir).getParentFile()); } if (!respStr.isEmpty()) { try { resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, respStr); } catch (final IOException ioe) { } } }
From source file:com.ephesoft.dcma.workflow.service.webservices.EphesoftWebServiceAPI.java
@RequestMapping(value = "/classifyHocr", method = RequestMethod.POST) @ResponseBody/*from www . j a va2 s . c o m*/ public void classifyHocr(final HttpServletRequest req, final HttpServletResponse resp) { logger.info("Start processing web service for classifyHocr."); String respStr = WebServiceUtil.EMPTY_STRING; String workingDir = WebServiceUtil.EMPTY_STRING; if (req instanceof DefaultMultipartHttpServletRequest) { try { final String webServiceFolderPath = bsService.getWebServicesFolderPath(); workingDir = WebServiceUtil.createWebServiceWorkingDir(webServiceFolderPath); InputStream instream = null; OutputStream outStream = null; final DefaultMultipartHttpServletRequest multipartReq = (DefaultMultipartHttpServletRequest) req; String batchClassId = WebServiceUtil.EMPTY_STRING; for (final Enumeration<String> params = multipartReq.getParameterNames(); params .hasMoreElements();) { final String paramName = params.nextElement(); if (paramName.equalsIgnoreCase("batchClassId")) { batchClassId = multipartReq.getParameter(paramName); break; } } Map<LuceneProperties, String> batchClassConfigMap = new HashMap<LuceneProperties, String>(); if (batchClassId == null || batchClassId.isEmpty()) { respStr = "Batch Class identifier not specified."; } else { BatchClass bc = bcService.getBatchClassByIdentifier(batchClassId); if (bc == null) { respStr = "Batch class with the specified identifier does not exist."; } else { BatchPlugin searchClassPlugin = batchClassPPService.getPluginProperties(batchClassId, ICommonConstants.SEARCH_CLASSIFICATION_PLUGIN); BatchPlugin docAssemblyPlugin = batchClassPPService.getPluginProperties(batchClassId, DocumentAssemblerConstants.DOCUMENT_ASSEMBLER_PLUGIN); if (searchClassPlugin == null || docAssemblyPlugin == null) { respStr = "Either Search Classification plugin or document assembly plugin does not exist for the specified batch id."; } else if (docAssemblyPlugin .getPluginConfigurations(DocumentAssemblerProperties.DA_RULE_FP_MP_LP) == null || 
docAssemblyPlugin .getPluginConfigurations(DocumentAssemblerProperties.DA_RULE_FP) == null || docAssemblyPlugin .getPluginConfigurations(DocumentAssemblerProperties.DA_RULE_MP) == null || docAssemblyPlugin .getPluginConfigurations(DocumentAssemblerProperties.DA_RULE_LP) == null || docAssemblyPlugin .getPluginConfigurations(DocumentAssemblerProperties.DA_RULE_FP_LP) == null || docAssemblyPlugin .getPluginConfigurations(DocumentAssemblerProperties.DA_RULE_FP_MP) == null || docAssemblyPlugin.getPluginConfigurations( DocumentAssemblerProperties.DA_RULE_MP_LP) == null) { respStr = "Incomplete properties of the Document assembler plugin for the specified batch id."; } else if (searchClassPlugin .getPluginConfigurations(LuceneProperties.LUCENE_VALID_EXTNS) == null || searchClassPlugin .getPluginConfigurations(LuceneProperties.LUCENE_INDEX_FIELDS) == null || searchClassPlugin .getPluginConfigurations(LuceneProperties.LUCENE_STOP_WORDS) == null || searchClassPlugin .getPluginConfigurations(LuceneProperties.LUCENE_MIN_TERM_FREQ) == null || searchClassPlugin .getPluginConfigurations(LuceneProperties.LUCENE_MIN_DOC_FREQ) == null || searchClassPlugin .getPluginConfigurations(LuceneProperties.LUCENE_MIN_WORD_LENGTH) == null || searchClassPlugin .getPluginConfigurations(LuceneProperties.LUCENE_MAX_QUERY_TERMS) == null || searchClassPlugin .getPluginConfigurations(LuceneProperties.LUCENE_TOP_LEVEL_FIELD) == null || searchClassPlugin .getPluginConfigurations(LuceneProperties.LUCENE_NO_OF_PAGES) == null || searchClassPlugin .getPluginConfigurations(LuceneProperties.LUCENE_MAX_RESULT_COUNT) == null || searchClassPlugin.getPluginConfigurations( LuceneProperties.LUCENE_FIRST_PAGE_CONF_WEIGHTAGE) == null || searchClassPlugin.getPluginConfigurations( LuceneProperties.LUCENE_MIDDLE_PAGE_CONF_WEIGHTAGE) == null || searchClassPlugin.getPluginConfigurations( LuceneProperties.LUCENE_LAST_PAGE_CONF_WEIGHTAGE) == null) { respStr = "Incomplete properties of the Search Classification plugin for 
the specified batch id."; } } } if (respStr.isEmpty()) { batchClassConfigMap.put(LuceneProperties.LUCENE_VALID_EXTNS, batchClassPPService.getPropertyValue(batchClassId, ICommonConstants.SEARCH_CLASSIFICATION_PLUGIN, LuceneProperties.LUCENE_VALID_EXTNS)); batchClassConfigMap.put(LuceneProperties.LUCENE_INDEX_FIELDS, batchClassPPService.getPropertyValue(batchClassId, ICommonConstants.SEARCH_CLASSIFICATION_PLUGIN, LuceneProperties.LUCENE_INDEX_FIELDS)); batchClassConfigMap.put(LuceneProperties.LUCENE_STOP_WORDS, batchClassPPService.getPropertyValue(batchClassId, ICommonConstants.SEARCH_CLASSIFICATION_PLUGIN, LuceneProperties.LUCENE_STOP_WORDS)); batchClassConfigMap.put(LuceneProperties.LUCENE_MIN_TERM_FREQ, batchClassPPService.getPropertyValue(batchClassId, ICommonConstants.SEARCH_CLASSIFICATION_PLUGIN, LuceneProperties.LUCENE_MIN_TERM_FREQ)); batchClassConfigMap.put(LuceneProperties.LUCENE_MIN_DOC_FREQ, batchClassPPService.getPropertyValue(batchClassId, ICommonConstants.SEARCH_CLASSIFICATION_PLUGIN, LuceneProperties.LUCENE_MIN_DOC_FREQ)); batchClassConfigMap.put(LuceneProperties.LUCENE_MIN_WORD_LENGTH, batchClassPPService.getPropertyValue(batchClassId, ICommonConstants.SEARCH_CLASSIFICATION_PLUGIN, LuceneProperties.LUCENE_MIN_WORD_LENGTH)); batchClassConfigMap.put(LuceneProperties.LUCENE_MAX_QUERY_TERMS, batchClassPPService.getPropertyValue(batchClassId, ICommonConstants.SEARCH_CLASSIFICATION_PLUGIN, LuceneProperties.LUCENE_MAX_QUERY_TERMS)); batchClassConfigMap.put(LuceneProperties.LUCENE_TOP_LEVEL_FIELD, batchClassPPService.getPropertyValue(batchClassId, ICommonConstants.SEARCH_CLASSIFICATION_PLUGIN, LuceneProperties.LUCENE_TOP_LEVEL_FIELD)); batchClassConfigMap.put(LuceneProperties.LUCENE_NO_OF_PAGES, batchClassPPService.getPropertyValue(batchClassId, ICommonConstants.SEARCH_CLASSIFICATION_PLUGIN, LuceneProperties.LUCENE_NO_OF_PAGES)); batchClassConfigMap.put(LuceneProperties.LUCENE_MAX_RESULT_COUNT, batchClassPPService.getPropertyValue(batchClassId, 
ICommonConstants.SEARCH_CLASSIFICATION_PLUGIN, LuceneProperties.LUCENE_MAX_RESULT_COUNT)); batchClassConfigMap.put(LuceneProperties.LUCENE_FIRST_PAGE_CONF_WEIGHTAGE, batchClassPPService.getPropertyValue(batchClassId, ICommonConstants.SEARCH_CLASSIFICATION_PLUGIN, LuceneProperties.LUCENE_FIRST_PAGE_CONF_WEIGHTAGE)); batchClassConfigMap.put(LuceneProperties.LUCENE_MIDDLE_PAGE_CONF_WEIGHTAGE, batchClassPPService.getPropertyValue(batchClassId, ICommonConstants.SEARCH_CLASSIFICATION_PLUGIN, LuceneProperties.LUCENE_MIDDLE_PAGE_CONF_WEIGHTAGE)); batchClassConfigMap.put(LuceneProperties.LUCENE_LAST_PAGE_CONF_WEIGHTAGE, batchClassPPService.getPropertyValue(batchClassId, ICommonConstants.SEARCH_CLASSIFICATION_PLUGIN, LuceneProperties.LUCENE_LAST_PAGE_CONF_WEIGHTAGE)); final MultiValueMap<String, MultipartFile> fileMap = multipartReq.getMultiFileMap(); if (fileMap.size() == 1) { String hocrFileName = ""; for (final String fileName : fileMap.keySet()) { // only single html file is expected as input if (fileName.toLowerCase().indexOf(FileType.HTML.getExtension()) > -1) { // only HTML file is expected hocrFileName = fileName; final MultipartFile f = multipartReq.getFile(fileName); instream = f.getInputStream(); final File file = new File(workingDir + File.separator + fileName); outStream = new FileOutputStream(file); final byte buf[] = new byte[1024]; int len; while ((len = instream.read(buf)) > 0) { outStream.write(buf, 0, len); } if (instream != null) { instream.close(); } if (outStream != null) { outStream.close(); } break; } else { respStr = "Improper input to server. Expected only one html file. 
Returning without processing the results."; } } if (respStr.isEmpty()) { ObjectFactory objectFactory = new ObjectFactory(); Pages pages = new Pages(); List<Page> listOfPages = pages.getPage(); List<Document> xmlDocuments = new ArrayList<Document>(); Document doc = objectFactory.createDocument(); xmlDocuments.add(doc); doc.setPages(pages); String fileName = workingDir + File.separator + hocrFileName; // generate hocr file from html file. HocrPages hocrPages = new HocrPages(); List<HocrPage> hocrPageList = hocrPages.getHocrPage(); HocrPage hocrPage = new HocrPage(); String pageID = "PG0"; hocrPage.setPageID(pageID); hocrPageList.add(hocrPage); bsService.hocrGenerationAPI(workingDir, "PG0", fileName, hocrPage); Page pageType = objectFactory.createPage(); pageType.setIdentifier(EphesoftProperty.PAGE.getProperty() + "0"); pageType.setHocrFileName(hocrFileName); listOfPages.add(pageType); scService.generateConfidenceScoreAPI(xmlDocuments, hocrPages, workingDir, batchClassConfigMap, batchClassId); try { // invoke the document assembler plugin xmlDocuments = docAssembler.createDocumentAPI( DocumentClassificationFactory.SEARCHCLASSIFICATION, batchClassId, listOfPages); Documents docs = new Documents(); docs.getDocument().addAll(xmlDocuments); StreamResult result; try { result = new StreamResult(resp.getOutputStream()); resp.setStatus(HttpServletResponse.SC_OK); batchSchemaDao.getJAXB2Template().getJaxb2Marshaller().marshal(docs, result); } catch (final IOException e) { respStr = "Internal Server error.Please check logs for further details." + e; } } catch (final DCMAApplicationException e) { respStr = "Error while executing plugin. Detailed exception is " + e; } } } else { respStr = "Improper input to server. Expected only one html file. Returning without processing the results."; } } } catch (final XmlMappingException xmle) { respStr = "Error in mapping input XML in the desired format. Please send it in the specified format. 
Detailed exception is " + xmle; } catch (final DCMAException dcmae) { respStr = "Error in processing request. Detailed exception is " + dcmae; } catch (final Exception e) { respStr = "Internal Server error.Please check logs for further details." + e; if (!workingDir.isEmpty()) { FileUtils.deleteDirectoryAndContentsRecursive(new File(workingDir).getParentFile()); } } } else { respStr = "Improper input to server. Expected multipart request. Returing without processing the results."; } if (!workingDir.isEmpty()) { FileUtils.deleteDirectoryAndContentsRecursive(new File(workingDir).getParentFile()); } if (!respStr.isEmpty()) { try { resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, respStr); } catch (final IOException ioe) { } } }
From source file:com.ephesoft.dcma.workflow.service.webservices.EphesoftWebServiceAPI.java
/**
 * Web-service endpoint that splits uploaded multipage PDF/TIFF files into
 * single-page TIFFs and streams the results back as a ZIP archive.
 *
 * <p>Expects a multipart POST carrying the files plus optional parameters
 * {@code isGhostscript}, {@code inputParams} and {@code outputParams}.
 * On any failure the accumulated error text is sent as an HTTP 500.
 *
 * @param req  incoming request; must be a {@link DefaultMultipartHttpServletRequest}
 * @param resp response; receives either the ZIP stream or an error status
 */
@RequestMapping(value = "/splitMultipageFile", method = RequestMethod.POST)
@ResponseBody
public void splitMultipageFile(final HttpServletRequest req, final HttpServletResponse resp) {
    // Non-empty respStr at the end of the method signals an error to report.
    String respStr = WebServiceUtil.EMPTY_STRING;
    String workingDir = WebServiceUtil.EMPTY_STRING;
    try {
        if (req instanceof DefaultMultipartHttpServletRequest) {
            logger.info("Start spliting multipage file");
            // Per-request scratch area under the shared web-services folder.
            final String webServiceFolderPath = bsService.getWebServicesFolderPath();
            workingDir = WebServiceUtil.createWebServiceWorkingDir(webServiceFolderPath);
            final String outputDir = WebServiceUtil.createWebServiceOutputDir(workingDir);
            InputStream instream = null;
            OutputStream outStream = null;
            final DefaultMultipartHttpServletRequest multiPartRequest = (DefaultMultipartHttpServletRequest) req;
            // Math.random() appended to make the thread-group name unique per request.
            final BatchInstanceThread threadList = new BatchInstanceThread(
                    new File(workingDir).getName() + Math.random());
            String inputParams = WebServiceUtil.EMPTY_STRING, outputParams = WebServiceUtil.EMPTY_STRING;
            boolean isGSTool = false;
            // Pull the three recognized form parameters; all matching is case-insensitive.
            for (final Enumeration<String> params = multiPartRequest.getParameterNames(); params
                    .hasMoreElements();) {
                final String paramName = params.nextElement();
                if (paramName.equalsIgnoreCase("isGhostscript")) {
                    isGSTool = Boolean.parseBoolean(multiPartRequest.getParameter(paramName));
                    logger.info("Value for isGhostscript parameter is " + isGSTool);
                    continue;
                }
                if (paramName.equalsIgnoreCase("inputParams")) {
                    inputParams = multiPartRequest.getParameter(paramName);
                    logger.info("Value for inputParams parameter is " + inputParams);
                    continue;
                }
                if (paramName.equalsIgnoreCase("outputParams")) {
                    outputParams = multiPartRequest.getParameter(paramName);
                    logger.info("Value for outputParams parameter is " + outputParams);
                    continue;
                }
            }
            final MultiValueMap<String, MultipartFile> fileMap = multiPartRequest.getMultiFileMap();
            // perform validation on input fields
            String results = WebServiceUtil.validateSplitAPI(fileMap, isGSTool, outputParams, inputParams);
            if (!results.isEmpty()) {
                respStr = results;
            } else {
                // Copy each accepted upload (pdf/tif/tiff by name) into workingDir.
                for (final String fileName : fileMap.keySet()) {
                    if (fileName.toLowerCase().indexOf(FileType.PDF.getExtension()) > -1
                            || fileName.toLowerCase().indexOf(FileType.TIF.getExtension()) > -1
                            || fileName.toLowerCase().indexOf(FileType.TIFF.getExtension()) > -1) {
                        // GhostScript path only accepts PDFs; reject tif/tiff when isGSTool is set.
                        if (isGSTool && (fileName.toLowerCase().indexOf(FileType.TIF.getExtension()) > -1
                                || fileName.toLowerCase().indexOf(FileType.TIFF.getExtension()) > -1)) {
                            respStr = "Only PDF files expected with GhostScript tool.";
                            break;
                        }
                        final MultipartFile multipartFile = multiPartRequest.getFile(fileName);
                        instream = multipartFile.getInputStream();
                        final File file = new File(workingDir + File.separator + fileName);
                        outStream = new FileOutputStream(file);
                        // NOTE(review): manual buffered copy; streams are not closed in a
                        // finally block, so an IOException here leaks both handles.
                        final byte[] buf = new byte[WebServiceUtil.bufferSize];
                        int len;
                        while ((len = instream.read(buf)) > 0) {
                            outStream.write(buf, 0, len);
                        }
                        if (instream != null) {
                            instream.close();
                        }
                        if (outStream != null) {
                            outStream.close();
                        }
                    } else {
                        respStr = "Files other than tiff, tif and pdf formats are provided.";
                        break;
                    }
                }
                if (respStr.isEmpty()) {
                    // Queue one conversion task per uploaded file onto threadList.
                    for (final String fileName : fileMap.keySet()) {
                        final File file = new File(workingDir + File.separator + fileName);
                        if (isGSTool) {
                            logger.info(
                                    "Start spliting multipage file using ghost script for file :" + fileName);
                            imService.convertPdfToSinglePageTiffsUsingGSAPI(inputParams, file, outputParams,
                                    new File(outputDir + File.separator + fileName), threadList);
                        } else {
                            logger.info(
                                    "Start spliting multipage file using image magick for file :" + fileName);
                            imService.convertPdfOrMultiPageTiffToTiffUsingIM(inputParams, file, outputParams,
                                    new File(outputDir + File.separator + fileName), threadList);
                        }
                    }
                    try {
                        logger.info("Executing batch instance thread using thread pool");
                        // Blocks until all queued conversions finish.
                        threadList.execute();
                    } catch (final DCMAApplicationException e) {
                        // Conversion failed: drop queued work, clean scratch dir, rethrow
                        // so the outer catch turns it into an error response.
                        threadList.remove();
                        FileUtils.deleteDirectoryAndContentsRecursive(new File(workingDir).getParentFile());
                        throw new Exception(e.getMessage(), e);
                    }
                    ServletOutputStream out = null;
                    ZipOutputStream zout = null;
                    final String zipFileName = WebServiceUtil.serverOutputFolderName;
                    // NOTE(review): the trailing "\r\n" inside the content-type and header
                    // values looks unintentional — confirm before changing; kept as-is here.
                    resp.setContentType("application/x-zip\r\n");
                    resp.setHeader("Content-Disposition",
                            "attachment; filename=\"" + zipFileName + FileType.ZIP.getExtensionWithDot()
                                    + "\"\r\n");
                    try {
                        // Zip the output directory straight into the response stream.
                        out = resp.getOutputStream();
                        zout = new ZipOutputStream(out);
                        FileUtils.zipDirectory(outputDir, zout, zipFileName);
                        resp.setStatus(HttpServletResponse.SC_OK);
                    } catch (final IOException e) {
                        respStr = "Unable to process web service request.Please check you ghostscipt or imagemagick configuration.";
                    } finally {
                        if (zout != null) {
                            zout.close();
                        }
                        if (out != null) {
                            out.flush();
                        }
                        // Remove the whole per-request scratch tree.
                        FileUtils.deleteDirectoryAndContentsRecursive(new File(workingDir).getParentFile());
                    }
                }
            }
        } else {
            respStr = "Improper input to server. Expected multipart request. Returning without processing the results.";
        }
    } catch (Exception e) {
        respStr = "Internal Server error.Please check logs for further details." + e;
    }
    // Cleanup runs again here; deleteDirectoryAndContentsRecursive is presumably
    // tolerant of the directory already being gone — TODO confirm.
    if (!workingDir.isEmpty()) {
        FileUtils.deleteDirectoryAndContentsRecursive(new File(workingDir).getParentFile());
    }
    if (!respStr.isEmpty()) {
        try {
            resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, respStr);
        } catch (final IOException ioe) {
            // Ignored: response may already be committed after streaming started.
        }
    }
}
From source file:com.ephesoft.dcma.workflow.service.webservices.EphesoftWebServiceAPI.java
/**
 * Web-service endpoint that runs fuzzy-DB field extraction on a single
 * uploaded HOCR (HTML) file and returns the marshalled result XML as a ZIP.
 *
 * <p>Expects a multipart POST with exactly one {@code .html} file plus form
 * parameters {@code documentType}, {@code batchClassIdentifier} and
 * {@code hocrFile} (which must match the uploaded file name). Validation
 * failures and exceptions are reported via HTTP 500 with a message.
 *
 * @param req  incoming request; must be a {@link DefaultMultipartHttpServletRequest}
 * @param resp response; receives either the ZIP stream or an error status
 */
@RequestMapping(value = "/extractFuzzyDB", method = RequestMethod.POST)
@ResponseBody
public void extractFuzzyDB(final HttpServletRequest req, final HttpServletResponse resp) {
    logger.info("Start processing web service for extract fuzzy DB for given HOCR file");
    // Non-empty respStr at the end of the method signals an error to report.
    String respStr = "";
    String workingDir = "";
    Documents documents = null;
    if (req instanceof DefaultMultipartHttpServletRequest) {
        try {
            // Per-request scratch area under the shared web-services folder.
            final String webServiceFolderPath = bsService.getWebServicesFolderPath();
            workingDir = WebServiceUtil.createWebServiceWorkingDir(webServiceFolderPath);
            final String outputDir = WebServiceUtil.createWebServiceOutputDir(workingDir);
            InputStream instream = null;
            OutputStream outStream = null;
            final DefaultMultipartHttpServletRequest multiPartRequest = (DefaultMultipartHttpServletRequest) req;
            final MultiValueMap<String, MultipartFile> fileMap = multiPartRequest.getMultiFileMap();
            String htmlFile = WebServiceUtil.EMPTY_STRING;
            // Exactly one uploaded file is accepted, and it must be .html.
            if (fileMap.size() == 1) {
                for (final String fileName : fileMap.keySet()) {
                    if (fileName.endsWith(FileType.HTML.getExtensionWithDot())) {
                        htmlFile = fileName;
                    } else {
                        // NOTE(review): grammar in this user-facing message ("Please
                        // passed") — left unchanged; fixing it is a behavior change.
                        respStr = "Invalid file. Please passed the valid html file";
                        break;
                    }
                    // Copy the upload into workingDir with a manual buffered loop.
                    final MultipartFile multiPartFile = multiPartRequest.getFile(fileName);
                    instream = multiPartFile.getInputStream();
                    final File file = new File(workingDir + File.separator + fileName);
                    outStream = new FileOutputStream(file);
                    final byte[] buf = new byte[WebServiceUtil.bufferSize];
                    int len;
                    while ((len = instream.read(buf)) > 0) {
                        outStream.write(buf, 0, len);
                    }
                    // NOTE(review): not in a finally block — an IOException above
                    // leaks both streams.
                    if (instream != null) {
                        instream.close();
                    }
                    if (outStream != null) {
                        outStream.close();
                    }
                }
            } else {
                respStr = "Invalid number of files. We are supposed only one file";
            }
            String batchClassIdentifier = WebServiceUtil.EMPTY_STRING;
            String documentType = WebServiceUtil.EMPTY_STRING;
            String hocrFileName = WebServiceUtil.EMPTY_STRING;
            // Pull the three recognized form parameters; matching is case-insensitive.
            for (final Enumeration<String> params = multiPartRequest.getParameterNames(); params
                    .hasMoreElements();) {
                final String paramName = params.nextElement();
                if (paramName.equalsIgnoreCase("documentType")) {
                    documentType = multiPartRequest.getParameter(paramName);
                    logger.info("Value for documentType parameter is " + documentType);
                    continue;
                }
                if (paramName.equalsIgnoreCase("batchClassIdentifier")) {
                    batchClassIdentifier = multiPartRequest.getParameter(paramName);
                    logger.info("Value for batchClassIdentifier parameter is " + batchClassIdentifier);
                    continue;
                }
                if (paramName.equalsIgnoreCase("hocrFile")) {
                    hocrFileName = multiPartRequest.getParameter(paramName);
                    logger.info("Value for hocrFile parameter is " + hocrFileName);
                    continue;
                }
            }
            // The hocrFile parameter must name the uploaded html file.
            if (!hocrFileName.equalsIgnoreCase(htmlFile)) {
                respStr = "Please passed the valid hocr File";
            }
            // NOTE(review): each validation below overwrites respStr rather than
            // appending/short-circuiting, so only the LAST failing check is reported.
            String results = WebServiceUtil.validateExtractFuzzyDBAPI(workingDir, hocrFileName,
                    batchClassIdentifier, documentType);
            BatchClass batchClass = bcService.getBatchClassByIdentifier(batchClassIdentifier);
            if (batchClass == null) {
                respStr = "Please enter valid batch class identifier";
            } else {
                // The FUZZYDB plugin must exist and have all its properties configured.
                BatchPlugin fuzzyDBPlugin = batchClassPPService.getPluginProperties(batchClassIdentifier,
                        "FUZZYDB");
                if (fuzzyDBPlugin == null) {
                    respStr = "Fuzzy DB plugin is not configured for batch class : " + batchClassIdentifier
                            + " . Please select proper batch class";
                } else if (fuzzyDBPlugin.getPluginConfigurations(FuzzyDBProperties.FUZZYDB_STOP_WORDS) == null
                        || fuzzyDBPlugin
                                .getPluginConfigurations(FuzzyDBProperties.FUZZYDB_MIN_TERM_FREQ) == null
                        || fuzzyDBPlugin
                                .getPluginConfigurations(FuzzyDBProperties.FUZZYDB_MIN_WORD_LENGTH) == null
                        || fuzzyDBPlugin
                                .getPluginConfigurations(FuzzyDBProperties.FUZZYDB_MAX_QUERY_TERMS) == null
                        || fuzzyDBPlugin.getPluginConfigurations(FuzzyDBProperties.FUZZYDB_NO_OF_PAGES) == null
                        || fuzzyDBPlugin.getPluginConfigurations(FuzzyDBProperties.FUZZYDB_DB_DRIVER) == null
                        || fuzzyDBPlugin
                                .getPluginConfigurations(FuzzyDBProperties.FUZZYDB_CONNECTION_URL) == null
                        || fuzzyDBPlugin.getPluginConfigurations(FuzzyDBProperties.FUZZYDB_DB_USER_NAME) == null
                        || fuzzyDBPlugin.getPluginConfigurations(FuzzyDBProperties.FUZZYDB_DB_PASSWORD) == null
                        || fuzzyDBPlugin
                                .getPluginConfigurations(FuzzyDBProperties.FUZZYDB_THRESHOLD_VALUE) == null) {
                    respStr = "Incomplete properties of the Fuzzy DB plugin for the specified batch class id.";
                }
            }
            List<com.ephesoft.dcma.da.domain.FieldType> allFdTypes = fieldTypeService
                    .getFdTypeByDocTypeNameForBatchClass(documentType, batchClassIdentifier);
            if (allFdTypes == null) {
                respStr = "Please enter valid document type";
            }
            if (!results.isEmpty()) {
                respStr = results;
            } else {
                try {
                    // Build a single-page HOCR container and run the extraction.
                    HocrPages hocrPages = new HocrPages();
                    List<HocrPage> hocrPageList = hocrPages.getHocrPage();
                    HocrPage hocrPage = new HocrPage();
                    String pageID = "PG0";
                    hocrPage.setPageID(pageID);
                    hocrPageList.add(hocrPage);
                    bsService.hocrGenerationAPI(workingDir, pageID, workingDir + File.separator + hocrFileName,
                            hocrPage);
                    documents = fuzzyDBSearchService.extractDataBaseFields(batchClassIdentifier, documentType,
                            hocrPages);
                } catch (final DCMAException e) {
                    respStr = "Exception while extracting field using fuzzy db" + e;
                }
            }
            if (documents != null) {
                // Marshal the extraction result to OutputXML.xml in the output dir.
                // NOTE(review): this FileOutputStream is never closed explicitly.
                File outputxmlFile = new File(outputDir + File.separator + "OutputXML.xml");
                FileOutputStream stream = new FileOutputStream(outputxmlFile);
                StreamResult result = new StreamResult(stream);
                batchSchemaDao.getJAXB2Template().getJaxb2Marshaller().marshal(documents, result);
                ServletOutputStream out = null;
                ZipOutputStream zout = null;
                final String zipFileName = WebServiceUtil.serverOutputFolderName;
                // NOTE(review): trailing "\r\n" in these header values looks
                // unintentional — confirm before changing; kept as-is here.
                resp.setContentType("application/x-zip\r\n");
                resp.setHeader("Content-Disposition", "attachment; filename=\"" + zipFileName
                        + FileType.ZIP.getExtensionWithDot() + "\"\r\n");
                try {
                    // Zip the output directory straight into the response stream.
                    out = resp.getOutputStream();
                    zout = new ZipOutputStream(out);
                    FileUtils.zipDirectory(outputDir, zout, zipFileName);
                    resp.setStatus(HttpServletResponse.SC_OK);
                } catch (final IOException e) {
                    resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                            "Error in creating output zip file.Please try again." + e.getMessage());
                } finally {
                    if (zout != null) {
                        zout.close();
                    }
                    if (out != null) {
                        out.flush();
                    }
                    // Remove the whole per-request scratch tree.
                    FileUtils.deleteDirectoryAndContentsRecursive(new File(workingDir).getParentFile());
                }
            }
        } catch (final XmlMappingException xmle) {
            respStr = "Error in mapping input XML in the desired format. Please send it in the specified format. Detailed exception is "
                    + xmle;
        } catch (final DCMAException dcmae) {
            respStr = "Error in processing request. Detailed exception is " + dcmae;
        } catch (final Exception e) {
            respStr = "Internal Server error.Please check logs for further details." + e;
            if (!workingDir.isEmpty()) {
                FileUtils.deleteDirectoryAndContentsRecursive(new File(workingDir).getParentFile());
            }
        }
    } else {
        respStr = "Improper input to server. Expected multipart request. Returning without processing the results.";
    }
    if (!respStr.isEmpty()) {
        try {
            // Best-effort cleanup before reporting the error.
            FileUtils.deleteDirectoryAndContentsRecursive(new File(workingDir).getParentFile());
            resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, respStr);
        } catch (final IOException ioe) {
            // Ignored: response may already be committed after streaming started.
        }
    }
}
From source file:com.ephesoft.dcma.workflow.service.webservices.EphesoftWebServiceAPI.java
/**
 * Web-service endpoint that converts uploaded TIFF files to PDF and streams
 * the converted files back as a ZIP archive.
 *
 * <p>Expects a multipart POST with one or more {@code .tif}/{@code .tiff}
 * files plus form parameters {@code inputParams}, {@code outputParams} and
 * {@code pdfGeneratorEngine}. Failures are reported via HTTP 500.
 *
 * @param req  incoming request; must be a {@link DefaultMultipartHttpServletRequest}
 * @param resp response; receives either the ZIP stream or an error status
 */
@RequestMapping(value = "/convertTiffToPdf", method = RequestMethod.POST)
@ResponseBody
public void convertTiffToPdf(final HttpServletRequest req, final HttpServletResponse resp) {
    // NOTE(review): this log line was copy-pasted from extractFuzzyDB and does
    // not describe this endpoint.
    logger.info("Start processing web service for extract fuzzy DB for given HOCR file");
    // Non-empty respStr at the end of the method signals an error to report.
    String respStr = "";
    String workingDir = "";
    if (req instanceof DefaultMultipartHttpServletRequest) {
        try {
            // Per-request scratch area under the shared web-services folder.
            final String webServiceFolderPath = bsService.getWebServicesFolderPath();
            workingDir = WebServiceUtil.createWebServiceWorkingDir(webServiceFolderPath);
            final String outputDir = WebServiceUtil.createWebServiceOutputDir(workingDir);
            InputStream instream = null;
            OutputStream outStream = null;
            final DefaultMultipartHttpServletRequest multiPartRequest = (DefaultMultipartHttpServletRequest) req;
            final MultiValueMap<String, MultipartFile> fileMap = multiPartRequest.getMultiFileMap();
            if (!fileMap.keySet().isEmpty()) {
                for (final String fileName : fileMap.keySet()) {
                    // Empty accept-branch: only .tif/.tiff names pass through;
                    // anything else sets the error and stops.
                    if (fileName.endsWith(FileType.TIF.getExtensionWithDot())
                            || fileName.endsWith(FileType.TIFF.getExtensionWithDot())) {
                    } else {
                        respStr = "Invalid file. Please passed the valid tif/tiff file";
                        break;
                    }
                    // Copy the upload into workingDir with a manual buffered loop.
                    final MultipartFile multiPartFile = multiPartRequest.getFile(fileName);
                    instream = multiPartFile.getInputStream();
                    final File file = new File(workingDir + File.separator + fileName);
                    outStream = new FileOutputStream(file);
                    final byte[] buf = new byte[WebServiceUtil.bufferSize];
                    int len;
                    while ((len = instream.read(buf)) > 0) {
                        outStream.write(buf, 0, len);
                    }
                    // NOTE(review): not in a finally block — an IOException above
                    // leaks both streams.
                    if (instream != null) {
                        instream.close();
                    }
                    if (outStream != null) {
                        outStream.close();
                    }
                }
            } else {
                respStr = "Please passed the input files for processing";
            }
            if (respStr.isEmpty()) {
                String inputParams = WebServiceUtil.EMPTY_STRING;
                String outputParams = WebServiceUtil.EMPTY_STRING;
                String pdfGeneratorEngine = WebServiceUtil.EMPTY_STRING;
                // Pull the three recognized form parameters (case-insensitive).
                // NOTE(review): the log texts below ("batchClassIdentifier",
                // "hocrFile") are copy-paste leftovers and mislabel the values.
                for (final Enumeration<String> params = multiPartRequest.getParameterNames(); params
                        .hasMoreElements();) {
                    final String paramName = params.nextElement();
                    if (paramName.equalsIgnoreCase("inputParams")) {
                        inputParams = multiPartRequest.getParameter(paramName);
                        logger.info("Value for batchClassIdentifier parameter is " + inputParams);
                        continue;
                    }
                    if (paramName.equalsIgnoreCase("outputParams")) {
                        outputParams = multiPartRequest.getParameter(paramName);
                        logger.info("Value for hocrFile parameter is " + outputParams);
                        continue;
                    }
                    if (paramName.equalsIgnoreCase("pdfGeneratorEngine")) {
                        pdfGeneratorEngine = multiPartRequest.getParameter(paramName);
                        logger.info("Value for hocrFile parameter is " + pdfGeneratorEngine);
                        continue;
                    }
                }
                respStr = WebServiceUtil.validateConvertTiffToPdfAPI(pdfGeneratorEngine, inputParams,
                        outputParams);
                if (respStr.isEmpty()) {
                    // Queue one tif->pdf conversion per file found in workingDir.
                    Set<String> outputFileList = new HashSet<String>();
                    File file = new File(workingDir);
                    String[] fileList = file.list(new CustomFileFilter(false,
                            FileType.TIF.getExtensionWithDot(), FileType.TIFF.getExtensionWithDot()));
                    BatchInstanceThread batchInstanceThread = new BatchInstanceThread(workingDir);
                    for (String inputFile : fileList) {
                        String[] fileArray = new String[2];
                        // Output name: input name with its extension replaced by .pdf.
                        String outputFile = inputFile.substring(0, inputFile.lastIndexOf(WebServiceUtil.DOT))
                                + FileType.PDF.getExtensionWithDot();
                        fileArray[0] = workingDir + File.separator + inputFile;
                        fileArray[1] = workingDir + File.separator + outputFile;
                        outputFileList.add(outputFile);
                        imService.createTifToPDF(pdfGeneratorEngine, fileArray, batchInstanceThread,
                                inputParams, outputParams);
                    }
                    // Runs all queued conversions.
                    batchInstanceThread.execute();
                    // Move results from the scratch dir into the zip source dir.
                    for (String outputFile : outputFileList) {
                        FileUtils.copyFile(new File(workingDir + File.separator + outputFile),
                                new File(outputDir + File.separator + outputFile));
                    }
                    ServletOutputStream out = null;
                    ZipOutputStream zout = null;
                    final String zipFileName = WebServiceUtil.serverOutputFolderName;
                    // NOTE(review): trailing "\r\n" in these header values looks
                    // unintentional — confirm before changing; kept as-is here.
                    resp.setContentType("application/x-zip\r\n");
                    resp.setHeader("Content-Disposition", "attachment; filename=\"" + zipFileName
                            + FileType.ZIP.getExtensionWithDot() + "\"\r\n");
                    try {
                        // Zip the output directory straight into the response stream.
                        out = resp.getOutputStream();
                        zout = new ZipOutputStream(out);
                        FileUtils.zipDirectory(outputDir, zout, zipFileName);
                        resp.setStatus(HttpServletResponse.SC_OK);
                    } catch (final IOException e) {
                        resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                                "Error in creating output zip file.Please try again." + e.getMessage());
                    } finally {
                        if (zout != null) {
                            zout.close();
                        }
                        if (out != null) {
                            out.flush();
                        }
                        // Remove the whole per-request scratch tree.
                        FileUtils.deleteDirectoryAndContentsRecursive(new File(workingDir).getParentFile());
                    }
                }
            }
        } catch (final XmlMappingException xmle) {
            respStr = "Error in mapping input XML in the desired format. Please send it in the specified format. Detailed exception is "
                    + xmle;
        } catch (final DCMAException dcmae) {
            respStr = "Error in processing request. Detailed exception is " + dcmae;
        } catch (final Exception e) {
            respStr = "Internal Server error.Please check logs for further details." + e;
            if (!workingDir.isEmpty()) {
                FileUtils.deleteDirectoryAndContentsRecursive(new File(workingDir).getParentFile());
            }
        }
    } else {
        respStr = "Improper input to server. Expected multipart request. Returning without processing the results.";
    }
    if (!respStr.isEmpty()) {
        try {
            // Best-effort cleanup before reporting the error.
            FileUtils.deleteDirectoryAndContentsRecursive(new File(workingDir).getParentFile());
            resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, respStr);
        } catch (final IOException ioe) {
            // Ignored: response may already be committed after streaming started.
        }
    }
}