List of usage examples for org.apache.commons.httpclient DefaultHttpMethodRetryHandler DefaultHttpMethodRetryHandler
public DefaultHttpMethodRetryHandler(int paramInt, boolean paramBoolean)
From source file:com.kaltura.client.KalturaClientBase.java
private PostMethod createPostMethod(KalturaParams kparams, KalturaFiles kfiles, String url) { PostMethod method = new PostMethod(url); method.setRequestHeader("Accept", "text/xml,application/xml,*/*"); method.setRequestHeader("Accept-Charset", "utf-8,ISO-8859-1;q=0.7,*;q=0.5"); if (!kfiles.isEmpty()) { method = this.getPostMultiPartWithFiles(method, kparams, kfiles); } else {/* w ww . j av a 2s . com*/ method = this.addParams(method, kparams); } if (isAcceptGzipEncoding()) { method.addRequestHeader(HTTP_HEADER_ACCEPT_ENCODING, ENCODING_GZIP); } // Provide custom retry handler is necessary method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER, new DefaultHttpMethodRetryHandler(3, false)); return method; }
From source file:com.borhan.client.BorhanClientBase.java
private PostMethod createPostMethod(BorhanParams kparams, BorhanFiles kfiles, String url) throws UnsupportedEncodingException { PostMethod method = new PostMethod(url); method.setRequestHeader("Accept", "text/xml,application/xml,*/*"); method.setRequestHeader("Accept-Charset", "utf-8,ISO-8859-1;q=0.7,*;q=0.5"); if (!kfiles.isEmpty()) { method = this.getPostMultiPartWithFiles(method, kparams, kfiles); } else {/*w ww.jav a 2s.c o m*/ method = this.addParams(method, kparams); } if (isAcceptGzipEncoding()) { method.addRequestHeader(HTTP_HEADER_ACCEPT_ENCODING, ENCODING_GZIP); } // Provide custom retry handler is necessary method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER, new DefaultHttpMethodRetryHandler(3, false)); return method; }
From source file:de.ingrid.portal.interfaces.impl.WMSInterfaceImpl.java
public String getWMCDocument(String sessionId) { String response = null;/*from ww w . ja va 2 s .c o m*/ try { String urlStr = config.getString("interface_url", "http://localhost/mapbender/php/mod_portalCommunication_gt.php"); if (urlStr.indexOf("?") > 0) { urlStr = urlStr.concat("&PREQUEST=getWMC").concat("&PHPSESSID=" + sessionId); } else { urlStr = urlStr.concat("?PREQUEST=getWMC").concat("&PHPSESSID=" + sessionId); } if (log.isDebugEnabled()) { log.debug("MapBender Server Request: " + urlStr); } // Create an instance of HttpClient. HttpClient client = new HttpClient(); // Create a method instance. GetMethod method = new GetMethod(urlStr); // Provide custom retry handler is necessary method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER, new DefaultHttpMethodRetryHandler(2, false)); try { // Execute the method. int statusCode = client.executeMethod(method); if (statusCode != HttpStatus.SC_OK) { log.error("Requesting WMC faild for URL: " + urlStr); } // Read the response body. byte[] responseBody = method.getResponseBody(); response = new String(responseBody); } catch (HttpException e) { log.error("Fatal protocol violation: " + e.getMessage(), e); } catch (IOException e) { log.error("Fatal transport error: " + e.getMessage(), e); } finally { // Release the connection. 
method.releaseConnection(); } if (log.isDebugEnabled()) { log.debug("MapBender Server Response: " + response); } Document document = DocumentHelper.parseText(response); // check for valid server response String error = document.valueOf("//portal_communication/error"); if (error != null && error.length() > 0) { throw new Exception("MapBender Server Error: " + error); } String sessionMapBender = document.valueOf("//portal_communication/session"); if (sessionMapBender != null && !sessionMapBender.equals(sessionId)) { throw new Exception( "MapBender Server Error: session Id (" + sessionId + ") from request and session Id (" + sessionMapBender + ") from MapBender are not the same."); } String urlEncodedWmc = document.valueOf("//portal_communication/wmc"); return urlEncodedWmc; } catch (Exception e) { log.error(e.toString()); } return null; }
From source file:eu.eco2clouds.scheduler.accounting.client.AccountingClientHC.java
/**
 * Issues an HTTP PUT of {@code payload} (ECO2Clouds XML, UTF-8 encoded) to
 * {@code url}, with accounting headers derived from the BonFIRE user/group ids.
 *
 * NOTE(review): the {@code exception} parameter is assigned {@code true} in the
 * catch blocks below, but Java passes the {@code Boolean} reference by value and
 * {@code Boolean} is immutable, so the caller can never observe the change —
 * the flag is effectively dead. Fixing it would require changing the signature
 * (e.g. returning a result object), so it is only flagged here.
 *
 * @param url            target URL
 * @param payload        XML payload to send
 * @param bonfireUserId  user id placed in the request headers
 * @param bonfireGroupId group id placed in the request headers
 * @param exception      intended as an error out-flag; see note above — ineffective
 * @return the response body on HTTP 200, otherwise the empty string
 */
private String putMethod(String url, String payload, String bonfireUserId, String bonfireGroupId,
        Boolean exception) {
    // Create an instance of HttpClient.
    HttpClient client = getHttpClient();
    logger.debug("Connecting to: " + url);
    // Create a method instance.
    PutMethod method = new PutMethod(url);
    setHeaders(method, bonfireGroupId, bonfireUserId);
    //method.addRequestHeader("Content-Type", SchedulerDictionary.CONTENT_TYPE_ECO2CLOUDS_XML);
    // Provide custom retry handler if necessary.
    method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER,
            new DefaultHttpMethodRetryHandler(3, false));
    String response = "";
    try {
        // We set the payload.
        StringRequestEntity payloadEntity = new StringRequestEntity(payload,
                SchedulerDictionary.CONTENT_TYPE_ECO2CLOUDS_XML, "UTF-8");
        method.setRequestEntity(payloadEntity);
        // Execute the method.
        int statusCode = client.executeMethod(method);
        if (statusCode != HttpStatus.SC_OK) {
            //TODO test for this case...
            logger.warn("Get host information of testbeds: " + url + " failed: " + method.getStatusLine());
        } else {
            // Read the response body.
            byte[] responseBody = method.getResponseBody();
            response = new String(responseBody);
        }
    } catch (HttpException e) {
        logger.warn("Fatal protocol violation: " + e.getMessage());
        e.printStackTrace();
        exception = true; // NOTE(review): has no effect outside this method — see Javadoc.
    } catch (IOException e) {
        logger.warn("Fatal transport error: " + e.getMessage());
        e.printStackTrace();
        exception = true; // NOTE(review): has no effect outside this method — see Javadoc.
    } finally {
        // Release the connection.
        method.releaseConnection();
    }
    return response;
}
From source file:com.seer.datacruncher.services.ServiceScheduledJob.java
@Override protected synchronized void executeInternal(JobExecutionContext arg0) throws JobExecutionException { long jobId = arg0.getJobDetail().getJobDataMap().getLong("jobId"); JobsEntity jobEntity = jobsDao.find(jobId); if (!jobEntity.isWorking()) { if (jobsDao.setWorkStatus(jobId, true)) { try { long eventTriggerId = arg0.getJobDetail().getJobDataMap().getString("eventTriggerId") == null ? -1l// w w w . jav a 2 s .c o m : Long.parseLong(arg0.getJobDetail().getJobDataMap().getString("eventTriggerId")); if (eventTriggerId > 0) { EventTriggerEntity entity = eventTriggerDao.findEventTriggerById(eventTriggerId); String className = entity.getName(); try { String sourceCode = entity.getCode(); EventTrigger eventTrigger; String response; eventTrigger = (EventTrigger) CommonUtils.getClassInstance(className, "com.seer.datacruncher.eventtrigger.EventTrigger", EventTrigger.class, sourceCode); assert eventTrigger != null; response = eventTrigger.trigger(); log.info("Response From EventTrigger(" + className + ") :" + response); } catch (Exception e) { e.printStackTrace(); log.error("EventTrigger(" + className + ") :" + e.getMessage(), e); logDao.setErrorLogMessage("EventTrigger(" + className + ") :" + e.getMessage()); } catch (NoClassDefFoundError err) { log.error("EventTrigger(" + className + ") :" + err.getMessage(), err); logDao.setErrorLogMessage("EventTrigger(" + className + ") :" + err.getMessage()); } return; } int day = arg0.getJobDetail().getJobDataMap().getString("day") == null ? -1 : Integer.parseInt(arg0.getJobDetail().getJobDataMap().getString("day")); int month = arg0.getJobDetail().getJobDataMap().getString("month") == null ? 
-1 : Integer.parseInt(arg0.getJobDetail().getJobDataMap().getString("month")); if ((day > 0 && day != Calendar.getInstance().get(Calendar.DAY_OF_MONTH)) || (month > 0 && month != (Calendar.getInstance().get(Calendar.MONTH) + 1))) { return; } StandardFileSystemManager fsManager = new StandardFileSystemManager(); boolean isDataStream = true; try { fsManager.init(); long schemaId = arg0.getJobDetail().getJobDataMap().getLong("schemaId"); long schedulerId = arg0.getJobDetail().getJobDataMap().getLong("schedulerId"); //long jobId = arg0.getJobDetail().getJobDataMap().getLong("jobId"); long connectionId = arg0.getJobDetail().getJobDataMap().getLong("connectionId"); String datastream = ""; int idSchemaType = schemasDao.find(schemaId).getIdSchemaType(); TasksEntity taskEntity = tasksDao.find(schedulerId); //JobsEntity jobEntity = jobsDao.find(jobId); if (taskEntity.getIsOneShoot()) { jobEntity.setIsActive(0); jobsDao.update(jobEntity); } if (idSchemaType == SchemaType.GENERATION) { StreamGenerationUtils sgu = new StreamGenerationUtils(); datastream = sgu.getStream(schemaId); log.debug("Content stream: " + schemaId); if (datastream.trim().length() > 0) { log.debug("Datastream to validate: " + datastream); DatastreamsInput datastreamsInput = new DatastreamsInput(); String result = datastreamsInput.datastreamsInput(datastream, schemaId, null); log.debug("Validation result: " + result); } else { isDataStream = false; log.debug("No datastream create"); } } if (connectionId != 0) { int serviceId = Integer .parseInt(arg0.getJobDetail().getJobDataMap().getString("serviceId")); String hostName = arg0.getJobDetail().getJobDataMap().getString("ftpServerIp"); String port = arg0.getJobDetail().getJobDataMap().getString("port"); String userName = arg0.getJobDetail().getJobDataMap().getString("userName"); String password = arg0.getJobDetail().getJobDataMap().getString("password"); String inputDirectory = arg0.getJobDetail().getJobDataMap().getString("inputDirectory"); String fileName = 
arg0.getJobDetail().getJobDataMap().getString("fileName"); ConnectionsEntity conn; conn = connectionsDao.find(connectionId); if (inputDirectory == null || inputDirectory.trim().length() == 0) { inputDirectory = fileName; } else if (!(conn.getIdConnType() == GenericType.uploadTypeConn && serviceId == Servers.HTTP.getDbCode())) { inputDirectory = inputDirectory + "/" + fileName; } log.info("(jobId:" + jobEntity.getName() + ") - Trying to Server polling at server [" + hostName + ":" + port + "] with user[" + userName + "]."); String url = ""; if (serviceId == Servers.SAMBA.getDbCode()) { if (!fsManager.hasProvider("smb")) { fsManager.addProvider("smb", new SmbFileProvider()); } url = "smb://" + userName + ":" + password + "@" + hostName + ":" + port + "/" + inputDirectory; } else if (serviceId == Servers.HTTP.getDbCode()) { if (!fsManager.hasProvider("http")) { fsManager.addProvider("http", new HttpFileProvider()); } url = "http://" + hostName + ":" + port + "/" + inputDirectory; } else if (serviceId == Servers.FTP.getDbCode()) { if (!fsManager.hasProvider("ftp")) { fsManager.addProvider("ftp", new FtpFileProvider()); } url = "ftp://" + userName + ":" + password + "@" + hostName + ":" + port + "/" + inputDirectory; } log.info("url:" + url); final FileObject fileObject = fsManager.resolveFile(url); if (conn.getIdConnType() == GenericType.DownloadTypeConn) { if (conn.getFileDateTime() != null && conn.getFileDateTime().getTime() == fileObject .getContent().getLastModifiedTime()) { log.info("There is no New or Updated '" + fileName + "' file on server to validate. Returning ..."); return; } else { log.info("There is New or Updated '" + fileName + "' file on server to validate. 
Validating ..."); ConnectionsEntity connection = connectionsDao.find(connectionId); connection.setFileDateTime( new Date(fileObject.getContent().getLastModifiedTime())); ApplicationContext ctx = AppContext.getApplicationContext(); ConnectionsDao connDao = (ctx.getBean(ConnectionsDao.class)); if (connDao != null) { connDao.update(connection); } Map<String, byte[]> resultMap = new HashMap<String, byte[]>(); byte data[] = new byte[(int) fileObject.getContent().getSize()]; fileObject.getContent().getInputStream().read(data); resultMap.put(fileObject.getName().getBaseName(), data); Set<String> keySet = resultMap.keySet(); Iterator<String> itr = keySet.iterator(); while (itr.hasNext()) { String strFileName = itr.next(); String result = ""; try { Long longSchemaId = schemaId; SchemaEntity schemaEntity = schemasDao.find(longSchemaId); if (schemaEntity == null) { result = "No schema found in database with Id [" + longSchemaId + "]"; log.error(result); logDao.setErrorLogMessage(result); } else { if (strFileName.endsWith(FileExtensionType.ZIP.getAbbreviation())) { // Case 1: When user upload a Zip file - All ZIP entries should be validate one by one ZipInputStream inStream = null; try { inStream = new ZipInputStream( new ByteArrayInputStream(resultMap.get(fileName))); ZipEntry entry; while (!(isStreamClose(inStream)) && (entry = inStream.getNextEntry()) != null) { if (!entry.isDirectory()) { DatastreamsInput datastreamsInput = new DatastreamsInput(); datastreamsInput .setUploadedFileName(entry.getName()); byte[] byteInput = IOUtils.toByteArray(inStream); result += datastreamsInput.datastreamsInput( new String(byteInput), longSchemaId, byteInput); } inStream.closeEntry(); } log.debug(result); } catch (IOException ex) { result = "Error occured during fetch records from ZIP file."; log.error(result); logDao.setErrorLogMessage(result); } finally { if (inStream != null) inStream.close(); } } else { DatastreamsInput datastreamsInput = new DatastreamsInput(); 
datastreamsInput.setUploadedFileName(strFileName); result = datastreamsInput.datastreamsInput( new String(resultMap.get(strFileName)), longSchemaId, resultMap.get(strFileName)); log.debug(result); } } } catch (Exception ex) { ex.printStackTrace(); result = "Exception occured during process the message for xml file " + strFileName + " Error - " + ex.getMessage(); log.error(result); logDao.setErrorLogMessage(result); } } } } else if (isDataStream && (conn.getIdConnType() == GenericType.uploadTypeConn)) { File uploadFile = File.createTempFile(fileName, ".tmp"); try { BufferedWriter bw = new BufferedWriter(new FileWriter(uploadFile)); bw.write(datastream); bw.flush(); bw.close(); } catch (IOException ioex) { log.error("Datastream file can't be created"); logDao.setErrorLogMessage("Datastream file can't be created"); return; } if (serviceId == Servers.HTTP.getDbCode()) { try { HttpClient httpclient = new HttpClient(); PostMethod method = new PostMethod(url); method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER, new DefaultHttpMethodRetryHandler(3, false)); Part[] parts = new Part[] { new FilePart("file", uploadFile.getName(), uploadFile) }; method.setRequestEntity( new MultipartRequestEntity(parts, method.getParams())); method.setDoAuthentication(true); int statusCode = httpclient.executeMethod(method); String responseBody = new String(method.getResponseBody()); if (statusCode != HttpStatus.SC_OK) { throw new HttpException(method.getStatusLine().toString()); } else { System.out.println(responseBody); } method.releaseConnection(); } catch (Exception ex) { log.error("Exception occurred during uploading of file at HTTP Server: " + ex.getMessage()); logDao.setErrorLogMessage( "Exception occurred during uploading of file at HTTP Server: " + ex.getMessage()); } } else { try { FileObject localFileObject = fsManager .resolveFile(uploadFile.getAbsolutePath()); fileObject.copyFrom(localFileObject, Selectors.SELECT_SELF); System.out.println("File uploaded at : " + new 
Date()); if (uploadFile.exists()) { uploadFile.delete(); } } catch (Exception ex) { log.error( "Exception occurred during uploading of file: " + ex.getMessage()); logDao.setErrorLogMessage( "Exception occurred during uploading of file: " + ex.getMessage()); } } } } } catch (Exception ex) { log.error("Error " + ": " + ex.getMessage()); } finally { fsManager.close(); } } finally { jobsDao.setWorkStatus(jobId, false); } } else { log.error("Can not set " + jobEntity.getName() + "working."); } } else { log.debug("Job " + jobEntity.getName() + " is working."); } }
From source file:it.openprj.jValidator.services.ServiceScheduledJob.java
@Override protected synchronized void executeInternal(JobExecutionContext arg0) throws JobExecutionException { long jobId = arg0.getJobDetail().getJobDataMap().getLong("jobId"); JobsEntity jobEntity = jobsDao.find(jobId); if (!jobEntity.isWorking()) { if (jobsDao.setWorkStatus(jobId, true)) { try { long eventTriggerId = arg0.getJobDetail().getJobDataMap().getString("eventTriggerId") == null ? -1l/*w ww . j ava2s . c om*/ : Long.parseLong(arg0.getJobDetail().getJobDataMap().getString("eventTriggerId")); if (eventTriggerId > 0) { EventTriggerEntity entity = eventTriggerDao.findEventTriggerById(eventTriggerId); String className = entity.getName(); try { String sourceCode = entity.getCode(); EventTrigger eventTrigger; String response; eventTrigger = (EventTrigger) CommonUtils.getClassInstance(className, "it.openprj.jValidator.eventtrigger.EventTrigger", EventTrigger.class, sourceCode); assert eventTrigger != null; response = eventTrigger.trigger(); log.info("Response From EventTrigger(" + className + ") :" + response); } catch (Exception e) { e.printStackTrace(); log.error("EventTrigger(" + className + ") :" + e.getMessage(), e); logDao.setErrorLogMessage("EventTrigger(" + className + ") :" + e.getMessage()); } catch (NoClassDefFoundError err) { log.error("EventTrigger(" + className + ") :" + err.getMessage(), err); logDao.setErrorLogMessage("EventTrigger(" + className + ") :" + err.getMessage()); } return; } int day = arg0.getJobDetail().getJobDataMap().getString("day") == null ? -1 : Integer.parseInt(arg0.getJobDetail().getJobDataMap().getString("day")); int month = arg0.getJobDetail().getJobDataMap().getString("month") == null ? 
-1 : Integer.parseInt(arg0.getJobDetail().getJobDataMap().getString("month")); if ((day > 0 && day != Calendar.getInstance().get(Calendar.DAY_OF_MONTH)) || (month > 0 && month != (Calendar.getInstance().get(Calendar.MONTH) + 1))) { return; } StandardFileSystemManager fsManager = new StandardFileSystemManager(); boolean isDataStream = true; try { fsManager.init(); long schemaId = arg0.getJobDetail().getJobDataMap().getLong("schemaId"); long schedulerId = arg0.getJobDetail().getJobDataMap().getLong("schedulerId"); //long jobId = arg0.getJobDetail().getJobDataMap().getLong("jobId"); long connectionId = arg0.getJobDetail().getJobDataMap().getLong("connectionId"); String datastream = ""; int idSchemaType = schemasDao.find(schemaId).getIdSchemaType(); TasksEntity taskEntity = tasksDao.find(schedulerId); //JobsEntity jobEntity = jobsDao.find(jobId); if (taskEntity.getIsOneShoot()) { jobEntity.setIsActive(0); jobsDao.update(jobEntity); } if (idSchemaType == SchemaType.GENERATION) { StreamGenerationUtils sgu = new StreamGenerationUtils(); datastream = sgu.getStream(schemaId); log.debug("Content stream: " + schemaId); if (datastream.trim().length() > 0) { log.debug("Datastream to validate: " + datastream); DatastreamsInput datastreamsInput = new DatastreamsInput(); String result = datastreamsInput.datastreamsInput(datastream, schemaId, null); log.debug("Validation result: " + result); } else { isDataStream = false; log.debug("No datastream create"); } } if (connectionId != 0) { int serviceId = Integer .parseInt(arg0.getJobDetail().getJobDataMap().getString("serviceId")); String hostName = arg0.getJobDetail().getJobDataMap().getString("ftpServerIp"); String port = arg0.getJobDetail().getJobDataMap().getString("port"); String userName = arg0.getJobDetail().getJobDataMap().getString("userName"); String password = arg0.getJobDetail().getJobDataMap().getString("password"); String inputDirectory = arg0.getJobDetail().getJobDataMap().getString("inputDirectory"); String fileName = 
arg0.getJobDetail().getJobDataMap().getString("fileName"); ConnectionsEntity conn; conn = connectionsDao.find(connectionId); if (inputDirectory == null || inputDirectory.trim().length() == 0) { inputDirectory = fileName; } else if (!(conn.getIdConnType() == GenericType.uploadTypeConn && serviceId == Servers.HTTP.getDbCode())) { inputDirectory = inputDirectory + "/" + fileName; } log.info("(jobId:" + jobEntity.getName() + ") - Trying to Server polling at server [" + hostName + ":" + port + "] with user[" + userName + "]."); String url = ""; if (serviceId == Servers.SAMBA.getDbCode()) { if (!fsManager.hasProvider("smb")) { fsManager.addProvider("smb", new SmbFileProvider()); } url = "smb://" + userName + ":" + password + "@" + hostName + ":" + port + "/" + inputDirectory; } else if (serviceId == Servers.HTTP.getDbCode()) { if (!fsManager.hasProvider("http")) { fsManager.addProvider("http", new HttpFileProvider()); } url = "http://" + hostName + ":" + port + "/" + inputDirectory; } else if (serviceId == Servers.FTP.getDbCode()) { if (!fsManager.hasProvider("ftp")) { fsManager.addProvider("ftp", new FtpFileProvider()); } url = "ftp://" + userName + ":" + password + "@" + hostName + ":" + port + "/" + inputDirectory; } log.info("url:" + url); final FileObject fileObject = fsManager.resolveFile(url); if (conn.getIdConnType() == GenericType.DownloadTypeConn) { if (conn.getFileDateTime() != null && conn.getFileDateTime().getTime() == fileObject .getContent().getLastModifiedTime()) { log.info("There is no New or Updated '" + fileName + "' file on server to validate. Returning ..."); return; } else { log.info("There is New or Updated '" + fileName + "' file on server to validate. 
Validating ..."); ConnectionsEntity connection = connectionsDao.find(connectionId); connection.setFileDateTime( new Date(fileObject.getContent().getLastModifiedTime())); ApplicationContext ctx = AppContext.getApplicationContext(); ConnectionsDao connDao = (ctx.getBean(ConnectionsDao.class)); if (connDao != null) { connDao.update(connection); } Map<String, byte[]> resultMap = new HashMap<String, byte[]>(); byte data[] = new byte[(int) fileObject.getContent().getSize()]; fileObject.getContent().getInputStream().read(data); resultMap.put(fileObject.getName().getBaseName(), data); Set<String> keySet = resultMap.keySet(); Iterator<String> itr = keySet.iterator(); while (itr.hasNext()) { String strFileName = itr.next(); String result = ""; try { Long longSchemaId = schemaId; SchemaEntity schemaEntity = schemasDao.find(longSchemaId); if (schemaEntity == null) { result = "No schema found in database with Id [" + longSchemaId + "]"; log.error(result); logDao.setErrorLogMessage(result); } else { if (strFileName.endsWith(FileExtensionType.ZIP.getAbbreviation())) { // Case 1: When user upload a Zip file - All ZIP entries should be validate one by one ZipInputStream inStream = null; try { inStream = new ZipInputStream( new ByteArrayInputStream(resultMap.get(fileName))); ZipEntry entry; while (!(isStreamClose(inStream)) && (entry = inStream.getNextEntry()) != null) { if (!entry.isDirectory()) { DatastreamsInput datastreamsInput = new DatastreamsInput(); datastreamsInput .setUploadedFileName(entry.getName()); byte[] byteInput = IOUtils.toByteArray(inStream); result += datastreamsInput.datastreamsInput( new String(byteInput), longSchemaId, byteInput); } inStream.closeEntry(); } log.debug(result); } catch (IOException ex) { result = "Error occured during fetch records from ZIP file."; log.error(result); logDao.setErrorLogMessage(result); } finally { if (inStream != null) inStream.close(); } } else { DatastreamsInput datastreamsInput = new DatastreamsInput(); 
datastreamsInput.setUploadedFileName(strFileName); result = datastreamsInput.datastreamsInput( new String(resultMap.get(strFileName)), longSchemaId, resultMap.get(strFileName)); log.debug(result); } } } catch (Exception ex) { ex.printStackTrace(); result = "Exception occured during process the message for xml file " + strFileName + " Error - " + ex.getMessage(); log.error(result); logDao.setErrorLogMessage(result); } } } } else if (isDataStream && (conn.getIdConnType() == GenericType.uploadTypeConn)) { File uploadFile = File.createTempFile(fileName, ".tmp"); try { BufferedWriter bw = new BufferedWriter(new FileWriter(uploadFile)); bw.write(datastream); bw.flush(); bw.close(); } catch (IOException ioex) { log.error("Datastream file can't be created"); logDao.setErrorLogMessage("Datastream file can't be created"); return; } if (serviceId == Servers.HTTP.getDbCode()) { try { HttpClient httpclient = new HttpClient(); PostMethod method = new PostMethod(url); method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER, new DefaultHttpMethodRetryHandler(3, false)); Part[] parts = new Part[] { new FilePart("file", uploadFile.getName(), uploadFile) }; method.setRequestEntity( new MultipartRequestEntity(parts, method.getParams())); method.setDoAuthentication(true); int statusCode = httpclient.executeMethod(method); String responseBody = new String(method.getResponseBody()); if (statusCode != HttpStatus.SC_OK) { throw new HttpException(method.getStatusLine().toString()); } else { System.out.println(responseBody); } method.releaseConnection(); } catch (Exception ex) { log.error("Exception occurred during uploading of file at HTTP Server: " + ex.getMessage()); logDao.setErrorLogMessage( "Exception occurred during uploading of file at HTTP Server: " + ex.getMessage()); } } else { try { FileObject localFileObject = fsManager .resolveFile(uploadFile.getAbsolutePath()); fileObject.copyFrom(localFileObject, Selectors.SELECT_SELF); System.out.println("File uploaded at : " + new 
Date()); if (uploadFile.exists()) { uploadFile.delete(); } } catch (Exception ex) { log.error( "Exception occurred during uploading of file: " + ex.getMessage()); logDao.setErrorLogMessage( "Exception occurred during uploading of file: " + ex.getMessage()); } } } } } catch (Exception ex) { log.error("Error " + ": " + ex.getMessage()); } finally { fsManager.close(); } } finally { jobsDao.setWorkStatus(jobId, false); } } else { log.error("Can not set " + jobEntity.getName() + "working."); } } else { log.debug("Job " + jobEntity.getName() + " is working."); } }
From source file:de.ingrid.portal.interfaces.impl.WMSInterfaceImpl.java
/**
 * Pushes a Web Map Context (WMC) document to the MapBender server for the
 * given PHP session. Errors are logged, never rethrown.
 *
 * @param wmc       the WMC document to transmit
 * @param sessionId PHP session id identifying the MapBender session
 */
public void setWMCDocument(String wmc, String sessionId) {
    String urlStr = config.getString("interface_url",
            "http://localhost/mapbender/php/mod_portalCommunication_gt.php");
    // Append request parameters, respecting an existing query string.
    if (urlStr.indexOf("?") > 0) {
        urlStr = urlStr.concat("&PREQUEST=setWMC").concat("&PHPSESSID=" + sessionId);
    } else {
        urlStr = urlStr.concat("?PREQUEST=setWMC").concat("&PHPSESSID=" + sessionId);
    }
    // Create an instance of HttpClient.
    HttpClient client = new HttpClient();
    // Create a method instance.
    PostMethod method = new PostMethod(urlStr);
    // Retry up to 2 times, but never resend a request that already reached the server.
    method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER,
            new DefaultHttpMethodRetryHandler(2, false));
    NameValuePair[] data = { new NameValuePair("wmc", wmc) };
    method.setRequestBody(data);
    try {
        // Execute the method.
        int statusCode = client.executeMethod(method);
        if (statusCode != HttpStatus.SC_OK) {
            log.error("Sending WMC failed for URL: " + urlStr);
        }
        // Read the response body.
        byte[] responseBody = method.getResponseBody();
        if (log.isDebugEnabled()) {
            log.debug("MapBender Server Response: " + new String(responseBody));
        }
        // Deal with the response.
        // Use caution: ensure correct character encoding and is not binary data
        Document document = DocumentHelper.parseText(new String(responseBody));
        // check for valid server response
        String error = document.valueOf("//portal_communication/error");
        if (error != null && error.length() > 0) {
            throw new Exception("WMS Server Error: " + error);
        }
        String success = document.valueOf("//portal_communication/success");
        // FIX: the original tested "error == null || success.length() == 0", which
        // rejected every error-free response (error is always null/empty here) and
        // could NPE when the success element was missing. Test the success value.
        if (success == null || success.length() == 0) {
            throw new Exception("WMS Server Error: Cannot find success message from server. message was: "
                    + new String(responseBody));
        }
    } catch (HttpException e) {
        log.error("Fatal protocol violation: " + e.getMessage(), e);
    } catch (IOException e) {
        log.error("Fatal transport error: " + e.getMessage(), e);
    } catch (Exception e) {
        log.error("Fatal error: " + e.getMessage(), e);
    } finally {
        // Release the connection.
        method.releaseConnection();
    }
}
From source file:com.xmlcalabash.library.HttpRequest.java
private DeleteMethod doDelete() { DeleteMethod method = new DeleteMethod(requestURI.toASCIIString()); // Provide custom retry handler is necessary method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER, new DefaultHttpMethodRetryHandler(3, false)); for (Header header : headers) { method.addRequestHeader(header); }/*from w w w. ja v a 2s .c o m*/ return method; }
From source file:it.intecs.pisa.openCatalogue.solr.SolrHandler.java
public SaxonDocument getStats(String collectionId) throws UnsupportedEncodingException, IOException, SaxonApiException, Exception { HttpClient client = new HttpClient(); HttpMethod method;//www . j a v a2 s .co m String fq = !collectionId.isEmpty() ? "fq=parentIdentifier%3D" + collectionId + "&" : ""; String urlStr = this.solrHost + "/select?q=*%3A*&" + fq + "wt=xml&stats=true&" + "stats.field=beginPosition&" + "stats.field=endPosition&" + "stats.field=orbitNumber&" + "stats.field=acquisitionStation&" + "facet.field=productType&" + "facet.field=platformShortName&" + "facet.field=platformSerialIdentifier&" + "facet.field=instrument&" + "facet.field=sensorType&" + "facet.field=compositeType&" + "facet.field=processingLevel&" + "facet.field=orbitType&" + "stats.field=resolution&" + "facet.field=spectralRange&" + "stats.field=wavelengths&" + "facet.field=useLimitation&" + "facet.field=hasSecurityConstraints&" + "facet.field=organisationName&" + "facet.field=dissemination&" + "facet.field=parentIdentifier&" + "facet.field=productionStatus&" + "facet.field=acquisitionType&" + "stats.field=orbitNumber&" + "facet.field=orbitDirection&" + "stats.field=track&" + "stats.field=frame&" + "facet.field=swathIdentifier&" + "stats.field=cloudCover&" + "stats.field=snowCover&" + "facet.field=productQualityDegradation&" + "facet.field=productQualityDegradationTag&" + "facet.field=processorName&" + "facet.field=processingCenter&" + "stats.field=processingDate&" + "facet.field=sensorMode&" + "facet.field=archivingCenter&" + "facet.field=processingMode&" + "facet.field=acquisitionStation&" + "facet.field=acquisitionSubType&" + "stats.field=startTimeFromAscendingNode&" + "stats.field=completionTimeFromAscendingNode&" + "stats.field=illuminationAzimuthAngle&" + "stats.field=illuminationZenithAngle&" + "stats.field=illuminationElevationAngle&" + "facet.field=polarisationMode&" + "facet.field=polarisationChannels&" + "facet.field=antennaLookDirection&" + "stats.field=minimumIncidenceAngle&" 
+ "stats.field=maximumIncidenceAngle&" + "stats.field=dopplerFrequency&" + "stats.field=incidenceAngleVariation&" + "rows=0&indent=true&facet=on&facet.mincount=1"; Log.debug("The following search is goint to be executed:" + urlStr); // Create a method instance. method = new GetMethod(urlStr); // Provide custom retry handler is necessary method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER, new DefaultHttpMethodRetryHandler(3, false)); // Execute the method. int statusCode = client.executeMethod(method); SaxonDocument solrResponse = new SaxonDocument(method.getResponseBodyAsString()); Log.debug(solrResponse.getXMLDocumentString()); if (statusCode != HttpStatus.SC_OK) { Log.error("Method failed: " + method.getStatusLine()); String errorMessage = (String) solrResponse.evaluatePath("//lst[@name='error']/str[@name='msg']/text()", XPathConstants.STRING); Log.error(solrResponse.getXMLDocumentString()); throw new Exception(errorMessage); } return solrResponse; }
From source file:com.xmlcalabash.library.HttpRequest.java
/**
 * Configures a PUT or POST method with a single-part request body built from
 * the {@code c:body} element: installs the retry handler, validates the
 * content type against any Content-Type request header, copies the configured
 * headers, serializes the body per its content type (base64-decoded text,
 * XML-to-JSON, serialized XML, or raw text), and attaches it as a UTF-8
 * string entity.
 *
 * @param method the PUT/POST method to populate (mutated in place)
 * @param body   the {@code c:body} element supplying content type, encoding,
 *               charset, and the content itself
 */
private void doPutOrPostSinglepart(EntityEnclosingMethod method, XdmNode body) {
    // ATTENTION: This doesn't handle multipart, that's done entirely separately
    // Retry up to 3 times, but never resend a request that already reached the server.
    method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER, new DefaultHttpMethodRetryHandler(3, false));
    // Check for consistency of content-type: c:body must declare one, and it
    // must agree (case-insensitively) with any explicit Content-Type header.
    contentType = body.getAttributeValue(_content_type);
    if (contentType == null) {
        throw new XProcException(step.getNode(), "Content-type on c:body is required.");
    }
    if (headerContentType != null && !headerContentType.equals(contentType.toLowerCase())) {
        throw XProcException.stepError(20);
    }
    for (Header header : headers) {
        method.addRequestHeader(header);
    }
    // FIXME: This sucks rocks. I want to write the data to be posted, not provide some way to read it
    String postContent = null;
    String encoding = body.getAttributeValue(_encoding);
    try {
        if ("base64".equals(encoding)) {
            // Base64-encoded body: decode using the declared charset.
            String charset = body.getAttributeValue(_charset);
            // FIXME: is utf-8 the right default?
            if (charset == null) {
                charset = "utf-8";
            }
            String escapedContent = decodeBase64(body, charset);
            StringWriter writer = new StringWriter();
            writer.write(escapedContent);
            writer.close();
            postContent = writer.toString();
        } else {
            if (jsonContentType(contentType)) {
                // JSON content type: convert the XML body representation to JSON text.
                postContent = XMLtoJSON.convert(body);
            } else if (xmlContentType(contentType)) {
                // XML content type: the body must be well-formed document content;
                // serialize its children with the runtime's serializer.
                Serializer serializer = makeSerializer();
                if (!S9apiUtils.isDocumentContent(body.axisIterator(Axis.CHILD))) {
                    throw XProcException.stepError(22);
                }
                Vector<XdmNode> content = new Vector<XdmNode>();
                XdmSequenceIterator iter = body.axisIterator(Axis.CHILD);
                while (iter.hasNext()) {
                    XdmNode node = (XdmNode) iter.next();
                    content.add(node);
                }
                // FIXME: set serializer properties appropriately!
                StringWriter writer = new StringWriter();
                serializer.setOutputWriter(writer);
                S9apiUtils.serialize(runtime, content, serializer);
                writer.close();
                postContent = writer.toString();
            } else {
                // Any other content type: only text children are allowed; concatenate them.
                StringWriter writer = new StringWriter();
                XdmSequenceIterator iter = body.axisIterator(Axis.CHILD);
                while (iter.hasNext()) {
                    XdmNode node = (XdmNode) iter.next();
                    if (node.getNodeKind() != XdmNodeKind.TEXT) {
                        throw XProcException.stepError(28);
                    }
                    writer.write(node.getStringValue());
                }
                writer.close();
                postContent = writer.toString();
            }
        }
        // Attach the assembled content as a UTF-8 string entity.
        StringRequestEntity requestEntity = new StringRequestEntity(postContent, contentType, "UTF-8");
        method.setRequestEntity(requestEntity);
    } catch (IOException ioe) {
        throw new XProcException(ioe);
    } catch (SaxonApiException sae) {
        throw new XProcException(sae);
    }
}