List of usage examples for org.apache.commons.httpclient HttpMethod getResponseBodyAsStream
public abstract InputStream getResponseBodyAsStream() throws IOException;
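All examples below follow the same basic call sequence. As a reference point, here is a minimal, self-contained sketch of that sequence against the Commons HttpClient 3.x API (the URL is a placeholder): execute the method, check the status code, read the body as a stream rather than a string, and release the connection in a finally block. Streaming via getResponseBodyAsStream() avoids buffering the whole body in memory, which getResponseBodyAsString() would do.

import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;

import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.HttpStatus;
import org.apache.commons.httpclient.methods.GetMethod;

public class ResponseStreamExample {
    public static void main(String[] args) throws Exception {
        HttpClient client = new HttpClient();
        GetMethod method = new GetMethod("http://example.com/"); // placeholder URL
        try {
            int status = client.executeMethod(method);
            if (status == HttpStatus.SC_OK) {
                // stream the body instead of loading it into memory at once
                InputStream in = method.getResponseBodyAsStream();
                BufferedReader reader = new BufferedReader(new InputStreamReader(in, "UTF-8"));
                String line;
                while ((line = reader.readLine()) != null) {
                    System.out.println(line);
                }
            }
        } finally {
            // always return the connection to the pool
            method.releaseConnection();
        }
    }
}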
From source file: org.openrdf.repository.sparql.query.SPARQLGraphQuery.java
public void evaluate(RDFHandler handler) throws QueryEvaluationException, RDFHandlerException {
    boolean complete = false;
    try {
        HttpMethod response = getResponse();
        try {
            RDFParser parser = getParser(response);
            parser.setRDFHandler(handler);
            parser.parse(response.getResponseBodyAsStream(), getUrl());
            complete = true;
        } catch (HttpException e) {
            throw new QueryEvaluationException(e);
        } catch (RDFParseException e) {
            throw new QueryEvaluationException(e);
        } catch (RDFHandlerException e) {
            throw new QueryEvaluationException(e);
        } finally {
            if (!complete) {
                response.abort();
            }
        }
    } catch (IOException e) {
        throw new QueryEvaluationException(e);
    }
}
From source file: org.openrdf.repository.sparql.query.SPARQLTupleQuery.java
public TupleQueryResult evaluate() throws QueryEvaluationException {
    try {
        BackgroundTupleResult result = null;
        HttpMethod response = getResponse();
        try {
            InputStream in = response.getResponseBodyAsStream();
            result = new BackgroundTupleResult(parser, in, response);
            execute(result);
            InsertBindingSetCursor cursor = new InsertBindingSetCursor(result, getBindings());
            List<String> list = new ArrayList<String>(result.getBindingNames());
            list.addAll(getBindingNames());
            return new TupleQueryResultImpl(list, cursor);
        } catch (HttpException e) {
            throw new QueryEvaluationException(e);
        } finally {
            if (result == null) {
                response.abort();
            }
        }
    } catch (IOException e) {
        throw new QueryEvaluationException(e);
    }
}
From source file: org.openrdf.repository.sparql.query.SPARQLTupleQuery.java
public void evaluate(TupleQueryResultHandler handler)
        throws QueryEvaluationException, TupleQueryResultHandlerException {
    try {
        boolean complete = false;
        HttpMethod response = getResponse();
        try {
            parser.setTupleQueryResultHandler(handler);
            parser.parse(response.getResponseBodyAsStream());
            complete = true;
        } catch (HttpException e) {
            throw new QueryEvaluationException(e);
        } catch (QueryResultParseException e) {
            throw new QueryEvaluationException(e);
        } catch (TupleQueryResultHandlerException e) {
            throw new QueryEvaluationException(e);
        } finally {
            if (!complete) {
                response.abort();
            }
        }
    } catch (IOException e) {
        throw new QueryEvaluationException(e);
    }
}
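All three openrdf examples above share the same guard: a flag (complete, or a non-null result) is set only once the response body has been handed off successfully, and the finally block calls abort() otherwise, so a half-read connection is dropped instead of being returned to the pool in a dirty state. Distilled into a standalone sketch (sendQuery() and process() are hypothetical placeholders; HttpMethod.abort() is the real HttpClient 3.x call):

HttpMethod response = sendQuery(); // hypothetical helper that executes the request
boolean complete = false;
try {
    process(response.getResponseBodyAsStream()); // hypothetical body consumer
    complete = true;
} finally {
    if (!complete) {
        // body was not fully read: abort instead of releasing, so the
        // pooled connection is closed rather than reused mid-stream
        response.abort();
    }
}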
From source file: org.paxle.crawler.http.impl.HttpCrawler.java
public ICrawlerDocument request(URI requestUri) {
    if (requestUri == null)
        throw new NullPointerException("URL was null");
    this.logger.debug(String.format("Crawling URL '%s' ...", requestUri));

    ICrawlerDocument doc = null;
    HttpMethod method = null;
    try {
        final ICrawlerContext ctx = this.contextLocal.getCurrentContext();

        // creating an empty crawler-document
        doc = ctx.createDocument();
        doc.setLocation(requestUri);

        final String uriAsciiString = requestUri.toASCIIString();

        /* ==============================================================================
         * HTTP HEAD request
         *
         * first use the HEAD method to determine whether the MIME-type is supported
         * and to compare the content-length with the maximum allowed download size
         * (both only if the server provides this information, if not, the file is
         * fetched)
         * ============================================================================== */
        method = new HeadMethod(uriAsciiString); // automatically follows redirects
        this.initRequestMethod(method);

        int statusCode = this.getHttpClient().executeMethod(method);

        final boolean headUnsupported = (statusCode == HttpStatus.SC_METHOD_FAILURE
                || statusCode == HttpStatus.SC_METHOD_NOT_ALLOWED);
        if (!headUnsupported) {
            if (statusCode != HttpStatus.SC_OK) {
                // RFC 2616 states that the GET and HEAD methods _must_ be supported by any
                // general purpose servers (which are in fact the ones we are connecting to here)
                if (statusCode == HttpStatus.SC_NOT_FOUND) {
                    doc.setStatus(ICrawlerDocument.Status.NOT_FOUND);
                } else {
                    doc.setStatus(ICrawlerDocument.Status.UNKNOWN_FAILURE,
                            String.format("Server returned: %s", method.getStatusLine()));
                }
                this.logger.warn(String.format("Crawling of URL '%s' failed. Server returned: %s",
                        requestUri, method.getStatusLine()));
                return doc;
            }

            // getting the mimetype and charset
            Header contentTypeHeader = method.getResponseHeader(HTTPHEADER_CONTENT_TYPE);
            if (!handleContentTypeHeader(contentTypeHeader, doc))
                return doc;

            // reject the document if content-length is above our limit
            Header contentLengthHeader = method.getResponseHeader(HTTPHEADER_CONTENT_LENGTH);
            if (!handleContentLengthHeader(contentLengthHeader, doc))
                return doc;

            // FIXME: we've been redirected, re-enqueue the new URL and abort processing
            // if (!requestUri.equals(method.getURI())) ;
        }

        /* ==============================================================================
         * HTTP GET request
         *
         * secondly - if everything is alright up to now - proceed with getting the
         * actual document
         * ============================================================================== */
        HttpMethod getMethod = new GetMethod(uriAsciiString); // automatically follows redirects
        method.releaseConnection();
        method = getMethod;
        this.initRequestMethod(method);

        // send the request to the server
        statusCode = this.getHttpClient().executeMethod(method);

        // check the response status code
        if (statusCode != HttpStatus.SC_OK) {
            if (statusCode == HttpStatus.SC_NOT_FOUND) {
                doc.setStatus(ICrawlerDocument.Status.NOT_FOUND);
            } else {
                doc.setStatus(ICrawlerDocument.Status.UNKNOWN_FAILURE,
                        String.format("Server returned: %s", method.getStatusLine()));
            }
            this.logger.warn(String.format("Crawling of URL '%s' failed. Server returned: %s",
                    requestUri, method.getStatusLine()));
            return doc;
        }

        // FIXME: we've been redirected, re-enqueue the new URL and abort processing
        // if (!requestUri.equals(method.getURI())) ;

        /*
         * HTTP Content-Type
         * - getting the mimetype and charset
         */
        Header contentTypeHeader = method.getResponseHeader(HTTPHEADER_CONTENT_TYPE);
        if (!handleContentTypeHeader(contentTypeHeader, doc))
            return doc;

        /*
         * HTTP Content-Length
         * - Reject the document if content-length is above our limit
         *
         * We do this a second time here because some servers may have set the content-length
         * of the head response to <code>0</code>
         */
        Header contentLengthHeader = method.getResponseHeader(HTTPHEADER_CONTENT_LENGTH);
        if (!handleContentLengthHeader(contentLengthHeader, doc))
            return doc;

        extractHttpHeaders(method, doc); // externalised into this method to cleanup here a bit

        // getting the response body
        InputStream respBody = method.getResponseBodyAsStream();

        // handle the content-encoding, i.e. decompress the server's response
        Header contentEncodingHeader = method.getResponseHeader(HTTPHEADER_CONTENT_ENCODING);
        try {
            respBody = handleContentEncoding(contentEncodingHeader, respBody);

            /* Limit the max allowed length of the content to copy. -1 is used for no limit.
             *
             * We need to set a limit if:
             * a) the user has configured a max-download-size AND
             * b) the server returned no content-length header
             */
            int copyLimit = (this.maxDownloadSize <= 0 || contentLengthHeader != null) ? -1 : this.maxDownloadSize;

            // copy the content to file
            final ICrawlerTools crawlerTools = ctx.getCrawlerTools();
            crawlerTools.saveInto(doc, respBody, lrc, copyLimit);

            doc.setStatus(ICrawlerDocument.Status.OK);
            this.logger.debug(String.format("Crawling of URL '%s' finished.", requestUri));
        } catch (IOException e) {
            String msg = e.getMessage();
            if (msg == null || !msg.equals("Corrupt GZIP trailer"))
                throw e;
            setHostSetting(method.getURI().getHost(), PREF_NO_ENCODING);
            msg = String.format("server sent a corrupt gzip trailer at URL '%s'", requestUri);
            logger.warn(msg);
            // FIXME re-enqueue command
            doc.setStatus(ICrawlerDocument.Status.UNKNOWN_FAILURE, msg);
        } finally {
            respBody.close();
        }
    } catch (NoRouteToHostException e) {
        this.logger.warn(String.format("Error crawling %s: %s", requestUri, e.getMessage()));
        doc.setStatus(ICrawlerDocument.Status.NOT_FOUND, e.getMessage());
    } catch (UnknownHostException e) {
        this.logger.warn(String.format("Error crawling %s: Unknown host.", requestUri));
        doc.setStatus(ICrawlerDocument.Status.NOT_FOUND, e.getMessage());
    } catch (ConnectException e) {
        this.logger.warn(String.format("Error crawling %s: Unable to connect to host.", requestUri));
        doc.setStatus(ICrawlerDocument.Status.NOT_FOUND, e.getMessage());
    } catch (ConnectTimeoutException e) {
        this.logger.warn(String.format("Error crawling %s: %s.", requestUri, e.getMessage()));
        doc.setStatus(ICrawlerDocument.Status.NOT_FOUND, e.getMessage());
    } catch (SocketTimeoutException e) {
        this.logger.warn(String.format("Error crawling %s: Connection timeout.", requestUri));
        doc.setStatus(ICrawlerDocument.Status.NOT_FOUND, e.getMessage());
    } catch (CircularRedirectException e) {
        this.logger.warn(String.format("Error crawling %s: %s", requestUri, e.getMessage()));
        doc.setStatus(ICrawlerDocument.Status.NOT_FOUND, e.getMessage());
    } catch (NoHttpResponseException e) {
        this.logger.warn(String.format("Error crawling %s: %s", requestUri, e.getMessage()));
        doc.setStatus(ICrawlerDocument.Status.NOT_FOUND, e.getMessage());
    } catch (ContentLengthLimitExceededException e) {
        this.logger.warn(String.format("Error crawling %s: %s", requestUri, e.getMessage()));
        doc.setStatus(ICrawlerDocument.Status.UNKNOWN_FAILURE, e.getMessage());
    } catch (Throwable e) {
        String errorMsg;
        if (e instanceof HttpException) {
            errorMsg = "Unrecovered protocol exception: [%s] %s";
        } else if (e instanceof IOException) {
            errorMsg = "Transport exceptions: [%s] %s";
        } else {
            errorMsg = "Unexpected exception: [%s] %s";
        }
        errorMsg = String.format(errorMsg, e.getClass().getName(), e.getMessage());
        this.logger.error(String.format("Error crawling %s: %s", requestUri, errorMsg));
        doc.setStatus(ICrawlerDocument.Status.UNKNOWN_FAILURE, errorMsg);
        e.printStackTrace();
    } finally {
        if (method != null)
            method.releaseConnection();
    }
    return doc;
}
From source file: org.paxle.filter.robots.impl.RobotsTxtManager.java
/**
 * Downloads a <i>robots.txt</i> file from the given URL and parses it
 * @param robotsURL the URL of the robots.txt. This must be a http(s) resource
 * @return the parsed robots.txt file as a {@link RobotsTxt}-object
 * @throws IOException
 * @throws URISyntaxException
 */
RobotsTxt getFromWeb(URI robotsURL) throws IOException, URISyntaxException {
    String hostPort = this.getHostPort(robotsURL);

    String statusLine = null;
    if (!robotsURL.getScheme().startsWith("http")) {
        throw new IOException(String.format("Unsupported protocol: %s", robotsURL.getScheme()));
    }

    InputStream inputStream = null;
    HttpMethod getMethod = null;
    try {
        getMethod = new GetMethod(robotsURL.toASCIIString());
        int code = this.httpClient.executeMethod(getMethod);
        statusLine = getMethod.getStatusLine().toString();

        if (code == HttpStatus.SC_UNAUTHORIZED || code == HttpStatus.SC_FORBIDDEN) {
            // access to the whole website is restricted
            return new RobotsTxt(hostPort, RobotsTxt.RELOAD_INTERVAL_DEFAULT, statusLine, true);
        } else if (code == HttpStatus.SC_NOT_FOUND) {
            // no robots.txt provided
            return new RobotsTxt(hostPort, RobotsTxt.RELOAD_INTERVAL_DEFAULT, statusLine);
        } else if (code != HttpStatus.SC_OK) {
            // the robots.txt seems not to be deliverable
            return new RobotsTxt(hostPort, RobotsTxt.RELOAD_INTERVAL_DEFAULT, statusLine);
        }

        Header contentTypeHeader = getMethod.getResponseHeader("Content-Type");
        if (contentTypeHeader != null && !contentTypeHeader.getValue().startsWith("text/plain")) {
            // the robots.txt seems not to be available
            return new RobotsTxt(hostPort, RobotsTxt.RELOAD_INTERVAL_ERROR,
                    "Wrong mimeType " + contentTypeHeader.getValue());
        }

        inputStream = getMethod.getResponseBodyAsStream();
        RobotsTxt robotsTxt = new RobotsTxt(hostPort, RobotsTxt.RELOAD_INTERVAL_DEFAULT, statusLine);
        return this.parseRobotsTxt(robotsTxt, inputStream);
    } catch (IOException e) {
        long reloadInterval = RobotsTxt.RELOAD_INTERVAL_TEMP_ERROR;
        String status = e.getMessage();

        if (e instanceof UnknownHostException) {
            reloadInterval = RobotsTxt.RELOAD_INTERVAL_ERROR;
            status = "Unknown host";
            logger.info(String.format("Unknown host '%s'.", robotsURL.getHost()));
        } else if (e instanceof CircularRedirectException || e instanceof RedirectException
                || e instanceof InvalidRedirectLocationException) {
            reloadInterval = RobotsTxt.RELOAD_INTERVAL_ERROR;
            logger.info(String.format("Invalid redirection on host '%s'.", hostPort));
        } else if (e instanceof SocketTimeoutException || e instanceof ConnectTimeoutException
                || e instanceof NoHttpResponseException) {
            logger.debug(String.format("TimeOut while loading robots.txt from host '%s'.", hostPort));
        } else if (!(e instanceof ConnectException || e instanceof SocketException)) {
            logger.error("Exception while loading robots.txt from " + hostPort, e);
        }

        return new RobotsTxt(hostPort, reloadInterval, status);
    } catch (IllegalArgumentException e) {
        // occurs if redirected to an invalid URI, see https://bugs.pxl.li/view.php?id=172
        // we treat it like a 404, see above
        logger.info(String.format("Invalid redirection URI on host '%s'.", hostPort));
        return new RobotsTxt(hostPort, RobotsTxt.RELOAD_INTERVAL_DEFAULT, "Redirected to illegal URI");
    } catch (IllegalStateException e) {
        // occurs if redirected to an URI with an invalid protocol, see https://bugs.pxl.li/view.php?id=169
        // we treat it like a 404, see above
        logger.info(String.format("Invalid redirection URI on host '%s'.", hostPort));
        return new RobotsTxt(hostPort, RobotsTxt.RELOAD_INTERVAL_DEFAULT, "Redirected to illegal URI");
    } finally {
        if (inputStream != null) {
            try {
                inputStream.close();
            } catch (Exception e) {
                this.logger.error(e);
            }
        }
        if (getMethod != null)
            getMethod.releaseConnection();
    }
}
From source file: org.paxle.se.provider.rsssearch.impl.gui.ConfigServlet.java
/**
 * Loads the OpenSearch XML from the given URL, extracts the search URL from the XML
 * and replaces {searchTerms} by %s; the result is added to the {@link RssSearchProvider}
 * list and all {@link RssSearchProvider}s will be reloaded.
 * @param url
 * @throws ParserConfigurationException
 * @throws IOException
 * @throws HttpException
 * @throws SAXException
 */
private void addRssUrlFromOpensearchXMLUrl(String url)
        throws ParserConfigurationException, IOException, HttpException, SAXException {
    HttpMethod hm = null;
    try {
        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
        DocumentBuilder db = dbf.newDocumentBuilder();
        hm = new GetMethod(url);
        HttpClient hc = new HttpClient();
        int status = hc.executeMethod(hm);
        if (status == 200) {
            Document d = db.parse(hm.getResponseBodyAsStream());
            // <Url type="application/rss+xml"
            //      template="http://example.com/?q={searchTerms}&pw={startPage?}&format=rss"/>
            NodeList elements = d.getElementsByTagName("Url");
            for (int i = 0; i < elements.getLength(); i++) {
                NamedNodeMap nnm = elements.item(i).getAttributes();
                Node typeNode = nnm.getNamedItem("type");
                String type = typeNode == null ? null : typeNode.getNodeValue().toLowerCase();
                if (type != null && type.equals("application/rss+xml")) {
                    final Node templateNode = elements.item(i).getAttributes().getNamedItem("template");
                    String urltemplate = templateNode.getNodeValue();
                    urltemplate = urltemplate.replaceAll("\\{startPage\\?\\}", "1");
                    urltemplate = urltemplate.replaceAll("\\{searchTerms\\}", "%s");
                    final ArrayList<String> urls = this.pManager.getUrls();
                    urls.add(urltemplate);
                    this.pManager.setUrls(urls);
                    break;
                }
            }
        }
    } catch (IllegalArgumentException e) {
        // InputStream cannot be null
        logger.warn("Problem adding opensearch xml", e);
    } finally {
        if (hm != null)
            hm.releaseConnection();
    }
}
From source file: org.paxle.se.provider.rsssearch.impl.RssSearchProvider.java
public void search(ISearchRequest searchRequest, List<IIndexerDocument> results)
        throws IOException, InterruptedException {
    String url = null;
    try {
        final ISearchProviderContext context = SearchProviderContext.getCurrentContext();
        final String request = new RssSearchQueryFactor().transformToken(searchRequest.getSearchQuery());
        final int maxCount = searchRequest.getMaxResultCount();

        // creating a channel-builder
        ChannelBuilder builder = new ChannelBuilder();

        HttpMethod hm = null;
        try {
            // opening an http connection
            url = new URL(String.format(feedURL, URLEncoder.encode(request, DEFAULT_CHARSET))).toExternalForm();
            hm = new GetMethod(url);
            HttpClient hc = new HttpClient();
            int status = hc.executeMethod(hm);
            if (status != 200) {
                this.logger.warn(String.format("Error while connecting to '%s'.\r\n\tServer-Status: %s",
                        url, hm.getStatusLine()));
                return;
            }

            // parsing the rss/atom feed
            ChannelIF channel = FeedParser.parse(builder, hm.getResponseBodyAsStream());

            // loop through all items
            Collection<ItemIF> items = channel.getItems();
            Iterator<ItemIF> it = items.iterator();
            int count = 0;
            while (it.hasNext() && count++ < maxCount) {
                ItemIF item = it.next();

                IIndexerDocument indexerDoc = context.createDocument();
                indexerDoc.set(IIndexerDocument.LOCATION, item.getLink().toString());
                indexerDoc.set(IIndexerDocument.PROTOCOL, item.getLink().getProtocol());

                String title = item.getTitle();
                if (title != null && title.length() > 0) {
                    indexerDoc.set(IIndexerDocument.TITLE, title);
                }

                String descr = item.getDescription();
                if (descr != null && descr.length() > 0) {
                    indexerDoc.set(IIndexerDocument.SUMMARY, descr);
                }

                String author = item.getCreator();
                if (author != null && author.length() > 0) {
                    indexerDoc.set(IIndexerDocument.AUTHOR, item.getCreator() == null ? "" : item.getCreator());
                }

                Date lastMod = item.getDate();
                if (lastMod != null) {
                    indexerDoc.set(IIndexerDocument.LAST_MODIFIED, item.getDate());
                }

                results.add(indexerDoc);
            }
        } catch (IOException e) {
            // do nothing, it just did not work (offline or rss-site problem)
            this.logger.warn(e);
        } finally {
            if (hm != null)
                hm.releaseConnection();
        }
    } catch (Exception e) {
        this.logger.error(String.format("Unexpected '%s' while connecting to '%s'.",
                e.getClass().getName(), (url == null) ? this.feedURL : url));
    }
}
From source file: org.pentaho.di.baserver.utils.web.HttpConnectionHelper.java
protected String getContent(HttpMethod method, String encoding) throws IOException {
    String body;
    InputStreamReader inputStreamReader;
    if (!Const.isEmpty(encoding)) {
        inputStreamReader = new InputStreamReader(method.getResponseBodyAsStream(), encoding);
    } else {
        inputStreamReader = new InputStreamReader(method.getResponseBodyAsStream());
    }
    StringBuilder bodyBuffer = new StringBuilder();
    int c;
    while ((c = inputStreamReader.read()) != -1) {
        bodyBuffer.append((char) c);
    }
    inputStreamReader.close();
    body = bodyBuffer.toString();
    return body;
}
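The character-by-character read loop above is correct but slow for large bodies. A sketch of a shorter alternative, assuming Commons IO's IOUtils is on the classpath (it is already used in the ThreadSafeHttpClient example below) and keeping Pentaho's Const.isEmpty() check:

// Sketch: same behavior as getContent(...) above, delegating the read loop
// to Commons IO (assumes org.apache.commons.io.IOUtils is available).
protected String getContent(HttpMethod method, String encoding) throws IOException {
    InputStream in = method.getResponseBodyAsStream();
    try {
        return Const.isEmpty(encoding)
                ? IOUtils.toString(in)            // platform default charset
                : IOUtils.toString(in, encoding); // caller-supplied charset
    } finally {
        IOUtils.closeQuietly(in);
    }
}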
From source file: org.pentaho.di.trans.steps.http.HTTP.java
private Object[] callHttpService(RowMetaInterface rowMeta, Object[] rowData) throws KettleException {
    String url = determineUrl(rowMeta, rowData);
    try {
        if (isDetailed()) {
            logDetailed(BaseMessages.getString(PKG, "HTTP.Log.Connecting", url));
        }

        // Prepare HTTP get
        //
        HttpClient httpclient = SlaveConnectionManager.getInstance().createHttpClient();
        HttpMethod method = new GetMethod(url);

        // Set timeout
        if (data.realConnectionTimeout > -1) {
            httpclient.getHttpConnectionManager().getParams().setConnectionTimeout(data.realConnectionTimeout);
        }
        if (data.realSocketTimeout > -1) {
            httpclient.getHttpConnectionManager().getParams().setSoTimeout(data.realSocketTimeout);
        }

        if (!Const.isEmpty(data.realHttpLogin)) {
            httpclient.getParams().setAuthenticationPreemptive(true);
            Credentials defaultcreds = new UsernamePasswordCredentials(data.realHttpLogin, data.realHttpPassword);
            httpclient.getState().setCredentials(AuthScope.ANY, defaultcreds);
        }

        HostConfiguration hostConfiguration = new HostConfiguration();
        if (!Const.isEmpty(data.realProxyHost)) {
            hostConfiguration.setProxy(data.realProxyHost, data.realProxyPort);
        }

        // Add Custom HTTP headers
        if (data.useHeaderParameters) {
            for (int i = 0; i < data.header_parameters_nrs.length; i++) {
                method.addRequestHeader(data.headerParameters[i].getName(),
                        data.inputRowMeta.getString(rowData, data.header_parameters_nrs[i]));
                if (isDebug()) {
                    log.logDebug(BaseMessages.getString(PKG, "HTTPDialog.Log.HeaderValue",
                            data.headerParameters[i].getName(),
                            data.inputRowMeta.getString(rowData, data.header_parameters_nrs[i])));
                }
            }
        }

        InputStreamReader inputStreamReader = null;
        Object[] newRow = null;
        if (rowData != null) {
            newRow = rowData.clone();
        }

        // Execute request
        //
        try {
            // used for calculating the responseTime
            long startTime = System.currentTimeMillis();

            int statusCode = httpclient.executeMethod(hostConfiguration, method);

            // calculate the responseTime
            long responseTime = System.currentTimeMillis() - startTime;
            if (log.isDetailed()) {
                log.logDetailed(BaseMessages.getString(PKG, "HTTP.Log.ResponseTime", responseTime, url));
            }

            String body = null;
            // The status code
            if (isDebug()) {
                logDebug(BaseMessages.getString(PKG, "HTTP.Log.ResponseStatusCode", "" + statusCode));
            }

            if (statusCode != -1) {
                if (statusCode == 204) {
                    body = "";
                } else {
                    // if the response is not 401: HTTP Authentication required
                    if (statusCode != 401) {
                        // guess encoding
                        //
                        String encoding = meta.getEncoding();

                        // Try to determine the encoding from the Content-Type value
                        //
                        if (Const.isEmpty(encoding)) {
                            String contentType = method.getResponseHeader("Content-Type").getValue();
                            if (contentType != null && contentType.contains("charset")) {
                                encoding = contentType.replaceFirst("^.*;\\s*charset\\s*=\\s*", "")
                                        .replace("\"", "").trim();
                            }
                        }
                        if (isDebug()) {
                            log.logDebug(toString(),
                                    BaseMessages.getString(PKG, "HTTP.Log.ResponseHeaderEncoding", encoding));
                        }

                        // the response
                        if (!Const.isEmpty(encoding)) {
                            inputStreamReader = new InputStreamReader(method.getResponseBodyAsStream(), encoding);
                        } else {
                            inputStreamReader = new InputStreamReader(method.getResponseBodyAsStream());
                        }
                        StringBuffer bodyBuffer = new StringBuffer();
                        int c;
                        while ((c = inputStreamReader.read()) != -1) {
                            bodyBuffer.append((char) c);
                        }
                        inputStreamReader.close();
                        body = bodyBuffer.toString();
                        if (isDebug()) {
                            logDebug("Response body: " + body);
                        }
                    } else { // the status is a 401
                        throw new KettleStepException(
                                BaseMessages.getString(PKG, "HTTP.Exception.Authentication", data.realUrl));
                    }
                }
            }

            int returnFieldsOffset = rowMeta.size();
            if (!Const.isEmpty(meta.getFieldName())) {
                newRow = RowDataUtil.addValueData(newRow, returnFieldsOffset, body);
                returnFieldsOffset++;
            }
            if (!Const.isEmpty(meta.getResultCodeFieldName())) {
                newRow = RowDataUtil.addValueData(newRow, returnFieldsOffset, new Long(statusCode));
                returnFieldsOffset++;
            }
            if (!Const.isEmpty(meta.getResponseTimeFieldName())) {
                newRow = RowDataUtil.addValueData(newRow, returnFieldsOffset, new Long(responseTime));
            }
        } finally {
            if (inputStreamReader != null) {
                inputStreamReader.close();
            }
            // Release current connection to the connection pool once you are done
            method.releaseConnection();
            if (data.realcloseIdleConnectionsTime > -1) {
                httpclient.getHttpConnectionManager().closeIdleConnections(data.realcloseIdleConnectionsTime);
            }
        }
        return newRow;
    } catch (UnknownHostException uhe) {
        throw new KettleException(BaseMessages.getString(PKG, "HTTP.Error.UnknownHostException", uhe.getMessage()));
    } catch (Exception e) {
        throw new KettleException(BaseMessages.getString(PKG, "HTTP.Log.UnableGetResult", url), e);
    }
}
From source file: org.pentaho.pac.server.common.ThreadSafeHttpClient.java
/**
 * Execute the <param>method</param>, and return the server's response as a string.
 * @param method the HttpMethod specifying the server URL and parameters to be
 * passed to the server.
 * @return a string containing the server's response
 *
 * @throws ProxyException if the attempt to communicate with the server fails,
 * if the attempt to read the response from the server fails, or if the response
 * stream is unable to be converted into a String.
 */
private String executeMethod(HttpMethod method) throws ProxyException {
    InputStream responseStrm = null;
    try {
        int httpStatus = CLIENT.executeMethod(method);
        if (httpStatus != HttpStatus.SC_OK) {
            // If the response comes as unauthorized access we will throw a proxy exception
            // explaining the reason and what needs to be done to correct it
            if (httpStatus == HttpStatus.SC_UNAUTHORIZED) {
                throw new ProxyException(
                        Messages.getErrorString("ThreadSafeHttpClient.ERROR_0003_AUTHORIZATION_FAILED"));
            }
            String status = method.getStatusLine().toString();
            String uri = method.getURI().toString();
            String errorMsg = Messages.getErrorString("ThreadSafeHttpClient.ERROR_0001_CLIENT_REQUEST_FAILED", //$NON-NLS-1$
                    uri, status);
            logger.error(errorMsg);
            throw new ProxyException(status); // TODO
        }
        responseStrm = method.getResponseBodyAsStream();
        // trim() is necessary because some jsp's put \n\r at the beginning of
        // the returned text, and the xml processor chokes on \n\r at the beginning.
        String response = IOUtils.toString(responseStrm).trim();
        return response;
    } catch (Exception e) {
        throw new ProxyException(e);
    } finally {
        method.releaseConnection();
    }
}