List of usage examples for org.apache.commons.httpclient.methods HeadMethod setFollowRedirects
@Override public void setFollowRedirects(boolean followRedirects)
From source file:com.gisgraphy.importer.ImporterHelper.java
/** * @param URL the HTTP URL/* w ww . ja va 2s. c o m*/ * @return The size of the HTTP file using HTTP head method * or -1 if error or the file doesn't exists */ public static long getHttpFileSize(String URL) { HeadMethod headMethod = new HeadMethod(URL); //we can not follow redirect because Geonames send a 302 found HTTP status code when a file doen't exists headMethod.setFollowRedirects(false); try { int code = client.executeMethod(headMethod); int firstDigitOfCode = code / 100; switch (firstDigitOfCode) { case 4: logger.error("Can not determine HTTP file size of " + URL + " because it does not exists (" + code + ")"); return -1; //needed to catch 3XX code because Geonames send a 302 found HTTP status code when a file doen't exists case 3: logger.error("Can not determine HTTP file size of " + URL + " because it does not exists (" + code + ")"); return -1; case 5: logger.error( "Can not determine HTTP file size of " + URL + " because the server send an error " + code); return -1; default: break; } Header[] contentLengthHeaders = headMethod.getResponseHeaders("Content-Length"); if (contentLengthHeaders.length == 1) { logger.info("HTTP file size of " + URL + " = " + contentLengthHeaders[0].getValue()); return new Long(contentLengthHeaders[0].getValue()); } else if (contentLengthHeaders.length <= 0) { return -1L; } } catch (HttpException e) { logger.error("can not execute head method for " + URL + " : " + e.getMessage(), e); } catch (IOException e) { logger.error("can not execute head method for " + URL + " : " + e.getMessage(), e); } finally { headMethod.releaseConnection(); } return -1; }
From source file:com.sittinglittleduck.DirBuster.workGenerators.WorkerGeneratorURLFuzz.java
/** Thread run method */ public void run() { /*//from w w w . j av a 2 s. co m * Read in all the items and create all the work we need to. */ BufferedReader d = null; try { manager.setURLFuzzGenFinished(false); String currentDir = "/"; int failcode = 404; String line; Vector extToCheck = new Vector(10, 5); boolean recursive = true; int passTotal = 0; try { d = new BufferedReader(new InputStreamReader(new FileInputStream(inputFile))); passTotal = 0; while ((line = d.readLine()) != null) { if (!line.startsWith("#")) { passTotal++; } } manager.setTotalPass(passTotal); } catch (FileNotFoundException ex) { ex.printStackTrace(); } catch (IOException ex) { ex.printStackTrace(); } if (manager.getAuto()) { try { URL headurl = new URL(firstPart); HeadMethod httphead = new HeadMethod(headurl.toString()); Vector HTTPheaders = manager.getHTTPHeaders(); for (int a = 0; a < HTTPheaders.size(); a++) { HTTPHeader httpHeader = (HTTPHeader) HTTPheaders.elementAt(a); httphead.setRequestHeader(httpHeader.getHeader(), httpHeader.getValue()); } httphead.setFollowRedirects(Config.followRedirects); int responceCode = httpclient.executeMethod(httphead); if (Config.debug) { System.out.println("DEBUG WokerGen: responce code for head check = " + responceCode); } if (responceCode == 501 || responceCode == 400 || responceCode == 405) { if (Config.debug) { System.out.println( "DEBUG WokerGen: Changing to GET only HEAD test returned 501(method no implmented) or a 400"); } manager.setAuto(false); } } catch (MalformedURLException e) { } catch (IOException e) { } } d = new BufferedReader(new InputStreamReader(new FileInputStream(inputFile))); System.out.println("Starting fuzz on " + firstPart + urlFuzzStart + "{dir}" + urlFuzzEnd); int filesProcessed = 0; BaseCase baseCaseObj = GenBaseCase.genURLFuzzBaseCase(manager, firstPart + urlFuzzStart, urlFuzzEnd); while ((line = d.readLine()) != null) { if (stopMe) { return; } if (!line.startsWith("#")) { String method; if (manager.getAuto() && 
!baseCaseObj.useContentAnalysisMode() && !baseCaseObj.isUseRegexInstead()) { method = "HEAD"; } else { method = "GET"; } // url encode all the items line = URLEncoder.encode(line); URL currentURL = new URL(firstPart + urlFuzzStart + line + urlFuzzEnd); // BaseCase baseCaseObj = new BaseCase(currentURL, failcode, true, failurl, // baseResponce); // if the base case is null then we need to switch to content anylsis mode workQueue.put(new WorkUnit(currentURL, true, method, baseCaseObj, line)); } Thread.sleep(3); } } catch (InterruptedException ex) { Logger.getLogger(WorkerGeneratorURLFuzz.class.getName()).log(Level.SEVERE, null, ex); } catch (MalformedURLException ex) { Logger.getLogger(WorkerGeneratorURLFuzz.class.getName()).log(Level.SEVERE, null, ex); } catch (IOException ex) { Logger.getLogger(WorkerGeneratorURLFuzz.class.getName()).log(Level.SEVERE, null, ex); } finally { try { d.close(); manager.setURLFuzzGenFinished(true); } catch (IOException ex) { Logger.getLogger(WorkerGeneratorURLFuzz.class.getName()).log(Level.SEVERE, null, ex); } } }
From source file:fedora.client.FedoraClient.java
public Date getLastModifiedDate(String locator) throws IOException { if (locator.startsWith(FEDORA_URI_PREFIX)) { String query = "select $date " + "from <#ri> " + "where <" + locator + "> <" + VIEW.LAST_MODIFIED_DATE.uri + "> $date"; Map<String, String> map = new HashMap<String, String>(); map.put("lang", "itql"); map.put("query", query); TupleIterator tuples = getTuples(map); try {//from w ww .j av a2s . c o m if (tuples.hasNext()) { Map<String, Node> row = tuples.next(); Literal dateLiteral = (Literal) row.get("date"); if (dateLiteral == null) { throw new IOException("A row was returned, but it did not contain a 'date' binding"); } return DateUtility.parseDateAsUTC(dateLiteral.getLexicalForm()); } else { throw new IOException("No rows were returned"); } } catch (TrippiException e) { throw new IOException(e.getMessage()); } finally { try { tuples.close(); } catch (Exception e) { } } } else { HttpClient client = getHttpClient(); HeadMethod head = new HeadMethod(locator); head.setDoAuthentication(true); head.setFollowRedirects(FOLLOW_REDIRECTS); try { int statusCode = client.executeMethod(head); if (statusCode != HttpStatus.SC_OK) { throw new IOException("Method failed: " + head.getStatusLine()); } //Header[] headers = head.getResponseHeaders(); // Retrieve just the last modified header value. Header header = head.getResponseHeader("last-modified"); if (header != null) { String lastModified = header.getValue(); return DateUtility.convertStringToDate(lastModified); } else { // return current date time return new Date(); } } finally { head.releaseConnection(); } } }
From source file:com.sittinglittleduck.DirBuster.workGenerators.WorkerGenerator.java
/** Thread run method */ public void run() { String currentDir = "/"; int failcode = 404; String line;/* ww w . j av a 2 s . c om*/ Vector extToCheck = new Vector(10, 5); boolean recursive = true; int passTotal = 0; // -------------------------------------------------- try { // find the total number of requests to be made, per pass // based on the fact there is a single entry per line BufferedReader d = new BufferedReader(new InputStreamReader(new FileInputStream(inputFile))); passTotal = 0; while ((line = d.readLine()) != null) { if (!line.startsWith("#")) { passTotal++; } } manager.setTotalPass(passTotal); } catch (FileNotFoundException ex) { ex.printStackTrace(); } catch (IOException ex) { ex.printStackTrace(); } // ------------------------------------------------- // checks if the server surports heads requests if (manager.getAuto()) { try { URL headurl = new URL(firstPart); HeadMethod httphead = new HeadMethod(headurl.toString()); // set the custom HTTP headers Vector HTTPheaders = manager.getHTTPHeaders(); for (int a = 0; a < HTTPheaders.size(); a++) { HTTPHeader httpHeader = (HTTPHeader) HTTPheaders.elementAt(a); /* * Host header has to be set in a different way! */ if (httpHeader.getHeader().startsWith("Host:")) { httphead.getParams().setVirtualHost(httpHeader.getValue()); } else { httphead.setRequestHeader(httpHeader.getHeader(), httpHeader.getValue()); } } httphead.setFollowRedirects(Config.followRedirects); int responceCode = httpclient.executeMethod(httphead); if (Config.debug) { System.out.println("DEBUG WokerGen: responce code for head check = " + responceCode); } // if the responce code is method not implemented or if the head requests return // 400! 
if (responceCode == 501 || responceCode == 400 || responceCode == 405) { if (Config.debug) { System.out.println( "DEBUG WokerGen: Changing to GET only HEAD test returned 501(method no implmented) or a 400"); } // switch the mode to just GET requests manager.setAuto(false); } } catch (MalformedURLException e) { // TODO deal with error } catch (IOException e) { // TODO deal with error } } // end of checks to see if server surpports head requests int counter = 0; while ((!dirQueue.isEmpty() || !workQueue.isEmpty() || !manager.areWorkersAlive()) && recursive) { // get the dir we are about to process String baseResponce = null; recursive = manager.isRecursive(); BaseCase baseCaseObj = null; // rest the skip skipCurrent = false; // deal with the dirs try { // get item from queue // System.out.println("gen about to take"); DirToCheck tempDirToCheck = dirQueue.take(); // System.out.println("gen taken"); // get dir name currentDir = tempDirToCheck.getName(); // get any extention that need to be checked extToCheck = tempDirToCheck.getExts(); manager.setCurrentlyProcessing(currentDir); } catch (InterruptedException e) { e.printStackTrace(); } started = currentDir; // generate the list of dirs if (manager.getDoDirs()) { // find the fail case for the dir URL failurl = null; try { baseResponce = null; baseCaseObj = GenBaseCase.genBaseCase(manager, firstPart + currentDir, true, null); } catch (MalformedURLException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } // end of dir fail case if (stopMe) { return; } // generate work links try { // readin dir names BufferedReader d = new BufferedReader(new InputStreamReader(new FileInputStream(inputFile))); if (Config.debug) { System.out.println("DEBUG WokerGen: Generating dir list for " + firstPart); } URL currentURL; // add the first item while doing dir's if (counter == 0) { try { String method; if (manager.getAuto() && !baseCaseObj.useContentAnalysisMode() && !baseCaseObj.isUseRegexInstead()) { method = 
"HEAD"; } else { method = "GET"; } currentURL = new URL(firstPart + currentDir); // System.out.println("first part = " + firstPart); // System.out.println("current dir = " + currentDir); workQueue.put(new WorkUnit(currentURL, true, "GET", baseCaseObj, null)); if (Config.debug) { System.out.println("DEBUG WokerGen: 1 adding dir to work list " + method + " " + currentDir.toString()); } } catch (MalformedURLException ex) { ex.printStackTrace(); } catch (InterruptedException ex) { ex.printStackTrace(); } } // end of dealing with first item int dirsProcessed = 0; // add the rest of the dirs while ((line = d.readLine()) != null) { // code to skip the current work load if (skipCurrent) { // add the totalnumber per pass - the amount process this pass to the // work correction total manager.addToWorkCorrection(passTotal - dirsProcessed); break; } // if the line is not empty or starts with a # if (!line.equalsIgnoreCase("") && !line.startsWith("#")) { line = line.trim(); line = makeItemsafe(line); try { String method; if (manager.getAuto() && !baseCaseObj.useContentAnalysisMode() && !baseCaseObj.isUseRegexInstead()) { method = "HEAD"; } else { method = "GET"; } currentURL = new URL(firstPart + currentDir + line + "/"); // BaseCase baseCaseObj = new BaseCase(currentURL, failcode, true, // failurl, baseResponce); // if the base case is null then we need to switch to content // anylsis mode // System.out.println("Gen about to add to queue"); workQueue.put(new WorkUnit(currentURL, true, method, baseCaseObj, line)); // System.out.println("Gen finshed adding to queue"); if (Config.debug) { System.out.println("DEBUG WokerGen: 2 adding dir to work list " + method + " " + currentURL.toString()); } } catch (MalformedURLException e) { // TODO deal with bad line // e.printStackTrace(); // do nothing if it's malformed, I dont care about them! } catch (InterruptedException e) { e.printStackTrace(); } // if there is a call to stop the work gen then stop! 
if (stopMe) { return; } dirsProcessed++; } } // end of while } catch (FileNotFoundException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } } // generate the list of files if (manager.getDoFiles()) { baseResponce = null; URL failurl = null; // loop for all the different file extentions for (int b = 0; b < extToCheck.size(); b++) { // only test if we are surposed to ExtToCheck extTemp = (ExtToCheck) extToCheck.elementAt(b); if (extTemp.toCheck()) { fileExtention = ""; if (extTemp.getName().equals(ExtToCheck.BLANK_EXT)) { fileExtention = ""; } else { fileExtention = "." + extTemp.getName(); } try { // get the base for this extention baseCaseObj = GenBaseCase.genBaseCase(manager, firstPart + currentDir, false, fileExtention); } catch (MalformedURLException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } // if the manager has sent the stop command then exit if (stopMe) { return; } try { BufferedReader d = new BufferedReader( new InputStreamReader(new FileInputStream(inputFile))); // if(failcode != 200) // { int filesProcessed = 0; while ((line = d.readLine()) != null) { // code to skip the current work load if (skipCurrent) { manager.addToWorkCorrection(passTotal - filesProcessed); break; } // dont process is the line empty for starts with a # if (!line.equalsIgnoreCase("") && !line.startsWith("#")) { line = line.trim(); line = makeItemsafe(line); try { String method; if (manager.getAuto() && !baseCaseObj.useContentAnalysisMode() && !baseCaseObj.isUseRegexInstead()) { method = "HEAD"; } else { method = "GET"; } URL currentURL = new URL(firstPart + currentDir + line + fileExtention); // BaseCase baseCaseObj = new BaseCase(currentURL, true, // failurl, baseResponce); workQueue.put(new WorkUnit(currentURL, false, method, baseCaseObj, line)); if (Config.debug) { System.out.println("DEBUG WokerGen: adding file to work list " + method + " " + currentURL.toString()); } } catch (MalformedURLException e) { // 
e.printStackTrace(); // again do nothing as I dont care } catch (InterruptedException e) { e.printStackTrace(); } if (stopMe) { return; } filesProcessed++; } } // end of while // } } catch (FileNotFoundException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } } } // end of file ext loop } // end of if files finished = started; counter++; try { Thread.sleep(200); } catch (InterruptedException ex) { ex.printStackTrace(); } } // end of main while // System.out.println("Gen FINISHED!"); // manager.youAreFinished(); }
From source file:org.apache.maven.plugin.docck.AbstractCheckDocumentationMojo.java
/**
 * Verifies that the given documentation URL is reachable. Only HTTP(S)
 * URLs are actually checked (skipped in offline mode and for URLs already
 * verified this run); a malformed URL is retried as a file reference.
 */
private void checkURL(String url, String description, DocumentationReporter reporter) {
    try {
        String protocol = getURLProtocol(url);
        if (!protocol.startsWith("http")) {
            reporter.warn("Non-HTTP " + description + " URL not verified.");
            return;
        }
        if (offline) {
            reporter.warn("Cannot verify " + description + " in offline mode with URL: \'" + url + "\'.");
            return;
        }
        if (validUrls.contains(url)) {
            // Already verified during this run; nothing to do.
            return;
        }
        verifyHttpUrl(url, description, reporter);
    } catch (MalformedURLException e) {
        reporter.warn("The " + description + " appears to have an invalid URL \'" + url + "\'." + " Message: \'"
                + e.getMessage() + "\'. Trying to access it as a file instead.");
        checkFile(url, description, reporter);
    }
}

/** Issues an HTTP HEAD request and records the URL as valid on a 200 response. */
private void verifyHttpUrl(String url, String description, DocumentationReporter reporter) {
    HeadMethod headMethod = new HeadMethod(url);
    headMethod.setFollowRedirects(true);
    headMethod.setDoAuthentication(false);
    try {
        getLog().debug("Verifying http url: " + url);
        if (httpClient.executeMethod(headMethod) != HTTP_STATUS_200) {
            reporter.error("Cannot reach " + description + " with URL: \'" + url + "\'.");
        } else {
            validUrls.add(url);
        }
    } catch (HttpException e) {
        reporter.error("Cannot reach " + description + " with URL: \'" + url + "\'.\nError: " + e.getMessage());
    } catch (IOException e) {
        reporter.error("Cannot reach " + description + " with URL: \'" + url + "\'.\nError: " + e.getMessage());
    } finally {
        headMethod.releaseConnection();
    }
}
From source file:org.apache.webdav.ant.Utils.java
/**
 * Checks whether the resource addressed by the given URL exists, using an
 * HTTP HEAD request (redirects are followed).
 *
 * @param client the HTTP client used to execute the request
 * @param httpURL the URL of the resource to probe
 * @return <code>true</code> if the server answered 200 OK,
 *         <code>false</code> if it answered 404 Not Found
 * @throws HttpException carrying the status code for any other response
 * @throws IOException if the request could not be executed
 */
public static boolean resourceExists(HttpClient client, HttpURL httpURL) throws IOException, HttpException {
    HeadMethod headRequest = new HeadMethod(httpURL.getEscapedURI());
    headRequest.setFollowRedirects(true);
    int statusCode = client.executeMethod(headRequest);
    if (statusCode == WebdavStatus.SC_OK) {
        return true;
    }
    if (statusCode == WebdavStatus.SC_NOT_FOUND) {
        return false;
    }
    // Any status other than 200/404 is surfaced to the caller as an exception.
    HttpException failure = new HttpException();
    failure.setReasonCode(statusCode);
    failure.setReason(headRequest.getStatusText());
    throw failure;
}
From source file:org.bibsonomy.rest.client.worker.impl.HeadWorker.java
@Override protected HeadMethod getMethod(String url, String requestBody) { final HeadMethod head = new HeadMethod(url); head.setFollowRedirects(true); return head;//from ww w . j a v a 2 s . c o m }
From source file:org.deri.pipes.utils.HttpResponseCache.java
/**
 * Fetches the response for the given location, serving it from the JCS
 * "httpResponseCache" region when a cached copy is still valid. A cached
 * entry past its expiry time is revalidated with an HTTP HEAD request
 * comparing status code and Last-Modified before being reused.
 *
 * @param client the HTTP client to execute requests with (also used as the
 *        synchronization monitor, serializing all requests on this client)
 * @param location the URL to fetch
 * @param requestHeaders extra request headers; may be null. A default
 *        User-Agent is added when none is supplied.
 * @return the (possibly cached) response data
 * @throws Exception if the underlying request fails
 */
public static HttpResponseData getResponseData(HttpClient client, String location,
        Map<String, String> requestHeaders) throws Exception {
    synchronized (client) {
        // Caching can be disabled entirely via configuration.
        if (MINIMUM_CACHE_TIME_MILLIS <= 0) {
            logger.debug("caching disabled.");
            return getDataFromRequest(client, location, requestHeaders);
        }
        // NOTE: the cache key is computed from the ORIGINAL headers, before
        // the defaults below are filled in.
        String cacheKey = makeCacheKey(location, requestHeaders);
        if (requestHeaders == null) {
            requestHeaders = new HashMap<String, String>();
        }
        if (requestHeaders.get(HEADER_USER_AGENT) == null) {
            requestHeaders.put(HEADER_USER_AGENT, getDefaultUserAgent());
        }
        // A failure to obtain the cache is non-fatal: fall through to a
        // plain uncached request.
        JCS jcs = null;
        try {
            jcs = JCS.getInstance("httpResponseCache");
        } catch (Exception e) {
            logger.warn("Problem getting JCS cache" + e, e);
        }
        if (jcs != null) {
            try {
                HttpResponseData data = (HttpResponseData) jcs.get(cacheKey);
                if (data != null) {
                    // Fresh entry: serve straight from the cache.
                    if (data.getExpires() > System.currentTimeMillis()) {
                        logger.info("Retrieved from cache (not timed out):" + location);
                        return data;
                    }
                    // Stale entry: revalidate with a HEAD request. Very long
                    // URLs are skipped — presumably to avoid server-side
                    // request-line limits (TODO confirm).
                    if (location.length() < 2000) {
                        HeadMethod headMethod = new HeadMethod(location);
                        headMethod.setFollowRedirects(true);
                        addRequestHeaders(headMethod, requestHeaders);
                        try {
                            int response = client.executeMethod(headMethod);
                            Header lastModifiedHeader = headMethod.getResponseHeader(HEADER_LAST_MODIFIED);
                            // Only reuse the cached body if the status code
                            // matches what was cached...
                            if (response == data.getResponse()) {
                                if (lastModifiedHeader == null) {
                                    logger.debug("Not using cache (No last modified header available) for "
                                            + location);
                                } else if (lastModifiedHeader != null && data.getLastModified()
                                        .equals(lastModifiedHeader.getValue())) {
                                    // ...and Last-Modified is unchanged: refresh
                                    // the expiry and re-store the entry.
                                    // (The != null test is redundant here; kept as-is.)
                                    setExpires(data, headMethod);
                                    jcs.put(cacheKey, data);
                                    logger.info("Retrieved from cache (used HTTP HEAD request to check "
                                            + HEADER_LAST_MODIFIED + ") :" + location);
                                    return data;
                                } else {
                                    logger.debug("Not using cache (last modified changed) for " + location);
                                }
                            }
                        } finally {
                            headMethod.releaseConnection();
                        }
                    }
                }
            } catch (Exception e) {
                // Cache problems must never break the request path.
                logger.warn("Problem retrieving from cache for " + location, e);
            }
        }
        // Cache miss (or revalidation failed): do the real request and
        // store the result for next time.
        HttpResponseData data = getDataFromRequest(client, location, requestHeaders);
        if (jcs != null) {
            try {
                jcs.put(cacheKey, data);
                logger.debug("cached " + location);
            } catch (Exception e) {
                logger.warn("Could not store response for " + location + " in cache", e);
            }
        }
        return data;
    }
}
From source file:org.eclipse.mylyn.internal.jira.core.service.web.JiraWebSession.java
private boolean isAuthenticated(HttpClient httpClient, HostConfiguration hostConfiguration, IProgressMonitor monitor) throws JiraException { String url = baseUrl + "/secure/UpdateUserPreferences!default.jspa"; //$NON-NLS-1$ HeadMethod method = new HeadMethod(url); method.setFollowRedirects(false); try {/*from w w w.jav a2 s .c o m*/ int statusCode = WebUtil.execute(httpClient, hostConfiguration, method, monitor); return statusCode == HttpStatus.SC_OK; } catch (IOException e) { throw new JiraException(e); } }
From source file:org.fcrepo.client.FedoraClient.java
public Date getLastModifiedDate(String locator) throws IOException { if (locator.startsWith(FEDORA_URI_PREFIX)) { String query = "select $date " + "from <#ri> " + "where <" + locator + "> <" + VIEW.LAST_MODIFIED_DATE.uri + "> $date"; Map<String, String> map = new HashMap<String, String>(); map.put("lang", "itql"); map.put("query", query); TupleIterator tuples = getTuples(map); try {//from w w w .j av a 2s.c o m if (tuples.hasNext()) { Map<String, Node> row = tuples.next(); Literal dateLiteral = (Literal) row.get("date"); if (dateLiteral == null) { throw new IOException("A row was returned, but it did not contain a 'date' binding"); } return DateUtility.parseDateLoose(dateLiteral.getLexicalForm()); } else { throw new IOException("No rows were returned"); } } catch (TrippiException e) { throw new IOException(e.getMessage()); } finally { try { tuples.close(); } catch (Exception e) { } } } else { HttpClient client = getHttpClient(); HeadMethod head = new HeadMethod(locator); head.setDoAuthentication(true); head.setFollowRedirects(FOLLOW_REDIRECTS); try { int statusCode = client.executeMethod(head); if (statusCode != HttpStatus.SC_OK) { throw new IOException("Method failed: " + head.getStatusLine()); } //Header[] headers = head.getResponseHeaders(); // Retrieve just the last modified header value. Header header = head.getResponseHeader("last-modified"); if (header != null) { String lastModified = header.getValue(); return DateUtility.parseDateLoose(lastModified); } else { // return current date time return new Date(); } } finally { head.releaseConnection(); } } }