List of usage examples for the org.apache.commons.httpclient Header constructor:
public Header(String name, String value)
From source file:com.bigdata.rdf.sail.remoting.GraphRepositoryClient.java
/** * @see {@link GraphRepository#delete(String, QueryLanguage)} *///ww w . j a v a2 s . c o m public void delete(String query, QueryLanguage ql) throws Exception { if (query == null || ql == null) { return; } // DELETE DeleteMethod del = new DeleteMethod(servletURL); try { // add the header for the query if (query != null) { query = ql.toString().toLowerCase() + "[" + trim(query) + "]"; String rangeHeader = "query[" + query + "]"; Header range = new Header(GraphRepositoryServlet.HTTP_RANGE, rangeHeader); del.addRequestHeader(range); } // Execute the method. int sc = getHttpClient().executeMethod(del); if (sc != HttpStatus.SC_OK) { throw new IOException("HTTP-DELETE failed: " + del.getStatusLine()); } } finally { // Release the connection. del.releaseConnection(); } }
From source file:com.google.gsa.valve.modules.httpbasic.HTTPBasicAuthenticationProcess.java
/** * This is the main method that does the authentication and should be * invoked by the classes that would like to open a new authentication * process against an HTTP Basic protected source. * <p>// www .ja v a 2s . com * The username and password for the source are assumed to be the ones * captured during the authentication. These are stored in creds and in * this case the root parameters. creds is an array of credentials for * all external sources. The first element is 'root' which contains the * credentials captured from the login page. This method reviews if there * is a credential id identical to the name associated to this module * in the config file. If so, these credentials are used to authenticate * against this HTTP Basic source, and if not 'root' one will be used * instead. * <p> * If the HTTP Basic authentication result is OK, it creates an * authentication cookie containing the HTTP Basic credentials * to be reused during authorization. The content returned back from the * remote secure backend system is sent as well. Anyway, the HTTP * response code is returned in this method to inform the caller on the * status. 
* * @param request HTTP request * @param response HTTP response * @param authCookies vector that contains the authentication cookies * @param url the document url * @param creds an array of credentials for all external sources * @param id the default credential id to be retrieved from creds * @return the HTTP error code * @throws HttpException * @throws IOException */ public int authenticate(HttpServletRequest request, HttpServletResponse response, Vector<Cookie> authCookies, String url, Credentials creds, String id) throws HttpException, IOException { Cookie[] cookies = null; //Credentials UsernamePasswordCredentials credentials = null; // Initialize status code int statusCode = HttpServletResponse.SC_UNAUTHORIZED; // Read cookies cookies = request.getCookies(); // Debug logger.debug("HTTP Basic authentication start"); //First read the u/p the credentails store, in this case using the same as the root login logger.debug("HttpBasic: trying to get creds from repository ID: " + id); Credential httpBasicCred = null; try { httpBasicCred = creds.getCredential(id); } catch (NullPointerException npe) { logger.error("NPE while reading credentials of ID: " + id); } if (httpBasicCred != null) { credentials = new UsernamePasswordCredentials(httpBasicCred.getUsername(), httpBasicCred.getPassword()); } else { logger.debug("HttpBasic: trying to get creds from repository \"root\""); httpBasicCred = creds.getCredential("root"); if (httpBasicCred != null) { logger.info("Trying with root credentails"); credentials = new UsernamePasswordCredentials(httpBasicCred.getUsername(), httpBasicCred.getPassword()); } } logger.debug("Authenticating"); Header[] headers = null; HttpMethodBase method = null; //Get Max connections int maxConnectionsPerHost = 30; int maxTotalConnections = 100; //Cookie Max Age int authMaxAge = -1; try { maxConnectionsPerHost = new Integer(valveConf.getMaxConnectionsPerHost()).intValue(); maxTotalConnections = (new 
Integer(valveConf.getMaxTotalConnections())).intValue(); authMaxAge = Integer.parseInt(valveConf.getAuthMaxAge()); } catch (NumberFormatException nfe) { logger.error( "Configuration error: chack the configuration file as the numbers set for any of the following parameters are not OK:"); logger.error(" * maxConnectionsPerHost * maxTotalConnections * authMaxAge"); } // Protection if (webProcessor == null) { // Instantiate Web processor if ((maxConnectionsPerHost != -1) && (maxTotalConnections != -1)) { webProcessor = new WebProcessor(maxConnectionsPerHost, maxTotalConnections); } else { webProcessor = new WebProcessor(); } } // // Launch the authentication process // // A fixed URL in the repository that all users have access to which can be used to authN a user // and capture the HTTP Authorization Header String authURL = valveConf.getRepository(id).getParameterValue("HTTPAuthPage"); try { // Set HTTP headers headers = new Header[1]; // Set User-Agent headers[0] = new Header("User-Agent", "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8) Gecko/20051111 Firefox/1.5"); // Request page, testing if credentials are valid if (credentials != null) { logger.debug("Username: " + credentials.getUserName()); logger.debug("URL: " + authURL); } //HTTP request method = webProcessor.sendRequest(credentials, RequestType.GET_REQUEST, headers, null, authURL); //Read the auth header and store in the cookie, the authZ class will use this later headers = method.getRequestHeaders(); Header authHeader = null; authHeader = method.getRequestHeader("Authorization"); // Cache status code if (method != null) statusCode = method.getStatusCode(); if (statusCode == HttpServletResponse.SC_OK) { //Authentication worked, so create the auth cookie to indicate it has worked Cookie extAuthCookie = null; extAuthCookie = new Cookie(BASIC_COOKIE, ""); if (authHeader != null) { String basicCookie = null; try { basicCookie = URLEncoder.encode(getBasicAuthNChain(authHeader.getValue()), encoder); if 
(basicCookie == null) { basicCookie = ""; } } catch (Exception ex) { logger.error("Error when setting Basic cookie value: " + ex.getMessage(), ex); basicCookie = ""; } extAuthCookie.setValue(basicCookie); } String authCookieDomain = null; String authCookiePath = null; // Cache cookie properties authCookieDomain = valveConf.getAuthCookieDomain(); authCookiePath = valveConf.getAuthCookiePath(); // Set extra cookie parameters extAuthCookie.setDomain(authCookieDomain); extAuthCookie.setPath(authCookiePath); extAuthCookie.setMaxAge(authMaxAge); // Log info if (logger.isDebugEnabled()) logger.debug("Adding " + BASIC_COOKIE + " cookie: " + extAuthCookie.getName() + ":" + extAuthCookie.getValue() + ":" + extAuthCookie.getPath() + ":" + extAuthCookie.getDomain() + ":" + extAuthCookie.getSecure()); //sendCookies support boolean isSessionEnabled = new Boolean(valveConf.getSessionConfig().isSessionEnabled()) .booleanValue(); boolean sendCookies = false; if (isSessionEnabled) { sendCookies = new Boolean(valveConf.getSessionConfig().getSendCookies()).booleanValue(); } if ((!isSessionEnabled) || ((isSessionEnabled) && (sendCookies))) { logger.debug("Adding cookie to response"); response.addCookie(extAuthCookie); } //Add cookies to the Cookie array to support sessions authCookies.add(extAuthCookie); logger.debug("Cookie added to the array"); } // Clear webProcessor cookies webProcessor.clearCookies(); } catch (Exception e) { // Log error logger.error("HTTP Basic authentication failure: " + e.getMessage(), e); // Garbagge collect method = null; // Update status code statusCode = HttpServletResponse.SC_UNAUTHORIZED; } // End of the authentication process logger.debug("HTTP Basic Authentication completed (" + statusCode + ")"); // Return status code return statusCode; }
From source file:com.dtolabs.rundeck.core.resources.TestURLResourceModelSource.java
/** Verifies that a YAML response body is parsed into the expected node set. */
public void testGetNodesYaml() throws Exception {
    final URLResourceModelSource provider = new URLResourceModelSource(getFrameworkInstance());

    final URLResourceModelSource.Configuration config = URLResourceModelSource.Configuration.build();
    config.project(PROJ_NAME);
    config.url("http://example.com/test");
    provider.configure(config.getProperties());

    // Stub HTTP interaction: 200 OK with a text/yaml body.
    final test1 interaction = new test1();
    interaction.httpResultCode = 200;
    interaction.httpStatusText = "OK";
    interaction.responseHeaders.put("Content-Type", new Header("Content-Type", "text/yaml"));
    String yamlcontent = YAML_NODES_TEST;
    interaction.bodyStream = new ByteArrayInputStream(yamlcontent.getBytes());
    provider.interaction = interaction;

    final INodeSet nodes = provider.getNodes();
    assertNotNull(nodes);
    assertEquals(1, nodes.getNodes().size());
    assertNotNull(nodes.getNode("testnode1"));

    // The stub must have seen a full request/response cycle.
    assertNotNull(interaction.method);
    assertNotNull(interaction.client);
    assertNotNull(interaction.followRedirects);
    assertNotNull(interaction.releaseConnectionCalled);
}
From source file:ixa.pipe.ned.DBpediaSpotlightClient.java
/**
 * POSTs the given text to a DBpedia Spotlight REST endpoint and returns the
 * parsed JSON response.
 *
 * @param text     the text adaptation to annotate
 * @param host     API host
 * @param port     API port
 * @param endpoint REST endpoint name (appended to {@code /rest/})
 * @return the API response as a {@link JSONObject}
 * @throws AnnotationException if the request fails or the response is not valid JSON
 */
public JSONObject extractJSON(TextAdaptation text, String host, String port, String endpoint)
        throws AnnotationException {
    LOG.info("Querying API.");
    // (Removed an unused `Document doc` local and the dead "" initializer of
    // spotlightResponse from the original.)
    String spotlightResponse;
    try {
        String url = host + ":" + port + "/rest/" + endpoint;
        PostMethod method = new PostMethod(url);
        method.setRequestHeader("Content-Type", "application/x-www-form-urlencoded;charset=utf-8");
        NameValuePair[] params = { new NameValuePair("text", text.text()),
                new NameValuePair("spotter", "SpotXmlParser"),
                new NameValuePair("confidence", Double.toString(CONFIDENCE)),
                new NameValuePair("support", Integer.toString(SUPPORT)),
                new NameValuePair("coreferenceResolution", Boolean.toString(COREFERENCE)) };
        method.setRequestBody(params);
        method.setRequestHeader(new Header("Accept", "application/json"));
        spotlightResponse = request(method);
    } catch (Exception e) {
        throw new AnnotationException("Could not encode text.", e);
    }
    assert spotlightResponse != null;
    try {
        return new JSONObject(spotlightResponse);
    } catch (JSONException e) {
        throw new AnnotationException("Received invalid response from DBpedia Spotlight API.");
    }
}
From source file:ensen.controler.DBpediaSpotlightClient.java
public List<EnsenDBpediaResource> extractCandidats(Text text) throws AnnotationException { if (local)/* w ww . j av a 2 s .co m*/ API_URL = PropertiesManager.getProperty("DBpediaSpotlightClientLocal"); else API_URL = PropertiesManager.getProperty("DBpediaSpotlightClient"); SPOTTER = PropertiesManager.getProperty("spotter"); System.out.println("Querying API: " + API_URL); String spotlightResponse; try { GetMethod getMethod = new GetMethod( API_URL + "rest/candidates/?" + "confidence=" + PropertiesManager.getProperty("CONFIDENCE") + "&support=" + PropertiesManager.getProperty("SUPPORT") + "&spotter=" + SPOTTER + "&text=" + URLEncoder.encode(text.text(), "utf-8")); getMethod.addRequestHeader(new Header("Accept", "application/json")); spotlightResponse = request(getMethod); } catch (UnsupportedEncodingException e) { throw new AnnotationException("Could not encode text.", e); } assert spotlightResponse != null; JSONObject resultJSON = null; JSONArray entities = null; try { resultJSON = new JSONObject(spotlightResponse); //System.out.println(resultJSON.toString()); JSONObject annotationObj = resultJSON.getJSONObject("annotation"); entities = annotationObj.getJSONArray("surfaceForm"); } catch (JSONException e) { System.err.println(resultJSON.toString()); e.printStackTrace(); throw new AnnotationException("Received invalid response from DBpedia Spotlight API."); } LinkedList<EnsenDBpediaResource> resources = new LinkedList<EnsenDBpediaResource>(); for (int i = 0; i < entities.length(); i++) { try { JSONObject entity = entities.getJSONObject(i); try { JSONArray resourcesArray = entity.getJSONArray("resource"); for (int j = 0; j < resourcesArray.length(); j++) { resources.add(new EnsenDBpediaResource( ((JSONObject) resourcesArray.get(j)).getString("@uri"), Integer.parseInt(((JSONObject) resourcesArray.get(j)).getString("@support")))); } } catch (Exception e) { } try { JSONObject resourcesArray = entity.getJSONObject("resource"); resources.add(new 
EnsenDBpediaResource(resourcesArray.getString("@uri"), Integer.parseInt(resourcesArray.getString("@support")))); } catch (Exception e) { } } catch (JSONException e) { System.out.println("JSON exception " + e); } } return resources; }
From source file:com.bigdata.rdf.sail.remoting.GraphRepositoryClient.java
/** * @see {@link GraphRepository#update(String, String)} *//* w w w .j a v a2 s . c o m*/ public void update(String rdfXmlToDelete, String rdfXmlToAdd) throws Exception { // DELETE PutMethod put = new PutMethod(servletURL); try { // add the range header if (rdfXmlToDelete != null) { String triples = "triples[" + trim(rdfXmlToDelete) + "]"; Header range = new Header(GraphRepositoryServlet.HTTP_RANGE, triples); put.addRequestHeader(range); } // set the body if (rdfXmlToAdd != null) { put.setRequestEntity(new StringRequestEntity(rdfXmlToAdd, // the rdf/xml body GraphRepositoryServlet.RDF_XML, // includes the encoding null // so we don't need to say it here. )); put.setContentChunked(true); } // Execute the method. int sc = getHttpClient().executeMethod(put); if (sc != HttpStatus.SC_OK) { throw new IOException("HTTP-PUT failed: " + put.getStatusLine()); } } finally { // Release the connection. put.releaseConnection(); } }
From source file:com.dtolabs.rundeck.core.resources.TestURLResourceModelSource.java
public void testGetNodesXml() throws Exception { URLResourceModelSource provider = new URLResourceModelSource(getFrameworkInstance()); final URLResourceModelSource.Configuration build = URLResourceModelSource.Configuration.build(); build.project(PROJ_NAME);//w ww.j a v a 2s . c o m build.url("http://example.com/test"); provider.configure(build.getProperties()); final test1 test1 = new test1(); test1.httpResultCode = 200; test1.httpStatusText = "OK"; test1.responseHeaders.put("Content-Type", new Header("Content-Type", "text/xml")); ByteArrayInputStream stringStream = new ByteArrayInputStream(XML_NODES_TEXT.getBytes()); test1.bodyStream = stringStream; provider.interaction = test1; final INodeSet nodes = provider.getNodes(); assertNotNull(nodes); assertEquals(1, nodes.getNodes().size()); assertNotNull(nodes.getNode("testnode1")); assertNotNull(test1.method); assertNotNull(test1.client); assertNotNull(test1.followRedirects); assertNotNull(test1.releaseConnectionCalled); }
From source file:com.itude.mobile.mobbl.server.http.HttpDelegate.java
private synchronized Header[] transformToHeader(TreeMap<String, String[]> headers) { if (headers == null) return null; Header[] result = new Header[headers.size()]; Iterator<Entry<String, String[]>> iterator = headers.entrySet().iterator(); int i = 0;//from w w w . j a v a 2 s. c o m while (iterator.hasNext()) { Entry<String, String[]> entry = iterator.next(); for (String s : entry.getValue()) result[i] = new Header(entry.getKey(), s); i++; } return result; }
From source file:net.sf.ehcache.constructs.web.filter.SimpleCachingHeadersPageCachingFilterTest.java
/** * Tests whether the page is gzipped using the rawer HttpClient library. * Lets us check that the responseBody is really gzipped. *///w ww .j a v a2s. c o m @Test public void testCachedPageIsGzippedWhenEncodingHeaderSet() throws IOException { HttpClient httpClient = new HttpClient(); HttpMethod httpMethod = new GetMethod(buildUrl(cachedPageUrl)); httpMethod.setRequestHeader(new Header("Accept-encoding", "gzip")); httpClient.executeMethod(httpMethod); byte[] responseBody = httpMethod.getResponseBody(); assertTrue(PageInfo.isGzipped(responseBody)); }
From source file:com.thesmartweb.swebrank.DBpediaSpotlightClient.java
/**
 * Method that recognizes the entities through DBpedia spotlight in the content of a given URL.
 * Populates the instance-level entity/type/score lists and the summary statistics
 * fields (mean/max/min/median/std of similarity scores, supports, and first-vs-second
 * rank score differences).
 *
 * @param url_check the url to be annotated
 * @param StemFlag a flag to determine if we want to use stemming
 */
@Override
public void extract(String url_check, boolean StemFlag) throws AnnotationException {
    // Throttle: pause 1s before hitting the API; re-interrupt on interruption.
    try {
        Thread.sleep(1000);
    } catch (InterruptedException ex) {
        Thread.currentThread().interrupt();
    }
    // NOTE(review): `resources` is populated nowhere in this method (the add is
    // commented out below) — it appears to be leftover scaffolding.
    LinkedList<DBpediaResource> resources = new LinkedList<DBpediaResource>();
    // Reset all instance-level accumulators for this extraction run.
    entitiesString = new ArrayList<>();
    typesDBspot = new ArrayList<>();
    similarityScores = new ArrayList<>();
    similarityDifference = new ArrayList<>();
    supports = new ArrayList<>();
    allEntities = new ArrayList<>();
    double simScore = 0.0;
    double percOfSec = 0.0;
    try {
        LOG.info("Querying API.");
        String spotlightResponse;
        // Build the annotate request; the target page is passed by URL.
        String request = API_URL + "rest/annotate?" + "confidence=" + CONFIDENCE + "&support=" + SUPPORT
                + "&url=" + URLEncoder.encode(url_check, "utf-8");
        GetMethod getMethod = new GetMethod(request);
        getMethod.addRequestHeader(new Header("Accept", "application/json"));
        spotlightResponse = request(getMethod);
        assert spotlightResponse != null;
        JSONObject resultJSON = null;
        JSONArray entities = null;
        // Only parse when the response looks like a JSON object.
        if (spotlightResponse.startsWith("{")) {
            resultJSON = new JSONObject(spotlightResponse);
            entities = resultJSON.getJSONArray("Resources");
            for (int i = 0; i < entities.length(); i++) {
                try {
                    JSONObject entity = entities.getJSONObject(i);
                    // Get the entity string from the last part of the URI.
                    // NOTE(review): substring(28) assumes the "http://dbpedia.org/resource/"
                    // prefix length — confirm against the API URLs actually returned.
                    String entityString = entity.getString("@URI").substring(28).toLowerCase()
                            .replaceAll("[\\_,\\%28,\\%29]", " ");
                    if (StemFlag) {
                        // If we use stemming, apply the Snowball stemmer to each word
                        // of the entity (queries are stemmed the same way elsewhere).
                        String[] splitEntity = entityString.split(" ");
                        entityString = "";
                        StemmerSnow stemmer = new StemmerSnow();
                        List<String> splitEntityList = stemmer.stem(Arrays.asList(splitEntity));
                        StringBuilder sb = new StringBuilder();
                        for (String s : splitEntityList) {
                            sb.append(s.trim());
                            sb.append(" ");
                        }
                        entityString = sb.toString().trim();
                    }
                    boolean flag_new_entity = false;
                    if (!entitiesString.contains(entityString)) {
                        // First time we see this entity: record it in the unique list.
                        flag_new_entity = true;
                        entitiesString.add(entityString);
                    }
                    // Extract the semantic types/categories for this entity.
                    String typesString = entity.getString("@types");
                    String[] types = typesString.split("\\,");
                    String delimiter = ""; // the delimiter differs per type vocabulary
                    for (String type : types) {
                        if (type.contains("DBpedia") || type.contains("Schema")) {
                            // DBpedia and Schema types use ':' separators.
                            delimiter = "\\:";
                        }
                        if (type.contains("Freebase")) {
                            // Freebase types use '/' separators.
                            delimiter = "\\/";
                        }
                        String[] typeStrings = type.split(delimiter);
                        String typeString = typeStrings[typeStrings.length - 1].toLowerCase()
                                .replaceAll("[\\_,\\%28,\\%29]", " ");
                        if (StemFlag) {
                            // Stem the type string the same way as the entity string.
                            String[] splitType = typeString.split(" ");
                            typeString = "";
                            StemmerSnow stemmer = new StemmerSnow();
                            List<String> splitTypeList = stemmer.stem(Arrays.asList(splitType));
                            StringBuilder sb = new StringBuilder();
                            for (String s : splitTypeList) {
                                sb.append(s.trim());
                                sb.append(" ");
                            }
                            typeString = sb.toString().trim();
                        }
                        if (!typesDBspot.contains(typeString)) {
                            typesDBspot.add(typeString);
                        }
                    }
                    // Record per-entity scores; @percentageOfSecondRank == -1 marks
                    // an entity with no second-ranked candidate.
                    simScore = Double.valueOf(entity.getString("@similarityScore"));
                    percOfSec = Double.valueOf(entity.getString("@percentageOfSecondRank"));
                    allEntities.add(entityString);
                    similarityScores.add(simScore);
                    supports.add(Double.valueOf(entity.getString("@support")));
                    if (percOfSec == -1.0)
                        similarityDifference.add(-1.0);
                    else
                        similarityDifference.add(simScore * (1 - percOfSec));
                    //resources.add(new DBpediaResource(entity.getString("@URI"),Integer.parseInt(entity.getString("@support"))));
                } catch (JSONException e) {
                    LOG.error("JSON exception " + e);
                }
            }
            // Calculate statistics - similarity score.
            ent_avg_score = getMean(similarityScores);
            ent_max_score = getMax(similarityScores);
            ent_min_score = getMin(similarityScores);
            ent_median_score = getMedian(similarityScores);
            ent_std_score = getStd(similarityScores);
            // Calculate statistics - support.
            ent_avg_support = getMean(supports);
            ent_max_support = getMax(supports);
            ent_min_support = getMin(supports);
            ent_median_support = getMedian(supports);
            ent_std_support = getStd(supports);
            // Calculate statistics - difference in similarity scores between
            // first and second ranked entities. Entities flagged -1 (no second
            // candidate) are counted/summed separately as "unique" entities.
            unique_ent_cnt_dbpspot = 0.0;
            unique_ent_scoreSum_dbpspot = 0;
            List<Double> tempList = new ArrayList<>();
            for (int i = 0; i < similarityDifference.size(); i++) {
                if (similarityDifference.get(i) == -1) {
                    unique_ent_cnt_dbpspot += 1;
                    unique_ent_scoreSum_dbpspot += similarityScores.get(i);
                } else {
                    tempList.add(similarityDifference.get(i));
                }
            }
            // NOTE(review): if no entities were found, allEntities.size() is 0 and
            // this division yields NaN — confirm callers tolerate that.
            unique_ent_cnt_dbpspot = unique_ent_cnt_dbpspot / allEntities.size();
            if (unique_ent_scoreSum_dbpspot == 0)
                unique_ent_scoreSum_dbpspot = -1; // sentinel: no unique entities seen
            ent_avg_dif = getMean(tempList);
            ent_max_dif = getMax(tempList);
            ent_min_dif = getMin(tempList);
            ent_median_dif = getMedian(tempList);
            ent_std_dif = getStd(tempList);
            // Calculate high precision content (only meaningful without stemming).
            if (!StemFlag) {
                high_precision_content = (double) getHighPrecContent(url_check) / allEntities.size();
            }
        }
    } catch (UnsupportedEncodingException | JSONException ex) {
        Logger.getLogger(DBpediaSpotlightClient.class.getName()).log(Level.SEVERE, null, ex);
    }
}