List of usage examples for org.apache.commons.httpclient.util URIUtil encodeQuery
public static String encodeQuery(String unescaped) throws URIException
From source file:org.soaplab.gowlab.GowlabJob.java
/************************************************************************** * Fill 'httpMethod' with the user data (from 'formData', * 'queryString' and 'fileData') and execute it. It will do the * real data fetching.//ww w . j a v a 2 s . c o m * * If the fetching finished successfully the 'httpMethod' has the * response. *************************************************************************/ protected void getResponse() throws SoaplabException { if (isGetUsed()) { // GET method... if (StringUtils.isNotEmpty(queryString)) { // ...from a query string try { httpMethod.setQueryString(URIUtil.encodeQuery(queryString)); } catch (URIException e) { httpMethod.setQueryString(queryString); } } else { // ...from name-value pairs httpMethod.setQueryString(formData); } } else if (isPostUsed()) { // POST method... // ...from name-value pairs ((PostMethod) httpMethod).setRequestBody(formData); // ...files to be uploaded if (isMultipartUsed()) { httpMethod.getParams().setBooleanParameter(HttpMethodParams.USE_EXPECT_CONTINUE, true); List<Part> parts = new ArrayList<Part>(); for (IOData io : fileData) { if (!io.getDefinition().isRegularInput()) continue; File forUpload = io.getData(); if (forUpload == null) continue; try { String tag = io.getDefinition().get(ParamDef.TAG); if (StringUtils.isEmpty(tag)) tag = io.getDefinition().id; parts.add(new FilePart(tag, forUpload)); } catch (FileNotFoundException e) { internalError("A file for uploading was not found: " + forUpload.getAbsolutePath()); } } ((PostMethod) httpMethod).setRequestEntity( new MultipartRequestEntity(parts.toArray(new Part[] {}), httpMethod.getParams())); } } // finally, execute the method try { // instantiating an HttpClient new HttpClient().executeMethod(httpMethod); } catch (HttpException e) { internalError("Fatal protocol violation: " + e.getMessage()); } catch (IOException e) { logAndThrow("Fatal transport error: " + e.getMessage()); } }
From source file:org.talend.dataprep.api.dataset.location.AbstractUrlLocation.java
/**
 * Encode and set the given url.
 *
 * @param url the url to set.
 * @throws URIException if the url cannot be encoded
 */
public void setUrl(String url) throws URIException {
    // Blank urls (null, empty or whitespace-only) are stored unchanged.
    this.url = StringUtils.isNotBlank(url) ? URIUtil.encodeQuery(url) : url;
}
From source file:org.xwiki.manager.rest.test.WikiManagerRestTest.java
@Ignore("This test doesn't seem to work correctly with HSQLDB but it actually works if run against MySQL.") @Test// w w w . ja v a 2s .c o m public void testMultiwikiSearch() throws Exception { String WIKI1_ID = "w1"; String WIKI2_ID = "w2"; String PAGE_SPACE = "Main"; String PAGE_NAME = "Test"; String PAGE1_STRING = "foo"; String PAGE2_STRING = "bar"; Wiki wiki = objectFactory.createWiki(); wiki.setId(WIKI1_ID); PostMethod postMethod = executePost(getFullUri(WikiManagerResource.class), "superadmin", "pass", wiki); Assert.assertEquals(HttpStatus.SC_CREATED, postMethod.getStatusCode()); wiki = objectFactory.createWiki(); wiki.setId(WIKI2_ID); postMethod = executePost(getFullUri(WikiManagerResource.class), "superadmin", "pass", wiki); Assert.assertEquals(HttpStatus.SC_CREATED, postMethod.getStatusCode()); /* Store the page */ Page page1 = objectFactory.createPage(); page1.setTitle(PAGE1_STRING); page1.setContent(PAGE1_STRING); PutMethod putMethod = executePut( getUriBuilder(PageResource.class).build(WIKI1_ID, PAGE_SPACE, PAGE_NAME).toString(), "superadmin", "pass", page1); Assert.assertEquals(HttpStatus.SC_CREATED, putMethod.getStatusCode()); page1 = (Page) unmarshaller.unmarshal(putMethod.getResponseBodyAsStream()); /* Retrieve the page to check that it exists */ GetMethod getMethod = executeGet( getUriBuilder(PageResource.class).build(WIKI1_ID, PAGE_SPACE, PAGE_NAME).toString()); Assert.assertEquals(HttpStatus.SC_OK, getMethod.getStatusCode()); Page page = (Page) unmarshaller.unmarshal(getMethod.getResponseBodyAsStream()); Assert.assertEquals(WIKI1_ID, page.getWiki()); Assert.assertEquals(PAGE_SPACE, page.getSpace()); Assert.assertEquals(PAGE_NAME, page.getName()); Assert.assertEquals(PAGE1_STRING, page.getTitle()); Assert.assertEquals(PAGE1_STRING, page.getContent()); Assert.assertEquals(page1.getCreated(), page.getCreated()); Assert.assertEquals(page1.getModified(), page.getModified()); /* Store the page */ Page page2 = objectFactory.createPage(); 
page2.setTitle(PAGE2_STRING); page2.setContent(PAGE2_STRING); putMethod = executePut(getUriBuilder(PageResource.class).build(WIKI2_ID, PAGE_SPACE, PAGE_NAME).toString(), "superadmin", "pass", page2); Assert.assertEquals(HttpStatus.SC_CREATED, putMethod.getStatusCode()); page2 = (Page) unmarshaller.unmarshal(putMethod.getResponseBodyAsStream()); /* Retrieve the page to check that it exists */ getMethod = executeGet(getUriBuilder(PageResource.class).build(WIKI2_ID, PAGE_SPACE, PAGE_NAME).toString()); Assert.assertEquals(HttpStatus.SC_OK, getMethod.getStatusCode()); page = (Page) unmarshaller.unmarshal(getMethod.getResponseBodyAsStream()); Assert.assertEquals(WIKI2_ID, page.getWiki()); Assert.assertEquals(PAGE_SPACE, page.getSpace()); Assert.assertEquals(PAGE_NAME, page.getName()); Assert.assertEquals(PAGE2_STRING, page.getTitle()); Assert.assertEquals(PAGE2_STRING, page.getContent()); Assert.assertEquals(page2.getCreated(), page.getCreated()); Assert.assertEquals(page2.getModified(), page.getModified()); /* Wait a bit that the Lucene Indexer indexes the pages. 
*/ Thread.sleep(5000); getMethod = executeGet(URIUtil.encodeQuery( String.format("%s?q=\"%s\"&wikis=w1,w2", getFullUri(WikisSearchQueryResource.class), PAGE_NAME))); Assert.assertEquals(HttpStatus.SC_OK, getMethod.getStatusCode()); SearchResults searchResults = (SearchResults) unmarshaller.unmarshal(getMethod.getResponseBodyAsStream()); Assert.assertEquals(2, searchResults.getSearchResults().size()); for (SearchResult searchResult : searchResults.getSearchResults()) { Page pageToBeCheckedAgainst = null; if (searchResult.getWiki().equals(WIKI1_ID)) { pageToBeCheckedAgainst = page1; } else { pageToBeCheckedAgainst = page2; } Assert.assertEquals(pageToBeCheckedAgainst.getWiki(), searchResult.getWiki()); Assert.assertEquals(pageToBeCheckedAgainst.getTitle(), searchResult.getTitle()); Assert.assertEquals(pageToBeCheckedAgainst.getAuthor(), searchResult.getAuthor()); Assert.assertEquals(pageToBeCheckedAgainst.getModified(), searchResult.getModified()); Assert.assertEquals(pageToBeCheckedAgainst.getVersion(), searchResult.getVersion()); } }
From source file:org.xwiki.test.rest.WikisResourceTest.java
@Test public void testHQLQuerySearch() throws Exception { GetMethod getMethod = executeGet(//w ww . j a va 2 s . c o m URIUtil.encodeQuery(String.format("%s?q=where doc.name='WebHome' order by doc.space desc&type=hql", buildURI(WikiSearchQueryResource.class, getWiki())))); Assert.assertEquals(getHttpMethodInfo(getMethod), HttpStatus.SC_OK, getMethod.getStatusCode()); SearchResults searchResults = (SearchResults) unmarshaller.unmarshal(getMethod.getResponseBodyAsStream()); int resultSize = searchResults.getSearchResults().size(); Assert.assertTrue(String.format("Found %s results", resultSize), resultSize >= 1); // Verify that some WebHomes we expect are found. int foundCounter = 0; List<String> expectedWebHomes = Arrays.asList("ColorThemes.WebHome", "Stats.WebHome", "Sandbox.WebHome", "Panels.WebHome", "Scheduler.WebHome", "Sandbox.WebHome", "XWiki.WebHome"); for (SearchResult searchResult : searchResults.getSearchResults()) { checkLinks(searchResult); if (expectedWebHomes.contains(searchResult.getPageFullName())) { foundCounter++; } Assert.assertTrue(searchResult.getPageFullName().endsWith(".WebHome")); } // Note: since we can have translations, the number of found pages can be greater than the expected size. Assert.assertTrue("Some WebHome pages were not found!", foundCounter >= expectedWebHomes.size()); Assert.assertEquals("XWiki.WebHome", searchResults.getSearchResults().get(0).getPageFullName()); }
From source file:org.xwiki.test.rest.WikisResourceTest.java
@Test public void testHQLQuerySearchWithClassnameAuthenticated() throws Exception { GetMethod getMethod = executeGet(// w w w . j a v a 2 s. c om URIUtil.encodeQuery(String.format( "%s?q=where doc.space='XWiki' and doc.name='Admin'&type=hql&className=XWiki.XWikiUsers", buildURI(WikiSearchQueryResource.class, getWiki()))), TestUtils.ADMIN_CREDENTIALS.getUserName(), TestUtils.ADMIN_CREDENTIALS.getPassword()); Assert.assertEquals(getHttpMethodInfo(getMethod), HttpStatus.SC_OK, getMethod.getStatusCode()); SearchResults searchResults = (SearchResults) unmarshaller.unmarshal(getMethod.getResponseBodyAsStream()); int resultSize = searchResults.getSearchResults().size(); Assert.assertTrue(String.format("Found %s results", resultSize), resultSize == 1); Assert.assertNotNull(searchResults.getSearchResults().get(0).getObject()); }
From source file:org.xwiki.test.rest.WikisResourceTest.java
/**
 * Without authentication the classname filter must NOT expose the matching
 * object on the result.
 */
@Test
public void testHQLQuerySearchWithClassnameNotAuthenticated() throws Exception {
    String query = String.format(
            "%s?q=where doc.space='XWiki' and doc.name='Admin'&type=hql&classname=XWiki.XWikiUsers",
            buildURI(WikiSearchQueryResource.class, getWiki()));
    GetMethod getMethod = executeGet(URIUtil.encodeQuery(query));
    Assert.assertEquals(getHttpMethodInfo(getMethod), HttpStatus.SC_OK, getMethod.getStatusCode());

    SearchResults searchResults = (SearchResults) unmarshaller.unmarshal(getMethod.getResponseBodyAsStream());

    int resultSize = searchResults.getSearchResults().size();
    Assert.assertTrue(String.format("Found %s results", resultSize), resultSize == 1);
    Assert.assertNull(searchResults.getSearchResults().get(0).getObject());
}
From source file:org.xwiki.test.rest.WikisResourceTest.java
/**
 * A SOLR search for "support" on the current wiki should find exactly the
 * Sandbox home page.
 */
@Test
public void testSolrSearch() throws Exception {
    // Give the SOLR indexer time to index the wiki pages.
    waitSOLRIndex();

    String query = String.format("%s?q=\"support\"&type=solr", buildURI(WikiSearchQueryResource.class, getWiki()));
    GetMethod getMethod = executeGet(URIUtil.encodeQuery(query));
    Assert.assertEquals(getHttpMethodInfo(getMethod), HttpStatus.SC_OK, getMethod.getStatusCode());

    SearchResults searchResults = (SearchResults) unmarshaller.unmarshal(getMethod.getResponseBodyAsStream());

    int resultSize = searchResults.getSearchResults().size();
    Assert.assertTrue(String.format("Found %s results", resultSize), resultSize == 1);
    Assert.assertEquals("Sandbox.WebHome", searchResults.getSearchResults().get(0).getPageFullName());
}
From source file:org.xwiki.test.rest.WikisResourceTest.java
/**
 * A global (all-wikis) search for "support" should find exactly the
 * Sandbox home page.
 */
@Test
public void testGlobalSearch() throws Exception {
    // Give the SOLR indexer time to index the wiki pages.
    waitSOLRIndex();

    String query = String.format("%s?q=\"support\"", buildURI(WikisSearchQueryResource.class, getWiki()));
    GetMethod getMethod = executeGet(URIUtil.encodeQuery(query));
    Assert.assertEquals(getHttpMethodInfo(getMethod), HttpStatus.SC_OK, getMethod.getStatusCode());

    SearchResults searchResults = (SearchResults) unmarshaller.unmarshal(getMethod.getResponseBodyAsStream());

    int resultSize = searchResults.getSearchResults().size();
    Assert.assertTrue(String.format("Found %s results", resultSize), resultSize == 1);
    Assert.assertEquals("Sandbox.WebHome", searchResults.getSearchResults().get(0).getPageFullName());
}
From source file:org.xwiki.wiki.test.ui.WikiManagerRestTest.java
@Ignore("This test doesn't seem to work correctly with HSQLDB but it actually works if run against MySQL.")
@Test
public void testMultiwikiSearch() throws Exception {
    String WIKI1_ID = "w1";
    String WIKI2_ID = "w2";
    String PAGE_SPACE = "Main";
    String PAGE_NAME = "Test";
    String PAGE1_STRING = "foo";
    String PAGE2_STRING = "bar";

    // Create the two wikis the search will span.
    Wiki wiki = objectFactory.createWiki();
    wiki.setId(WIKI1_ID);
    PostMethod postMethod = executePost(getFullUri(WikiManagerREST.class), "superadmin", "pass", wiki);
    Assert.assertEquals(HttpStatus.SC_CREATED, postMethod.getStatusCode());

    wiki = objectFactory.createWiki();
    wiki.setId(WIKI2_ID);
    postMethod = executePost(getFullUri(WikiManagerREST.class), "superadmin", "pass", wiki);
    Assert.assertEquals(HttpStatus.SC_CREATED, postMethod.getStatusCode());

    // Store a page in the first wiki.
    Page page1 = objectFactory.createPage();
    page1.setTitle(PAGE1_STRING);
    page1.setContent(PAGE1_STRING);
    PutMethod putMethod = executePut(
            getUriBuilder(PageResource.class).build(WIKI1_ID, PAGE_SPACE, PAGE_NAME).toString(), "superadmin",
            "pass", page1);
    Assert.assertEquals(HttpStatus.SC_CREATED, putMethod.getStatusCode());
    page1 = (Page) unmarshaller.unmarshal(putMethod.getResponseBodyAsStream());

    // Retrieve it to check that it exists.
    GetMethod getMethod = executeGet(
            getUriBuilder(PageResource.class).build(WIKI1_ID, PAGE_SPACE, PAGE_NAME).toString());
    Assert.assertEquals(HttpStatus.SC_OK, getMethod.getStatusCode());
    Page page = (Page) unmarshaller.unmarshal(getMethod.getResponseBodyAsStream());
    Assert.assertEquals(WIKI1_ID, page.getWiki());
    Assert.assertEquals(PAGE_SPACE, page.getSpace());
    Assert.assertEquals(PAGE_NAME, page.getName());
    Assert.assertEquals(PAGE1_STRING, page.getTitle());
    Assert.assertEquals(PAGE1_STRING, page.getContent());
    Assert.assertEquals(page1.getCreated(), page.getCreated());
    Assert.assertEquals(page1.getModified(), page.getModified());

    // Store a page in the second wiki.
    Page page2 = objectFactory.createPage();
    page2.setTitle(PAGE2_STRING);
    page2.setContent(PAGE2_STRING);
    putMethod = executePut(getUriBuilder(PageResource.class).build(WIKI2_ID, PAGE_SPACE, PAGE_NAME).toString(),
            "superadmin", "pass", page2);
    Assert.assertEquals(HttpStatus.SC_CREATED, putMethod.getStatusCode());
    page2 = (Page) unmarshaller.unmarshal(putMethod.getResponseBodyAsStream());

    // Retrieve it to check that it exists.
    getMethod = executeGet(getUriBuilder(PageResource.class).build(WIKI2_ID, PAGE_SPACE, PAGE_NAME).toString());
    Assert.assertEquals(HttpStatus.SC_OK, getMethod.getStatusCode());
    page = (Page) unmarshaller.unmarshal(getMethod.getResponseBodyAsStream());
    Assert.assertEquals(WIKI2_ID, page.getWiki());
    Assert.assertEquals(PAGE_SPACE, page.getSpace());
    Assert.assertEquals(PAGE_NAME, page.getName());
    Assert.assertEquals(PAGE2_STRING, page.getTitle());
    Assert.assertEquals(PAGE2_STRING, page.getContent());
    Assert.assertEquals(page2.getCreated(), page.getCreated());
    Assert.assertEquals(page2.getModified(), page.getModified());

    // Wait a bit so the Lucene Indexer indexes the pages.
    Thread.sleep(5000);

    // Search both wikis and check that each page is found once.
    getMethod = executeGet(URIUtil.encodeQuery(
            String.format("%s?q=\"%s\"&wikis=w1,w2", getFullUri(WikisSearchQueryResource.class), PAGE_NAME)));
    Assert.assertEquals(HttpStatus.SC_OK, getMethod.getStatusCode());
    SearchResults searchResults = (SearchResults) unmarshaller.unmarshal(getMethod.getResponseBodyAsStream());
    Assert.assertEquals(2, searchResults.getSearchResults().size());
    for (SearchResult searchResult : searchResults.getSearchResults()) {
        Page pageToBeCheckedAgainst = searchResult.getWiki().equals(WIKI1_ID) ? page1 : page2;
        Assert.assertEquals(pageToBeCheckedAgainst.getWiki(), searchResult.getWiki());
        Assert.assertEquals(pageToBeCheckedAgainst.getTitle(), searchResult.getTitle());
        Assert.assertEquals(pageToBeCheckedAgainst.getAuthor(), searchResult.getAuthor());
        Assert.assertEquals(pageToBeCheckedAgainst.getModified(), searchResult.getModified());
        Assert.assertEquals(pageToBeCheckedAgainst.getVersion(), searchResult.getVersion());
    }
}
From source file:org.zaproxy.zap.spider.URLCanonicalizer.java
/** * Gets the canonical url, starting from a relative or absolute url found in a given context (baseURL). * /* www . ja va2s . co m*/ * @param url the url string defining the reference * @param baseURL the context in which this url was found * @return the canonical url */ public static String getCanonicalURL(String url, String baseURL) { try { /* Build the absolute URL, from the url and the baseURL */ String resolvedURL = URLResolver.resolveUrl(baseURL == null ? "" : baseURL, url); log.debug("Resolved URL: " + resolvedURL); URI canonicalURI; try { canonicalURI = new URI(resolvedURL); } catch (Exception e) { canonicalURI = new URI(URIUtil.encodeQuery(resolvedURL)); } /* Some checking. */ if (canonicalURI.getScheme() == null) { throw new MalformedURLException("Protocol could not be reliably evaluated from uri: " + canonicalURI + " and base url: " + baseURL); } if (canonicalURI.getRawAuthority() == null) { log.debug("Ignoring URI with no authority (host[\":\"port]): " + canonicalURI); return null; } if (canonicalURI.getHost() == null) { throw new MalformedURLException("Host could not be reliably evaluated from: " + canonicalURI); } /* * Normalize: no empty segments (i.e., "//"), no segments equal to ".", and no segments equal to * ".." that are preceded by a segment not equal to "..". */ String path = canonicalURI.normalize().getRawPath(); /* Convert '//' -> '/' */ int idx = path.indexOf("//"); while (idx >= 0) { path = path.replace("//", "/"); idx = path.indexOf("//"); } /* Drop starting '/../' */ while (path.startsWith("/../")) { path = path.substring(3); } /* Trim */ path = path.trim(); /* Process parameters and sort them. */ final SortedMap<String, String> params = createParameterMap(canonicalURI.getRawQuery()); final String queryString; String canonicalParams = canonicalize(params); queryString = (canonicalParams.isEmpty() ? "" : "?" 
+ canonicalParams); /* Add starting slash if needed */ if (path.length() == 0) { path = "/" + path; } /* Drop default port: example.com:80 -> example.com */ int port = canonicalURI.getPort(); if (port == 80) { port = -1; } /* Lowercasing protocol and host */ String protocol = canonicalURI.getScheme().toLowerCase(); String host = canonicalURI.getHost().toLowerCase(); String pathAndQueryString = normalizePath(path) + queryString; URL result = new URL(protocol, host, port, pathAndQueryString); return result.toExternalForm(); } catch (Exception ex) { log.warn("Error while Processing URL in the spidering process (on base " + baseURL + "): " + ex.getMessage()); return null; } }