Example usage for org.apache.commons.io FileUtils copyURLToFile

List of usage examples for org.apache.commons.io FileUtils copyURLToFile

Introduction

On this page you can find example usage for org.apache.commons.io FileUtils copyURLToFile.

Prototype

public static void copyURLToFile(URL source, File destination) throws IOException 

Source Link

Document

Copies bytes from the URL source to a file destination.

Usage

From source file:no.dusken.aranea.admin.control.ImportStvMediaController.java

/**
 * Downloads the STV podcast feed, parses each {@code <item>} element into a
 * MediaResource and persists any resource not already in the database.
 *
 * @param httpServletRequest  the incoming request (unused)
 * @param httpServletResponse the response (unused)
 * @return a redirect to the application root in all cases
 * @throws Exception if the feed cannot be downloaded or parsed
 */
public ModelAndView handleRequest(HttpServletRequest httpServletRequest,
        HttpServletResponse httpServletResponse) throws Exception {
    // Guard against concurrent imports. The check-and-set is synchronized
    // because two simultaneous requests could otherwise both pass the check.
    synchronized (this) {
        if (isImporting) {
            log.error("import already running");
            return new ModelAndView("redirect:/");
        }
        isImporting = true;
    }

    try {
        /*
         * parses the podcastfeed from stv to mediaResource
         */
        URI uri = new URI(feedUrl);
        URL url = uri.toURL();
        File file = new File(cacheDirectory + "/" + "feed.xml");
        FileUtils.copyURLToFile(url, file);
        Document doc = parseXmlFile(file);
        // get the root element
        Element docEle = doc.getDocumentElement();

        // get a nodelist of <item> elements
        NodeList nl = docEle.getElementsByTagName("item");
        if (nl != null && nl.getLength() > 0) {
            for (int i = 0; i < nl.getLength(); i++) {

                // get the item element
                Element el = (Element) nl.item(i);
                // get the mediaresource
                MediaResource mr = getMediaResource(el);

                // only persist resources we have not seen before
                if (!isInDb(mr)) {
                    mediaResourceService.saveOrUpdate(mr);
                    log.info("Imported media, url: {}", mr.getUrlToResource());
                }
            }
        }
        return new ModelAndView("redirect:/");
    } finally {
        // BUG FIX: the original never cleared this flag, so a single run
        // (successful or not) blocked every future import permanently.
        isImporting = false;
    }
}

From source file:no.dusken.aranea.service.StoreImageServiceImpl.java

/**
 * Fetches the image at the given URL into the temporary image directory
 * and delegates to {@link #createImage(File)} to store it.
 *
 * @param url location of the image to download
 * @return the stored Image entity
 * @throws IOException if the download fails
 */
public Image createImage(URL url) throws IOException {
    // Mirror the URL's file part under imageDirectory/tmp before importing.
    File downloadTarget = new File(imageDirectory + "/tmp/" + url.getFile());
    FileUtils.copyURLToFile(url, downloadTarget);
    return createImage(downloadTarget);
}

From source file:no.dusken.aranea.web.control.RssCacheController.java

/**
 * Refreshes every configured RSS feed: each feed is downloaded to a
 * temporary file, parsed, cached in memory, and mirrored to the disk
 * cache. If the download or parse fails, the previously stored disk
 * copy is used as a fallback.
 */
public void updateFeeds() {
    for (String key : feeds.keySet()) {
        String url = feeds.get(key);
        File tempFile = null;
        try {
            tempFile = File.createTempFile(key, "xml");
            // download the feed
            FileUtils.copyURLToFile(new URL(url), tempFile);
            log.info("loading new rss from url: {}", url);
            // try to parse it
            SyndFeed feed = getFeed(tempFile);
            // cache to memory
            cacheMap.put(url, feed);
            // saving the feed in disk cache
            FileUtils.copyFile(tempFile, new File(cacheDirectory + "/" + url));
        } catch (Exception e) {
            log.error("Could not download rss or rss is invalid, using stored file", e);
            // BUG FIX: the original reassigned the temp-file variable to the
            // disk-cache file here and then deleted it in BOTH finally blocks,
            // destroying the very cache file the fallback relies on. Use a
            // separate variable and never delete the cache copy.
            File cachedFile = new File(cacheDirectory + "/" + url);
            try {
                SyndFeed feed = getFeed(cachedFile);
                cacheMap.put(url, feed);
            } catch (FeedException e1) {
                // BUG FIX: log the parse failure itself, not the outer exception
                log.error("Epic Fail, could not parse old file", e1);
            } catch (IOException e1) {
                log.error("All aboard the failboat, the stored file was bad or missing. Poor thing", e1);
            }
        } finally {
            // delete only the temporary download, exactly once
            deleteTempFile(tempFile);
        }
    }
}

From source file:npanday.plugin.partcover.PartCoverConverterMojo.java

/**
 * Copies a classpath resource into the configured output directory.
 *
 * @param resourcePath classpath location of the resource to copy
 * @param file         destination file name inside outputDirectory
 * @return the file that was written
 * @throws MojoExecutionException if the resource is missing or cannot be copied
 */
private File copyFileToOutputDirectory(String resourcePath, String file) throws MojoExecutionException {
    URL resourceUrl = getClass().getResource(resourcePath);
    // getResource returns null when the resource is absent; fail with a
    // clear message instead of the NullPointerException copyURLToFile throws.
    if (resourceUrl == null) {
        throw new MojoExecutionException("Resource not found on classpath: " + resourcePath);
    }
    File outputFile = new File(outputDirectory, file);

    try {
        FileUtils.copyURLToFile(resourceUrl, outputFile);
    } catch (IOException e) {
        throw new MojoExecutionException("Unable to copy file to " + outputFile, e);
    }

    return outputFile;
}

From source file:org.ado.minesync.github.mock.MockMinecraftWorldFactory.java

/**
 * Copies the bundled mock world archive ("bbbb-see-view.zip") into the
 * application cache directory under the given name. The source archive is
 * fixed; only the destination file name varies.
 *
 * @param filename name to give the copied archive inside APPLICATION_CACHE_DIR
 * @return the file the archive was copied to
 * @throws IOException if the copy fails
 */
public static File getMockZippedWorld(String filename) throws IOException {
    File destination = new File(APPLICATION_CACHE_DIR, filename);
    FileUtils.copyURLToFile(getResource("bbbb-see-view.zip"), destination);
    return destination;
}

From source file:org.ado.minesync.github.TestFileUtils.java

/**
 * Copies the classpath resource at {@code filePath} into {@code directory},
 * keeping only the resource's base file name.
 *
 * @param directory target directory
 * @param filePath  classpath location of the resource to copy
 * @throws IOException if the copy fails
 */
public static void addFile(File directory, String filePath) throws IOException {
    File destination = new File(directory, getFilename(filePath));
    FileUtils.copyURLToFile(getResource(filePath), destination);
}

From source file:org.ala.harvester.WikipediaImageHarvester.java

/**
 * Harvests species images referenced in the DBPedia "depiction" triple dump.
 * Downloads and decompresses the dump (unless already present), then scans
 * each line: names that look like binomials/trinomials are matched against
 * the name index (scientific first, then common name), validated against the
 * DBPedia page, and the associated Wikipedia image pages are harvested.
 *
 * @param infosourceId id of the infosource to record harvested images against
 * @param timeGap      unused by this harvester
 * @throws Exception if the dump cannot be downloaded, decompressed, or read
 */
@Override
public void start(int infosourceId, int timeGap) throws Exception {

    // name index used to resolve DBPedia resource names to taxa
    CBIndexSearch nameIndex = new CBIndexSearch("/data/lucene/namematching");

    //download the images file from DBPedia
    if (!downloaded) {
        System.out.println("Downloading NT triple dump from DBPedia..." + dbpediaImagesFile);
        File bzipFile = new File("/data/images_en.nt.bz2");
        FileUtils.copyURLToFile(new URL(dbpediaImagesFile), bzipFile);
        System.out.println("Downloaded.");

        //decompress
        System.out.println("Decompressing.....");
        FileInputStream in = new FileInputStream(bzipFile);
        BZip2CompressorInputStream bzIn = new BZip2CompressorInputStream(in);
        FileOutputStream out = new FileOutputStream("/data/images_en.nt");
        try {
            final byte[] buffer = new byte[1048576];
            int n;
            while (-1 != (n = bzIn.read(buffer))) {
                out.write(buffer, 0, n);
            }
        } finally {
            // BUG FIX: close the streams even if the copy throws; closing
            // bzIn also closes the underlying FileInputStream.
            out.close();
            bzIn.close();
        }
        System.out.println("Decompressed.");
    }

    //iterate through each line
    BufferedReader reader = new BufferedReader(new FileReader("/data/images_en.nt"));
    try {
        String line = reader.readLine();
        while (line != null) {

            if (line.contains("depiction")) {
                String[] parts = line.split(" ");

                String dbpediaUrl = parts[0].replaceAll(">", "").replaceAll("<", "");
                String resourceName = parts[0].substring(parts[0].lastIndexOf('/') + 1);
                String nameToMatch = resourceName.replace(">", "").replaceAll("_", " ").trim();

                try {

                    //name must be a binomial or trinomial
                    if (nameToMatch.contains("-") || nameToMatch.contains(" ")) {

                        //only match things that look like binomials or trinomials
                        NameSearchResult nsr = null;

                        try {
                            // BUG FIX: the original discarded this result, so
                            // nsr stayed null and every name fell through to
                            // the common-name search.
                            nsr = nameIndex.searchForRecord(nameToMatch, null);
                        } catch (HomonymException he) {
                            // ambiguous name: leave nsr null and fall through
                            // to the common-name search below
                        }
                        if (nsr == null) {
                            //search for common name
                            nsr = nameIndex.searchForCommonName(nameToMatch);
                        }

                        if (nsr != null
                                && (RankType.SPECIES.equals(nsr.getRank())
                                        || RankType.SUBSPECIES.equals(nsr.getRank()))
                                && nsr.getLsid() != null && nsr.getLsid().contains("biodiversity.org.au")
                                && nsr.getRankClassification().getScientificName() != null
                                && nsr.getRankClassification().getScientificName().contains(" ")) {

                            //validate the match against the DBPedia page content
                            String dbpediaPage = WebUtils.getUrlContentAsString(dbpediaUrl);
                            if (dbpediaPage.contains("http://dbpedia.org/ontology/genus")
                                    || dbpediaPage.contains("http://dbpedia.org/ontology/species")
                                    || dbpediaPage.contains("http://dbpedia.org/property/genus")
                                    || dbpediaPage.contains("http://dbpedia.org/property/species")
                                    || dbpediaPage.contains("http://dbpedia.org/property/binomial")
                                    || dbpediaPage.contains("http://dbpedia.org/ontology/phylum")) {
                                System.out.println("URL: " + dbpediaUrl + ", matched string: " + nameToMatch
                                        + ", to " + nsr.getRank().toString() + ": "
                                        + nsr.getRankClassification().getScientificName());

                                //TODO
                                //download image full res image
                                //download wikipedia page for image e.g. http://en.wikipedia.org/wiki/File:Kangur.rudy.drs.jpg
                                //retrieve creator, rights, licence, date
                                //save to repository
                                String wikiPageUrl = getWikiPageUrl(dbpediaPage);

                                if (wikiPageUrl != null && !"".equals(wikiPageUrl)) {
                                    List<String> imagePageUrlList = getImagePageUrlList(wikiPageUrl);
                                    harvestImagePages(imagePageUrlList, nsr, infosourceId);
                                }

                            } else {
                                System.out.println(
                                        "False positive for " + "http://en.wikipedia.org/wiki/" + resourceName);
                            }
                        }
                    }
                } catch (Exception e) {
                    // best-effort harvest: a bad line must not stop the scan
                    e.printStackTrace();
                }
            }

            line = reader.readLine();
        }
    } finally {
        // BUG FIX: the reader was never closed in the original
        reader.close();
    }

    System.out.println("Finished.");
}

From source file:org.apache.accumulo.minicluster.MiniAccumuloClusterTest.java

/**
 * Verifies per-table classpath support: attaches FooFilter (loaded from a
 * table-scoped VFS context) to table2 and checks that rows containing "foo"
 * are filtered out of scans.
 */
@Test(timeout = 60000)
public void testPerTableClasspath() throws Exception {

    Connector conn = accumulo.getConnector("root", "superSecret");

    conn.tableOperations().create("table2");

    // stage the filter jar where the VFS context can load it
    File jarFile = folder.newFile("iterator.jar");
    FileUtils.copyURLToFile(this.getClass().getResource("/FooFilter.jar"), jarFile);

    // point context "cx1" at the jar and bind table2 to that context
    conn.instanceOperations().setProperty(Property.VFS_CONTEXT_CLASSPATH_PROPERTY.getKey() + "cx1",
            jarFile.toURI().toString());
    conn.tableOperations().setProperty("table2", Property.TABLE_CLASSPATH.getKey(), "cx1");
    conn.tableOperations().attachIterator("table2",
            new IteratorSetting(100, "foocensor", "org.apache.accumulo.test.FooFilter"));

    BatchWriter writer = conn.createBatchWriter("table2", new BatchWriterConfig());

    // one row the filter should censor...
    Mutation censored = new Mutation("foo");
    censored.put("cf1", "cq1", "v2");
    censored.put("cf1", "cq2", "v3");
    writer.addMutation(censored);

    // ...and one it should keep
    Mutation kept = new Mutation("bar");
    kept.put("cf1", "cq1", "v6");
    kept.put("cf1", "cq2", "v7");
    writer.addMutation(kept);

    writer.close();

    Scanner scanner = conn.createScanner("table2", new Authorizations());

    int entriesSeen = 0;
    for (Entry<Key, Value> entry : scanner) {
        Assert.assertFalse(entry.getKey().getRowData().toString().toLowerCase().contains("foo"));
        entriesSeen++;
    }

    // only the two "bar" entries survive the filter
    Assert.assertEquals(2, entriesSeen);

    // clean up the context and the table
    conn.instanceOperations().removeProperty(Property.VFS_CONTEXT_CLASSPATH_PROPERTY.getKey() + "cx1");
    conn.tableOperations().delete("table2");
}

From source file:org.apache.accumulo.start.classloader.vfs.AccumuloReloadingVFSClassLoaderTest.java

/**
 * Prepares the reloading-classloader fixture: silences noisy logging,
 * obtains the shared VFS manager, creates a scratch directory, and stages
 * HelloWorld.jar inside it for the tests to load.
 */
@Before
public void setup() throws Exception {
    Logger.getRootLogger().setLevel(Level.ERROR);

    vfs = ContextManagerTest.getVFS();

    folder1.create();
    // the classloader watches everything under the scratch directory
    folderPath = folder1.getRoot().toURI().toString() + ".*";

    URL helloWorldJar = this.getClass().getResource("/HelloWorld.jar");
    FileUtils.copyURLToFile(helloWorldJar, folder1.newFile("HelloWorld.jar"));
}

From source file:org.apache.accumulo.start.classloader.vfs.AccumuloReloadingVFSClassLoaderTest.java

/**
 * Verifies that the reloading classloader picks up jar changes: loads
 * test.HelloWorld, deletes its jar, drops in a replacement jar, and checks
 * that the class is subsequently served by a fresh classloader.
 */
@Test
public void testReloading() throws Exception {
    FileObject testDir = vfs.resolveFile(folder1.getRoot().toURI().toString());
    FileObject[] dirContents = testDir.getChildren();

    AccumuloReloadingVFSClassLoader arvcl = new AccumuloReloadingVFSClassLoader(folderPath, vfs,
            new ReloadingClassLoader() {
                @Override
                public ClassLoader getClassLoader() {
                    return ClassLoader.getSystemClassLoader();
                }
            }, 1000, true);

    // the classloader should see exactly the directory's current contents
    FileObject[] loaderFiles = ((VFSClassLoader) arvcl.getClassLoader()).getFileObjects();
    Assert.assertArrayEquals(createFileSystems(dirContents), loaderFiles);

    Class<?> initialClass = arvcl.getClassLoader().loadClass("test.HelloWorld");
    Object initialInstance = initialClass.newInstance();
    Assert.assertEquals("Hello World!", initialInstance.toString());

    // Check that the class is the same before the update
    Class<?> reloadedBeforeChange = arvcl.getClassLoader().loadClass("test.HelloWorld");
    Assert.assertEquals(initialClass, reloadedBeforeChange);

    assertTrue(new File(folder1.getRoot(), "HelloWorld.jar").delete());

    // VFS-487 significantly wait to avoid failure
    Thread.sleep(7000);

    // Update the class
    FileUtils.copyURLToFile(this.getClass().getResource("/HelloWorld.jar"), folder1.newFile("HelloWorld2.jar"));

    // Wait for the monitor to notice
    // VFS-487 significantly wait to avoid failure
    Thread.sleep(7000);

    Class<?> updatedClass = arvcl.getClassLoader().loadClass("test.HelloWorld");
    Object updatedInstance = updatedClass.newInstance();
    Assert.assertEquals("Hello World!", updatedInstance.toString());

    // This is false because they are loaded by a different classloader
    Assert.assertFalse(initialClass.equals(updatedClass));
    Assert.assertFalse(initialInstance.equals(updatedInstance));

    arvcl.close();
}