Example usage for org.apache.commons.io FileUtils copyURLToFile

Introduction

This page lists example usages of org.apache.commons.io.FileUtils.copyURLToFile.

Prototype

public static void copyURLToFile(URL source, File destination) throws IOException 

Document

Copies bytes from the URL source to a file destination.
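
Before the project-specific examples below, here is a minimal, self-contained sketch of a typical call. The URL and destination path are illustrative placeholders and do not come from any of the projects listed under Usage.

import java.io.File;
import java.io.IOException;
import java.net.URL;

import org.apache.commons.io.FileUtils;

public class CopyURLToFileExample {
    public static void main(String[] args) throws IOException {
        // Hypothetical source URL and destination path, used only for illustration.
        URL source = new URL("https://example.com/data/report.csv");
        File destination = new File("downloads/report.csv");

        // Copies the bytes served at the URL into the destination file,
        // creating parent directories as needed and overwriting any existing file.
        FileUtils.copyURLToFile(source, destination);
    }
}

Commons IO also provides an overload with connection and read timeouts, copyURLToFile(URL, File, int, int), which is usually preferable when downloading from slow or untrusted hosts.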

Usage

From source file: au.org.ala.delta.DeltaTestCase.java

/**
 * Copies the specified resource to a temp file, and returns that File
 * @param filename the ClassLoader relative name of the file to open.
 * @return a new File.
 * @throws IOException if the file cannot be found.
 */
protected File copyURLToFile(String filename) throws IOException {
    URL deltaFileUrl = getClass().getResource(filename);
    File tempFile = File.createTempFile("test", ".dlt");
    _tempFiles.add(tempFile);
    FileUtils.copyURLToFile(deltaFileUrl, tempFile);
    return tempFile;
}

From source file: it.iit.genomics.cru.structures.bridges.eppic.client.EppicWSClient.java

/**
 *
 * @param pdbId the PDB identifier to retrieve
 * @return the EPPIC analysis for the given PDB entry, or an empty list if it cannot be downloaded or parsed
 */
public EppicAnalysisList retrievePDB(String pdbId) {
    String localName = localPath + pdbId + ".xml";
    // Is the file already cached locally?

    try {

        File file = new File(localName);
        if (!file.exists()) {
            // Download it
            URL url = new URL(eppicUrl + pdbId);

            FileUtils.copyURLToFile(url, file);

        }

        return parse(new FileInputStream(localName));

    } catch (Exception e) {
        logger.error("Cannot download EPPIC analysis for PDB " + pdbId, e);
        return new EppicAnalysisList();
    }

}

From source file: fbpwn.plugins.core.DumpThumbnailImagesTask.java

private void processPhotos(HtmlPage photosPage, String fileDirectory, String process) throws IOException {
    Pattern photoUrlPattern = Pattern.compile("url\\(http.*\\)");
    Matcher matchedUrl = photoUrlPattern.matcher(photosPage.asXml());
    ArrayList<String> images = new ArrayList<String>();
    while (matchedUrl.find()) {
        // Strip the leading "url(" and trailing ")" to keep only the image URL
        images.add(matchedUrl.group().substring(4, matchedUrl.group().length() - 1));
    }
    for (int i = 0; i < images.size(); i++) {

        if (checkForCancel()) {
            return;
        }

        FileUtils.copyURLToFile(new URL(images.get(i)),
                new File(fileDirectory + System.getProperty("file.separator") + "Image" + (i + 1) + ".jpg"));
        setMessage(process);
        setPercentage(((double) (i + 1) / images.size()) * 100);
        getFacebookGUI().updateTaskProgress(this);
    }
}

From source file: metadata.etl.dataset.hdfs.HdfsMetadataEtl.java

private void extractLocal() throws Exception {

    URL localJarUrl = classLoader.getResource("jar/schemaFetch.jar");
    String homeDir = System.getProperty("user.home");
    String remoteJarFile = homeDir + "/.wherehows/schemaFetch.jar";
    File dest = new File(remoteJarFile);
    try {
        FileUtils.copyURLToFile(localJarUrl, dest);
    } catch (Exception e) {
        logger.error(e.toString());
    }

    String outputSchemaFile = prop.getProperty(Constant.HDFS_SCHEMA_LOCAL_PATH_KEY);
    String outputSampleDataFile = prop.getProperty(Constant.HDFS_SAMPLE_LOCAL_PATH_KEY);
    String cluster = prop.getProperty(Constant.HDFS_CLUSTER_KEY);
    String whiteList = prop.getProperty(Constant.HDFS_WHITE_LIST_KEY);
    String numOfThread = prop.getProperty(Constant.HDFS_NUM_OF_THREAD_KEY, String.valueOf(1));
    String hdfsUser = prop.getProperty(Constant.HDFS_REMOTE_USER_KEY);
    // String hdfsKeyTab = prop.getProperty(Constant.HDFS_REMOTE_KEYTAB_LOCATION_KEY);
    String hdfsExtractLogFile = outputSchemaFile + ".log";

    String[] hadoopCmd = { "hadoop", "jar", remoteJarFile,
            "-D" + Constant.HDFS_SCHEMA_REMOTE_PATH_KEY + "=" + outputSchemaFile,
            "-D" + Constant.HDFS_SAMPLE_REMOTE_PATH_KEY + "=" + outputSampleDataFile,
            "-D" + Constant.HDFS_CLUSTER_KEY + "=" + cluster,
            "-D" + Constant.HDFS_WHITE_LIST_KEY + "=" + whiteList,
            "-D" + Constant.HDFS_NUM_OF_THREAD_KEY + "=" + numOfThread,
            "-D" + Constant.HDFS_REMOTE_USER_KEY + "=" + hdfsUser, "-Dlog.file.name=hdfs_schema_fetch" };
    // delete the line (no kerberos needed): "-D" + Constant.HDFS_REMOTE_KEYTAB_LOCATION_KEY + "=" + hdfsKeyTab,
    ProcessBuilder pb = new ProcessBuilder(hadoopCmd);
    File logFile = new File(hdfsExtractLogFile);
    pb.redirectErrorStream(true);
    pb.redirectOutput(ProcessBuilder.Redirect.appendTo(logFile));
    Process process = pb.start();
    int pid = -1;
    if (process.getClass().getName().equals("java.lang.UNIXProcess")) {
        /* get the PID on unix/linux systems */
        try {
            Field f = process.getClass().getDeclaredField("pid");
            f.setAccessible(true);
            pid = f.getInt(process);
        } catch (Throwable e) {
            // Best-effort only: leave pid as -1 if the PID cannot be read reflectively.
        }
    }
    logger.info("executue command [PID=" + pid + "]: " + hadoopCmd);

    // wait until this process finished.
    int execResult = process.waitFor();

    // if the process failed, log the error and throw exception
    if (execResult > 0) {
        BufferedReader br = new BufferedReader(new InputStreamReader(process.getErrorStream()));
        String errString = "HDFS Metadata Extract Error:\n";
        String line = "";
        while ((line = br.readLine()) != null)
            errString = errString.concat(line).concat("\n");
        logger.error("*** Process  failed, status: " + execResult);
        logger.error(errString);
        throw new Exception("Process + " + pid + " failed");
    }

}

From source file: net.fabricmc.installer.installer.MultiMCInstaller.java

public static void install(File mcDir, String version, IInstallerProgress progress) throws Exception {
    File instancesDir = new File(mcDir, "instances");
    if (!instancesDir.exists()) {
        throw new FileNotFoundException(Translator.getString("install.multimc.notFound"));
    }
    progress.updateProgress(Translator.getString("install.multimc.findInstances"), 10);
    String mcVer = version.split("-")[0];
    List<File> validInstances = new ArrayList<>();
    for (File instanceDir : instancesDir.listFiles()) {
        if (instanceDir.isDirectory()) {
            if (isValidInstance(instanceDir, mcVer)) {
                validInstances.add(instanceDir);
            }
        }
    }
    if (validInstances.isEmpty()) {
        throw new Exception(Translator.getString("install.multimc.noInstances").replace("[MCVER]", mcVer));
    }
    List<String> instanceNames = new ArrayList<>();
    for (File instance : validInstances) {
        instanceNames.add(instance.getName());
    }
    String instanceName = (String) JOptionPane.showInputDialog(null,
            Translator.getString("install.multimc.selectInstance"),
            Translator.getString("install.multimc.selectInstance"), JOptionPane.QUESTION_MESSAGE, null,
            instanceNames.toArray(), instanceNames.get(0));
    if (instanceName == null) {
        progress.updateProgress(Translator.getString("install.multimc.canceled"), 100);
        return;
    }
    progress.updateProgress(
            Translator.getString("install.multimc.installingInto").replace("[NAME]", instanceName), 25);
    File instanceDir = null;
    for (File instance : validInstances) {
        if (instance.getName().equals(instanceName)) {
            instanceDir = instance;
        }
    }
    if (instanceDir == null) {
        throw new FileNotFoundException("Could not find " + instanceName);
    }
    File patchesDir = new File(instanceDir, "patches");
    if (!patchesDir.exists()) {
        patchesDir.mkdir();
    }
    File fabricJar = new File(patchesDir, "Fabric-" + version + ".jar");
    if (!fabricJar.exists()) {
        progress.updateProgress(Translator.getString("install.client.downloadFabric"), 30);
        FileUtils.copyURLToFile(new URL("http://maven.modmuss50.me/net/fabricmc/fabric-base/" + version
                + "/fabric-base-" + version + ".jar"), fabricJar);
    }
    progress.updateProgress(Translator.getString("install.multimc.createJson"), 70);
    File fabricJson = new File(patchesDir, "fabric.json");
    if (fabricJson.exists()) {
        fabricJson.delete();
    }
    String json = readBaseJson();
    json = json.replaceAll("%VERSION%", version);

    ZipFile fabricZip = new ZipFile(fabricJar);
    ZipEntry dependenciesEntry = fabricZip.getEntry("dependencies.json");
    String fabricDeps = IOUtils.toString(fabricZip.getInputStream(dependenciesEntry), Charset.defaultCharset());
    json = json.replace("%DEPS%", stripDepsJson(fabricDeps.replace("\n", "")));
    FileUtils.writeStringToFile(fabricJson, json, Charset.defaultCharset());
    fabricZip.close();
    progress.updateProgress(Translator.getString("install.success"), 100);
}

From source file: cfa.vo.iris.sdk.PluginManager.java

public void loadJar(URL url) {
    try {
        PluginJar jar = new PluginJar(url);
        jar.load();
        if (!jar.getPlugins().isEmpty()) {
            String[] paths = url.getFile().split("/");
            String name = paths[paths.length - 1];
            File dest = new File(app.getConfigurationDir() + "/components/" + name);
            if (!dest.exists())
                FileUtils.copyURLToFile(url, dest);
            jars.add(jar);
            jar.setFile(dest);
            for (IrisPlugin p : jar.getPlugins()) {
                for (IrisComponent c : p.getComponents()) {
                    for (IMenuItem item : c.getMenus()) {
                        item.consolidate(dest);
                    }
                }
            }

            PluginJarEvent.getInstance().fire(jar, SedCommand.ADDED);
        }

    } catch (Exception ex) {
        Logger.getLogger(PluginManager.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file: com.kurento.test.recorder.RecorderIT.java

private void testRecord(String handler, int statusCode) throws IOException {
    // To follow redirect: .setRedirectStrategy(new LaxRedirectStrategy())
    HttpClient client = HttpClientBuilder.create().build();
    HttpPost post = new HttpPost("http://localhost:" + getServerPort() + "/kmf-content-api-test/" + handler);
    MultipartEntityBuilder multipartEntity = MultipartEntityBuilder.create();
    multipartEntity.setMode(HttpMultipartMode.BROWSER_COMPATIBLE);

    File file = new File("small");
    URL small = new URL(VideoURLs.map.get("small-webm"));
    FileUtils.copyURLToFile(small, file);
    FileBody fb = new FileBody(file);
    multipartEntity.addPart("file", fb);

    HttpEntity httpEntity = multipartEntity.build();
    post.setEntity(httpEntity);

    EntityUtils.consume(httpEntity);
    HttpResponse response = client.execute(post);
    final int responseStatusCode = response.getStatusLine().getStatusCode();

    log.info("Response Status Code: {}", responseStatusCode);
    log.info("Deleting tmp file: {}", file.delete());

    Assert.assertEquals("HTTP response status code must be " + statusCode, statusCode, responseStatusCode);
}

From source file: hudson.cli.ClientAuthenticationCacheTest.java

@Issue("SECURITY-466")
@Test
public void login() throws Exception {
    File jar = tmp.newFile("jenkins-cli.jar");
    FileUtils.copyURLToFile(r.jenkins.getJnlpJars("jenkins-cli.jar").getURL(), jar);
    r.jenkins.setSecurityRealm(r.createDummySecurityRealm());
    r.jenkins.setAuthorizationStrategy(new FullControlOnceLoggedInAuthorizationStrategy());
    assertCLI(0, "Authenticated as: anonymous", jar, "who-am-i");
    assertCLI(0, null, jar, "login", "--username", "dev", "--password", "dev");
    try {
        assertCLI(0, "Authenticated as: dev", jar, "who-am-i");
        ClientAuthenticationCache cache = new ClientAuthenticationCache(null);
        String val = cache.props.getProperty(cache.getPropertyKey());
        assertNotNull(val);
        System.err.println(val);
        Secret s = Secret.decrypt(val);
        if (s != null && s.getPlainText().equals("dev")) {
            val = Secret.fromString("admin").getEncryptedValue();
        }
        System.err.println(val);
        val = val.replace("dev", "admin");
        System.err.println(val);
        cache.props.put(cache.getPropertyKey(), val);
        cache.save();
        assertCLI(0, "Authenticated as: anonymous", jar, "who-am-i");
    } finally {
        assertCLI(0, null, jar, "logout");
    }
}

From source file: edu.illinois.cs.cogcomp.depparse.DepAnnotator.java

@Override
public void initialize(ResourceManager rm) {
    try {
        // TODO Ugly hack: SL doesn't accept streams and can't create a file from inside a jar
        File dest = new File(TEMP_MODEL_FILE_NAME);
        String modelName = rm.getString(DepConfigurator.MODEL_NAME.key);
        URL fileURL = IOUtils.lsResources(DepAnnotator.class, modelName).get(0);
        logger.info("Loading {} into temp file: {}", modelName, TEMP_MODEL_FILE_NAME);
        FileUtils.copyURLToFile(fileURL, dest);
        model = SLModel.loadModel(TEMP_MODEL_FILE_NAME);
        ((LabeledChuLiuEdmondsDecoder) model.infSolver).loadDepRelDict();
        if (!dest.delete())
            throw new IOException("Could not delete temporary model file " + TEMP_MODEL_FILE_NAME);
    } catch (IOException | ClassNotFoundException | URISyntaxException e) {
        e.printStackTrace();
        File dest = new File(TEMP_MODEL_FILE_NAME);
        if (!dest.delete())
            throw new RuntimeException("Could not delete temporary model file " + TEMP_MODEL_FILE_NAME);
    }
}

From source file: hudson.lifecycle.WindowsServiceLifecycle.java

/**
 * If <tt>hudson.exe</tt> is old compared to our copy,
 * schedule an overwrite (except that since it's currently running,
 * we can only do it when Hudson restarts next time.)
 */
private void updateHudsonExeIfNeeded() {
    try {
        File rootDir = Hudson.getInstance().getRootDir();

        URL exe = getClass().getResource("/windows-service/hudson.exe");
        String ourCopy = Util.getDigestOf(exe.openStream());
        File currentCopy = new File(rootDir, "hudson.exe");
        if (!currentCopy.exists())
            return;
        String curCopy = new FilePath(currentCopy).digest();

        if (ourCopy.equals(curCopy))
            return; // identical

        File stage = new File(rootDir, "hudson.exe.new");
        FileUtils.copyURLToFile(exe, stage);
        Kernel32.INSTANCE.MoveFileExA(stage.getAbsolutePath(), currentCopy.getAbsolutePath(),
                MOVEFILE_DELAY_UNTIL_REBOOT | MOVEFILE_REPLACE_EXISTING);
        LOGGER.info("Scheduled a replacement of hudson.exe");
    } catch (IOException e) {
        LOGGER.log(Level.SEVERE, "Failed to replace hudson.exe", e);
    } catch (InterruptedException e) {
        LOGGER.log(Level.SEVERE, "Failed to replace hudson.exe", e);
    }
}