Example usage for org.apache.commons.io FileUtils copyURLToFile

Introduction

On this page you can find example usages of org.apache.commons.io.FileUtils.copyURLToFile, collected from open-source projects.

Prototype

public static void copyURLToFile(URL source, File destination) throws IOException 
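
Commons IO also provides an overload that accepts connection and read timeouts (in milliseconds); for remote URLs it is usually preferable, because the two-argument form can block indefinitely on a stalled connection:

public static void copyURLToFile(URL source, File destination, int connectionTimeoutMillis, int readTimeoutMillis) throws IOException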

Document

Copies bytes from the URL source to a file destination.
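
A minimal, self-contained sketch of the basic call (the URL and destination path below are placeholders): parent directories of the destination are created if they do not exist, and an existing destination file is overwritten.

import java.io.File;
import java.io.IOException;
import java.net.URL;

import org.apache.commons.io.FileUtils;

public class CopyURLToFileExample {
    public static void main(String[] args) throws IOException {
        URL source = new URL("https://example.com/readme.txt"); // placeholder URL
        File destination = new File("downloads/readme.txt");    // placeholder path
        // Creates downloads/ if needed and overwrites any existing file.
        FileUtils.copyURLToFile(source, destination);
        System.out.println("Copied " + destination.length() + " bytes");
    }
}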

Usage

From source file:org.apache.geode.internal.statistics.StatArchiveWithConsecutiveResourceInstGeneratorTest.java

@Before
public void setUpGeneratorTest() throws Exception {
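    // Stage the archive resource from the test classpath into a fresh temp folder.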
    URL url = getClass().getResource(ARCHIVE_FILE_NAME);
    File testFolder = this.temporaryFolder.newFolder(getClass().getSimpleName());
    this.expectedStatArchiveFile = new File(testFolder, ARCHIVE_FILE_NAME);
    FileUtils.copyURLToFile(url, this.expectedStatArchiveFile);
}

From source file:org.apache.geode.internal.statistics.StatArchiveWithConsecutiveResourceInstIntegrationTest.java

@Before
public void setUp() throws Exception {
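    // Copy the classpath archive into a temporary file the test can read.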
    URL url = getClass().getResource(ARCHIVE_FILE_NAME);
    this.archiveFile = this.temporaryFolder.newFile(ARCHIVE_FILE_NAME);
    FileUtils.copyURLToFile(url, archiveFile);

    this.statSpec = new StatSpec(STATS_SPEC_STRING);

    // precondition
    assertThat(this.archiveFile).exists();
}

From source file:org.apache.geode.internal.statistics.StatArchiveWithMissingResourceTypeRegressionTest.java

@Before
public void setUp() throws Exception {
    URL url = getClass().getResource(ARCHIVE_FILE_NAME);
    assertThat(url).isNotNull(); // precondition

    this.archiveFile = this.temporaryFolder.newFile(ARCHIVE_FILE_NAME);
    FileUtils.copyURLToFile(url, this.archiveFile);
    assertThat(this.archiveFile).exists(); // precondition
}

From source file:org.apache.geode.management.internal.configuration.ImportOldClusterConfigDUnitTest.java

@Before
public void before() throws Exception {
    // create the cc.zip that contains the 8.1 version cache.xml
    File ccDir = tempFolder.newFolder("cluster_config");
    File clusterDir = new File(ccDir, "cluster");
    clusterDir.mkdir();

    FileUtils.copyURLToFile(this.getClass().getResource("cluster8.xml"), new File(clusterDir, "cluster.xml"));
    zipFile = new File(tempFolder.getRoot(), "cc.zip").toPath();

    ZipUtils.zipDirectory(ccDir.toPath(), zipFile);
}

From source file:org.apache.geode.management.internal.configuration.utils.DtdResolver.java

@Deprecated
public InputSource resolveEntity(String publicId, String systemId) throws IOException {
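    // If the public DTD URL is unreachable, serve the DTD bundled on the classpath;
    // otherwise return null so the parser falls back to default resolution.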
    if (!isHttpUrlOK(systemId)) {
        URL dtdURL = getClass().getResource(CacheXml.LATEST_DTD_LOCATION);
        File dtd = new File(DistributionConfig.GEMFIRE_PREFIX + "dtd");
        FileUtils.copyURLToFile(dtdURL, dtd);
        InputSource inputSource = new InputSource(FileUtils.openInputStream(dtd));
        FileUtils.deleteQuietly(dtd);
        return inputSource;
    } else {
        return null;
    }
}
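
A note on the staging pattern above: the DTD is copied to a fixed file name in the working directory, so concurrent resolutions could clash. A hypothetical variant (not part of the Geode source; it reuses dtdURL from the method above) that stages to a unique temporary file instead:

    // Stage the bundled DTD to a per-call temp file to avoid name clashes.
    File dtd = File.createTempFile("gemfire-", ".dtd");
    try {
        FileUtils.copyURLToFile(dtdURL, dtd);
        return new InputSource(FileUtils.openInputStream(dtd));
    } finally {
        // The open stream keeps the bytes readable on POSIX; on Windows the
        // delete of an open file fails and is swallowed quietly.
        FileUtils.deleteQuietly(dtd);
    }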

From source file:org.apache.karaf.cave.server.storage.CaveRepositoryImpl.java

/**
 * Upload an artifact from the given URL.
 *
 * @param url the URL of the artifact.
 * @throws Exception in case of upload failure.
 */
public void upload(URL url) throws Exception {
    LOGGER.debug("Upload new artifact from {}", url);
    String artifactName = "artifact-" + System.currentTimeMillis();
    File temp = new File(new File(this.getLocation()), artifactName);
    FileUtils.copyURLToFile(url, temp);
    // update the repository.xml
    ResourceImpl resource = (ResourceImpl) new DataModelHelperImpl().createResource(temp.toURI().toURL());
    if (resource == null) {
        temp.delete();
        LOGGER.warn("The {} artifact source is not a valid OSGi bundle", url);
        throw new IllegalArgumentException(
                "The " + url.toString() + " artifact source is not a valid OSGi bundle");
    }
    File destination = new File(new File(this.getLocation()),
            resource.getSymbolicName() + "-" + resource.getVersion() + ".jar");
    if (destination.exists()) {
        temp.delete();
        LOGGER.warn("The {} artifact is already present in the Cave repository", url);
        throw new IllegalArgumentException(
                "The " + url.toString() + " artifact is already present in the Cave repository");
    }
    FileUtils.moveFile(temp, destination);
    resource = (ResourceImpl) new DataModelHelperImpl().createResource(destination.toURI().toURL());
    this.addResource(resource);
    this.generateRepositoryXml();
}

From source file:org.apache.karaf.tooling.exam.container.internal.KarafTestContainer.java

private void copyBootClasspathLibraries(File karafHome, ExamSystem subsystem) throws IOException {
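    // Download every configured boot-classpath library into Karaf's lib directory.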
    BootClasspathLibraryOption[] bootClasspathLibraryOptions = subsystem
            .getOptions(BootClasspathLibraryOption.class);
    for (BootClasspathLibraryOption bootClasspathLibraryOption : bootClasspathLibraryOptions) {
        UrlReference libraryUrl = bootClasspathLibraryOption.getLibraryUrl();
        FileUtils.copyURLToFile(new URL(libraryUrl.getURL()), createFileNameWithRandomPrefixFromUrlAtTarget(
                libraryUrl.getURL(), new File(karafHome + "/lib"), new String[] { "jar" }));
    }
}

From source file:org.apache.karaf.tooling.exam.container.internal.KarafTestContainer.java

@SuppressWarnings("rawtypes")
private void copyReferencedArtifactsToDeployFolder(File deploy, ExamSystem subsystem, String[] fileEndings) {
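    // Download each provisioned artifact into the deploy folder, tolerating individual failures.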
    ProvisionOption[] options = subsystem.getOptions(ProvisionOption.class);
    for (ProvisionOption option : options) {
        try {
            FileUtils.copyURLToFile(new URL(option.getURL()),
                    createFileNameWithRandomPrefixFromUrlAtTarget(option.getURL(), deploy, fileEndings));
        }
        // CHECKSTYLE:SKIP
        catch (Exception e) {
            // well, this can happen...
        }
    }
}

From source file:org.apache.kylin.tool.HBaseUsageExtractor.java

private void extractHTables(File dest) throws IOException {
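    // Save each HTable's status page from the HBase master UI as an HTML file.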
    logger.info("These htables are going to be extracted:");
    for (String htable : htables) {
        logger.info(htable + "(required)");
    }

    File tableDir = new File(dest, "table");
    FileUtils.forceMkdir(tableDir);

    for (String htable : htables) {
        try {
            URL srcUrl = new URL(getHBaseMasterUrl() + "table.jsp?name=" + htable);
            File destFile = new File(tableDir, htable + ".html");
            FileUtils.copyURLToFile(srcUrl, destFile);
        } catch (Exception e) {
            logger.warn("HTable " + htable + "info fetch failed: ", e);
        }
    }
}

From source file:org.apache.kylin.tool.HBaseUsageExtractor.java

private void extractCommonInfo(File dest) throws IOException {
    logger.info("The hbase master info/conf are going to be extracted...");

    // hbase master page
    try {
        File masterDir = new File(dest, "master");
        FileUtils.forceMkdir(masterDir);
        URL srcMasterUrl = new URL(getHBaseMasterUrl() + "master-status");
        File masterDestFile = new File(masterDir, "master-status.html");
        FileUtils.copyURLToFile(srcMasterUrl, masterDestFile);
    } catch (Exception e) {
        logger.warn("HBase Master status fetch failed: ", e);
    }

    // hbase conf
    try {
        File confDir = new File(dest, "conf");
        FileUtils.forceMkdir(confDir);
        URL srcConfUrl = new URL(getHBaseMasterUrl() + "conf");
        File destConfFile = new File(confDir, "hbase-conf.xml");
        FileUtils.copyURLToFile(srcConfUrl, destConfFile);
    } catch (Exception e) {
        logger.warn("HBase conf fetch failed: ", e);
    }

    // hbase jmx
    try {
        File jmxDir = new File(dest, "jmx");
        FileUtils.forceMkdir(jmxDir);
        URL srcJmxUrl = new URL(getHBaseMasterUrl() + "jmx");
        File jmxDestFile = new File(jmxDir, "jmx.html");
        FileUtils.copyURLToFile(srcJmxUrl, jmxDestFile);
    } catch (Exception e) {
        logger.warn("HBase JMX fetch failed: ", e);
    }

    // hbase hdfs status
    try {
        File hdfsDir = new File(dest, "hdfs");
        FileUtils.forceMkdir(hdfsDir);
        CliCommandExecutor cliCommandExecutor = kylinConfig.getCliCommandExecutor();
        String output = cliCommandExecutor
                .execute("hadoop fs -ls -R " + conf.get("hbase.rootdir") + "/data/default/KYLIN_*").getSecond();
        FileUtils.writeStringToFile(new File(hdfsDir, "hdfs-files.list"), output, Charset.defaultCharset());
        output = cliCommandExecutor
                .execute("hadoop fs -ls -R " + conf.get("hbase.rootdir") + "/data/default/kylin_*").getSecond();
        FileUtils.writeStringToFile(new File(hdfsDir, "hdfs-files.list"), output, Charset.defaultCharset(),
                true);
    } catch (Exception e) {
        logger.warn("HBase hdfs status fetch failed: ", e);
    }
}