Example usage for org.apache.commons.io FileUtils copyDirectoryToDirectory

Introduction

On this page you can find example usage for org.apache.commons.io FileUtils copyDirectoryToDirectory.

Prototype

public static void copyDirectoryToDirectory(File srcDir, File destDir) throws IOException 

Document

Copies a directory to within another directory preserving the file dates.
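
A minimal sketch of the behavior, using hypothetical paths (the srcDir and destDir values below are assumptions for illustration only): the source directory itself is copied into the destination directory, so copying /tmp/reports into /tmp/backup produces /tmp/backup/reports.

import java.io.File;
import java.io.IOException;

import org.apache.commons.io.FileUtils;

public class CopyDirectoryToDirectorySketch {
    public static void main(String[] args) throws IOException {
        // Hypothetical paths, for illustration only.
        File srcDir = new File("/tmp/reports");  // must exist and be a directory
        File destDir = new File("/tmp/backup");  // parent directory to copy into

        // Produces /tmp/backup/reports/... with file modification dates preserved.
        FileUtils.copyDirectoryToDirectory(srcDir, destDir);
    }
}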

Usage

From source file: org.amanzi.awe.scripting.AbstractScriptingPluginTests.java

/**
 * @throws IOException
 */
private void restoreWS() throws IOException {
    URL scriptFolderUrl = Platform.getBundle(TestActivator.ID).getEntry(SCRIPT_ROOT);
    File targetFolder = new File(WORKSPACE_FOLDER + File.separator + PROJECT_FOLDER);
    File rubyFolder = new File(FileLocator.resolve(scriptFolderUrl).getPath());
    for (File file : rubyFolder.listFiles()) {
        FileUtils.forceMkdir(targetFolder);
        FileUtils.copyDirectoryToDirectory(file, targetFolder);
    }
}

From source file: org.apache.archiva.rest.services.RepositoriesServiceTest.java

protected File initSnapshotRepo() throws Exception {
    File targetRepo = new File(getBasedir(), "target/repo-with-snapshots");
    if (targetRepo.exists()) {
        FileUtils.deleteDirectory(targetRepo);
    }
    assertFalse(targetRepo.exists());

    FileUtils.copyDirectoryToDirectory(new File(getBasedir(), "src/test/repo-with-snapshots"),
            targetRepo.getParentFile());

    if (getManagedRepositoriesService(authorizationHeader).getManagedRepository(SNAPSHOT_REPO_ID) != null) {
        getManagedRepositoriesService(authorizationHeader).deleteManagedRepository(SNAPSHOT_REPO_ID, true);
        assertNull(getManagedRepositoriesService(authorizationHeader).getManagedRepository(SNAPSHOT_REPO_ID));
    }
    ManagedRepository managedRepository = getTestManagedRepository(SNAPSHOT_REPO_ID, "repo-with-snapshots");
    /*managedRepository.setId( SNAPSHOT_REPO_ID );
    managedRepository.setLocation( );
    managedRepository.setCronExpression( "* * * * * ?" );*/
    getManagedRepositoriesService(authorizationHeader).addManagedRepository(managedRepository);
    assertNotNull(getManagedRepositoriesService(authorizationHeader).getManagedRepository(SNAPSHOT_REPO_ID));

    return targetRepo;
}

From source file: org.apache.flink.yarn.YarnTestBase.java

@AfterClass
public static void teardown() throws Exception {

    LOG.info("Stopping MiniYarn Cluster");
    yarnCluster.stop();

    // Unset FLINK_CONF_DIR, as it might change the behavior of other tests
    Map<String, String> map = new HashMap<>(System.getenv());
    map.remove(ConfigConstants.ENV_FLINK_CONF_DIR);
    map.remove("YARN_CONF_DIR");
    map.remove("IN_TESTS");
    TestBaseUtils.setEnv(map);

    if (tempConfPathForSecureRun != null) {
        FileUtil.fullyDelete(tempConfPathForSecureRun);
        tempConfPathForSecureRun = null;
    }

    // When we are on travis, we copy the temp files of JUnit (containing the MiniYARNCluster log files)
    // to <flinkRoot>/target/flink-yarn-tests-*.
    // The files from there are picked up by the ./tools/travis_watchdog.sh script
    // to upload them to Amazon S3.
    if (isOnTravis()) {
        File target = new File("../target" + yarnConfiguration.get(TEST_CLUSTER_NAME_KEY));
        if (!target.mkdirs()) {
            LOG.warn("Error creating dirs to {}", target);
        }
        File src = tmp.getRoot();
        LOG.info("copying the final files from {} to {}", src.getAbsolutePath(), target.getAbsolutePath());
        try {
            FileUtils.copyDirectoryToDirectory(src, target);
        } catch (IOException e) {
            LOG.warn("Error copying the final files from {} to {}: msg: {}", src.getAbsolutePath(),
                    target.getAbsolutePath(), e.getMessage(), e);
        }
    }

}

From source file: org.apache.hadoop.gateway.AmbariServiceDefinitionTest.java

public static void setupGateway() throws Exception {
    File targetDir = new File(System.getProperty("user.dir"), "target");
    File gatewayDir = new File(targetDir, "gateway-home-" + UUID.randomUUID());
    gatewayDir.mkdirs();

    config = new GatewayTestConfig();
    config.setGatewayHomeDir(gatewayDir.getAbsolutePath());

    URL svcsFileUrl = TestUtils.getResourceUrl(DAT, "test-svcs/readme.txt");
    File svcsFile = new File(svcsFileUrl.getFile());
    File svcsDir = svcsFile.getParentFile();
    config.setGatewayServicesDir(svcsDir.getAbsolutePath());

    String pathToStacksSource = "gateway-service-definitions/src/main/resources/services";
    File stacksSourceDir = new File(targetDir.getParent(), pathToStacksSource);
    if (!stacksSourceDir.exists()) {
        stacksSourceDir = new File(targetDir.getParentFile().getParent(), pathToStacksSource);
    }
    if (stacksSourceDir.exists()) {
        FileUtils.copyDirectoryToDirectory(stacksSourceDir, svcsDir);
    }

    File topoDir = new File(config.getGatewayTopologyDir());
    topoDir.mkdirs();

    File deployDir = new File(config.getGatewayDeploymentDir());
    deployDir.mkdirs();

    setupMockServers();
    startGatewayServer();
}

From source file: org.apache.hadoop.gateway.deploy.DeploymentFactoryFuncTest.java

private void addStacksDir(GatewayConfig config, File targetDir) {
    File stacksDir = new File(config.getGatewayServicesDir());
    stacksDir.mkdirs();
    //TODO: [sumit] This is a hack for now, need to find a better way to locate the source resources for 'stacks' to be tested
    String pathToStacksSource = "gateway-service-definitions/src/main/resources/services";
    File stacksSourceDir = new File(targetDir.getParent(), pathToStacksSource);
    if (!stacksSourceDir.exists()) {
        stacksSourceDir = new File(targetDir.getParentFile().getParent(), pathToStacksSource);
    }
    if (stacksSourceDir.exists()) {
        try {
            FileUtils.copyDirectoryToDirectory(stacksSourceDir, stacksDir);
        } catch (IOException e) {
            fail(e.getMessage());
        }
    }

}

From source file: org.apache.hadoop.gateway.GatewayTestDriver.java

/**
 * Creates a GATEWAY_HOME, starts a gateway instance and deploys a test topology.
 */
public void setupGateway(GatewayTestConfig config, String cluster, XMLTag topology, boolean use)
        throws Exception {
    this.useGateway = use;
    this.config = config;
    this.clusterName = cluster;

    File targetDir = new File(System.getProperty("user.dir"), "target");
    File gatewayDir = new File(targetDir, "gateway-home-" + UUID.randomUUID());
    gatewayDir.mkdirs();

    config.setGatewayHomeDir(gatewayDir.getAbsolutePath());

    File topoDir = new File(config.getGatewayTopologyDir());
    topoDir.mkdirs();

    File deployDir = new File(config.getGatewayDeploymentDir());
    deployDir.mkdirs();

    File descriptor = new File(topoDir, cluster + ".xml");
    FileOutputStream stream = new FileOutputStream(descriptor);
    topology.toStream(stream);
    stream.close();

    DefaultGatewayServices srvcs = new DefaultGatewayServices();
    Map<String, String> options = new HashMap<>();
    options.put("persist-master", "false");
    options.put("master", "password");
    try {
        srvcs.init(config, options);
    } catch (ServiceLifecycleException e) {
        e.printStackTrace(); // I18N not required.
    }
    File stacksDir = new File(config.getGatewayServicesDir());
    stacksDir.mkdirs();
    //TODO: [sumit] This is a hack for now, need to find a better way to locate the source resources for 'stacks' to be tested
    String pathToStacksSource = "gateway-service-definitions/src/main/resources/services";
    File stacksSourceDir = new File(targetDir.getParent(), pathToStacksSource);
    if (!stacksSourceDir.exists()) {
        stacksSourceDir = new File(targetDir.getParentFile().getParentFile().getParent(), pathToStacksSource);
    }
    if (stacksSourceDir.exists()) {
        FileUtils.copyDirectoryToDirectory(stacksSourceDir, stacksDir);
    }

    gateway = GatewayServer.startGateway(config, srvcs);
    MatcherAssert.assertThat("Failed to start gateway.", gateway, notNullValue());

    log.info("Gateway port = " + gateway.getAddresses()[0].getPort());
}

From source file: org.apache.kylin.tool.JobDiagnosisInfoCLI.java

@Override
protected void executeExtract(OptionsHelper optionsHelper, File exportDir) throws Exception {
    String kylinJobId = optionsHelper.getOptionValue(OPTION_JOB_ID);
    boolean includeCube = optionsHelper.hasOption(OPTION_INCLUDE_CUBE)
            ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_CUBE))
            : true;
    boolean includeYarnLogs = optionsHelper.hasOption(OPTION_INCLUDE_YARN_LOGS)
            ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_YARN_LOGS))
            : true;
    boolean includeClient = optionsHelper.hasOption(OPTION_INCLUDE_CLIENT)
            ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_CLIENT))
            : true;
    boolean includeConf = optionsHelper.hasOption(OPTION_INCLUDE_CONF)
            ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_CONF))
            : true;

    // dump job output
    logger.info("Start to dump job output");
    ExecutablePO executablePO = executableDao.getJob(kylinJobId);
    addRequired(ResourceStore.EXECUTE_RESOURCE_ROOT + "/" + kylinJobId);
    addRequired(ResourceStore.EXECUTE_OUTPUT_RESOURCE_ROOT + "/" + kylinJobId);
    for (ExecutablePO kylinTask : executablePO.getTasks()) {
        addRequired(ResourceStore.EXECUTE_RESOURCE_ROOT + "/" + kylinTask.getUuid());
        addRequired(ResourceStore.EXECUTE_OUTPUT_RESOURCE_ROOT + "/" + kylinTask.getUuid());
        if (includeYarnLogs) {
            yarnLogsResources.add(kylinTask.getUuid());
        }
    }
    extractResources(exportDir);

    // dump cube metadata
    if (includeCube) {
        String cubeName = executablePO.getParams().get("cubeName");
        if (!StringUtils.isEmpty(cubeName)) {
            File metaDir = new File(exportDir, "cube");
            FileUtils.forceMkdir(metaDir);
            String[] cubeMetaArgs = { "-cube", cubeName, "-destDir",
                    new File(metaDir, cubeName).getAbsolutePath(), "-includeJobs", "false", "-compress",
                    "false", "-submodule", "true" };
            logger.info("Start to extract related cube: " + StringUtils.join(cubeMetaArgs));
            CubeMetaExtractor cubeMetaExtractor = new CubeMetaExtractor();
            logger.info("CubeMetaExtractor args: " + Arrays.toString(cubeMetaArgs));
            cubeMetaExtractor.execute(cubeMetaArgs);
        }
    }

    // dump mr job info
    if (includeYarnLogs) {
        logger.info("Start to dump mr job info: " + kylinJobId);
        File yarnDir = new File(exportDir, "yarn");
        FileUtils.forceMkdir(yarnDir);
        for (String stepId : yarnLogsResources) {
            extractJobInfo(stepId, new File(yarnDir, stepId));
            extractJobLog(stepId, new File(yarnDir, stepId), true);
        }
    }

    if (includeClient) {
        String[] clientArgs = { "-destDir", new File(exportDir, "client").getAbsolutePath(), "-compress",
                "false", "-submodule", "true" };
        ClientEnvExtractor clientEnvExtractor = new ClientEnvExtractor();
        logger.info("ClientEnvExtractor args: " + Arrays.toString(clientArgs));
        clientEnvExtractor.execute(clientArgs);
    }

    // export conf
    if (includeConf) {
        logger.info("Start to extract kylin conf files.");
        try {
            FileUtils.copyDirectoryToDirectory(new File(ToolUtil.getConfFolder()), exportDir);
        } catch (Exception e) {
            logger.warn("Error in export conf.", e);
        }
    }

    // export kylin logs
    String[] logsArgs = { "-destDir", new File(exportDir, "logs").getAbsolutePath(), "-compress", "false",
            "-submodule", "true" };
    KylinLogExtractor logExtractor = new KylinLogExtractor();
    logger.info("KylinLogExtractor args: " + Arrays.toString(logsArgs));
    logExtractor.execute(logsArgs);
}

From source file: org.apache.maven.plugin.eclipse.it.EclipsePluginIT.java

protected void setUp() throws Exception {
    super.setUp();

    if (!initialized) {
        File tempWorkspace = new File("target/test-classes/eclipse");
        FileUtils.deleteDirectory(tempWorkspace);
        FileUtils.copyDirectoryToDirectory(new File("src/test/resources/eclipse"), tempWorkspace);
        initialized = true;
    }
}

From source file: org.apache.maven.plugin.eclipse.TempEclipseWorkspace.java

public TempEclipseWorkspace(String testWorkspaceName, String[] projectsToLink) throws Exception {

    File tempWorkspace = new File("target/tmp-workspace" + workspaceNumber++);
    FileUtils.deleteDirectory(tempWorkspace);
    FileUtils.copyDirectoryToDirectory(new File("src/test/resources/eclipse"), tempWorkspace);

    File eclipseLocation = new File(tempWorkspace, "eclipse").getCanonicalFile();

    File jdkLocation = new File(eclipseLocation, "dummyJDK");

    workspaceLocation = new File(eclipseLocation, testWorkspaceName + "/workspace").getCanonicalFile();

    File propertyfile = new File(workspaceLocation,
            ".metadata/.plugins/org.eclipse.core.runtime/.settings/org.eclipse.jdt.launching.prefs");

    preparePropertyFile(jdkLocation, propertyfile);

    if (projectsToLink != null && projectsToLink.length != 0) {
        for (String projectToLink : projectsToLink) {
            writeLocationFile(projectToLink);
        }
    }

}

From source file: org.apache.oodt.cas.install.CASInstallDistMojo.java

public void execute() throws MojoExecutionException, MojoFailureException {

    if (casDistributionFile == null || (!casDistributionFile.exists())) {
        throw new MojoExecutionException("the CAS distribution: [" + casDistributionFile + "] does not exist!");
    }

    // remove cas installation libs directory and its contents if custom libs specified.
    // this is to prevent legacy jars from polluting the cas installation libs directory
    if (customLibs != null && customLibs.length > 0) {

        File libDir = null;

        // get the lib dir
        try {
            libDir = new File(casInstallationDir.getCanonicalPath() + File.separator + LIB_DIR_NAME);

        } catch (IOException e) {
            getLog().warn("Unable to detect lib dir: IO exception: " + e.getMessage());
        }

        // delete the lib dir
        if (libDir != null) {

            getLog().warn("removing pre-existing CAS libraries directory [" + libDir.getAbsolutePath()
                    + "] since custom CAS libraries have been specified");

            try {
                FileUtils.deleteDirectory(libDir);
            } catch (IOException e) {
                getLog().warn("Unable to delete lib dir [" + libDir.getAbsolutePath() + "]: " + e.getMessage());
            }
        }
    }

    getLog().info("unpackaging distro: [" + casDistributionFile + "] to: [" + casInstallationDir + "]");
    try {
        AntDecorator.untarFile(casDistributionFile, casInstallationDir);
    } catch (IOException e) {
        throw new MojoExecutionException(
                "an IO exception occured while untarring the CAS distribution: Message: " + e.getMessage());
    }

    if (customPolicyDirs != null && customPolicyDirs.length > 0) {
        getLog().info("installing [" + customPolicyDirs.length + "] custom policy dirs");

        // remove the default policy
        File policyDir = null;
        try {
            policyDir = new File(casInstallationDir.getCanonicalPath() + File.separator + POLICY_DIR_NAME);
            AntDecorator.deleteAllFilesAndDir(policyDir);
        } catch (IOException e) {
            getLog().warn("IO exception when removing default policy from null policy dir: " + "Message: "
                    + e.getMessage());
        }

        for (File customPolicyDir : customPolicyDirs) {
            getLog().info("Installing: [" + customPolicyDir + "] to: [" + policyDir + "]");

            if (customPolicyDir.exists()) {
                try {
                    FileUtils.copyDirectoryToDirectory(customPolicyDir, policyDir);
                } catch (IOException e) {
                    getLog().warn("error copying custom policy dir: [" + customPolicyDir + "] to policy dir: ["
                            + policyDir + "]");
                }
            }

        }

    }

    if (customConfigurationDir != null && customConfigurationDir.exists()) {

        try {
            File configDir = new File(casInstallationDir.getCanonicalPath() + File.separator + CONFIG_DIR_NAME);

            // remove default config
            AntDecorator.deleteAllFilesAndDir(configDir);

            configDir.mkdir();

            // install custom config
            FileUtils.copyDirectory(customConfigurationDir, configDir, true);

        } catch (IOException e) {
            getLog().warn("Unable to detect configuration dir: IO exception: " + e.getMessage());
        }

    }

    if (customLibs != null && customLibs.length > 0) {
        getLog().info("installing [" + customLibs.length + "] custom CAS libraries");

        File libDir = null;
        // get the lib dir
        try {
            libDir = new File(casInstallationDir.getCanonicalPath() + File.separator + LIB_DIR_NAME);

        } catch (IOException e) {
            getLog().warn("Unable to detect lib dir: IO exception: " + e.getMessage());
        }

        for (File customLib : customLibs) {
            getLog().info("installing [" + customLib + "] to " + libDir.getAbsolutePath() + "]");
            try {
                FileUtils.copyFileToDirectory(customLib, libDir);
            } catch (IOException e) {
                getLog().warn("IOException installing [" + customLib + "] to " + libDir.getAbsolutePath()
                        + "]: Message: " + e.getMessage());
            }
        }
    }

    if (envVarReplaceFiles != null && envVarReplaceFiles.length > 0) {
        getLog().info("Replacing env vars on [" + envVarReplaceFiles.length + "] files");

        for (EnvReplacer envVarReplaceFile : envVarReplaceFiles) {
            try {
                envVarReplaceFile.doEnvReplace();
            } catch (IOException e) {
                getLog().warn("IOException while doing env replacement on: [" + envVarReplaceFile.getFilepath()
                        + "]: Message: " + e.getMessage());
            }
        }
    }

    if (customBinScripts != null && customBinScripts.length > 0) {
        getLog().info("installing [" + customBinScripts.length + "] custom bin scripts");

        File binDir = null;

        try {
            binDir = new File(casInstallationDir.getCanonicalPath() + File.separator + BIN_DIR_NAME);
        } catch (IOException e) {
            getLog().warn("Unable to detect bin dir: IO exception: Message: " + e.getMessage());
        }

        for (File customBinScript : customBinScripts) {
            getLog().info("installing [" + customBinScript + "] to [" + binDir + "]");
            try {
                FileUtils.copyFileToDirectory(customBinScript, binDir);
                // now chmod it with exec perms
                String custBinScriptFullPath = binDir + File.separator + customBinScript.getName();

                getLog().info("fixing perms on [" + custBinScriptFullPath + "]");
                AntDecorator.chmodFile(new File(custBinScriptFullPath), "ugo+rx");
            } catch (IOException e) {
                getLog().warn("unable to install [" + customBinScript + "] to [" + binDir
                        + "]: IO exception: Message: " + e.getMessage());
            }
        }
    }

}
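
Note that the plugin above uses both copyDirectoryToDirectory (for the custom policy directories) and copyDirectory (for the custom configuration directory). A minimal sketch of the difference, with hypothetical paths that are assumptions for illustration only: copyDirectoryToDirectory nests the source directory by name inside the destination, while copyDirectory copies only the source directory's contents into the destination.

import java.io.File;
import java.io.IOException;

import org.apache.commons.io.FileUtils;

public class CopyVariantsSketch {
    public static void main(String[] args) throws IOException {
        // Hypothetical directories, for illustration only.
        File src = new File("/tmp/policy");
        File dest = new File("/tmp/install");

        // Nests the source directory itself: /tmp/install/policy/...
        FileUtils.copyDirectoryToDirectory(src, dest);

        // Copies only the contents of /tmp/policy directly into /tmp/install/...
        // (the boolean preserves file dates, matching the plugin's copyDirectory call above)
        FileUtils.copyDirectory(src, dest, true);
    }
}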