Example usage for org.apache.commons.io FileUtils deleteDirectory

Introduction

On this page you can find examples of using org.apache.commons.io FileUtils deleteDirectory.

Prototype

public static void deleteDirectory(File directory) throws IOException 

Document

Deletes a directory recursively.
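
As a minimal, self-contained sketch before the collected examples below (the path target/example-dir is purely illustrative), a typical call builds some nested content and then removes the whole tree. deleteDirectory throws an IOException if an existing directory cannot be fully deleted.

import java.io.File;
import java.io.IOException;

import org.apache.commons.io.FileUtils;

public class DeleteDirectoryExample {
    public static void main(String[] args) throws IOException {
        // Illustrative path only; any directory File works here.
        File dir = new File("target/example-dir");
        new File(dir, "nested").mkdirs();
        FileUtils.touch(new File(dir, "nested/data.txt"));

        // Recursively deletes the directory and everything beneath it.
        FileUtils.deleteDirectory(dir);
    }
}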

Usage

From source file:accumulo.AccumuloStuff.java

public static void main(String[] args) throws Exception {
    File tmp = new File(System.getProperty("user.dir") + "/target/mac-test");
    if (tmp.exists()) {
        FileUtils.deleteDirectory(tmp);
    }
    tmp.mkdirs();
    String passwd = "password";

    MiniAccumuloConfigImpl cfg = new MiniAccumuloConfigImpl(tmp, passwd);
    cfg.setNumTservers(1);
    //    cfg.useMiniDFS(true);

    final MiniAccumuloClusterImpl cluster = cfg.build();
    setCoreSite(cluster);
    cluster.start();

    ExecutorService svc = Executors.newFixedThreadPool(2);

    try {
        Connector conn = cluster.getConnector("root", passwd);
        String table = "table";
        conn.tableOperations().create(table);

        final BatchWriter bw = conn.createBatchWriter(table, new BatchWriterConfig());
        final AtomicBoolean flushed = new AtomicBoolean(false);

        // One thread writes and flushes a mutation while a second kills and
        // restarts the tablet server, exercising recovery during the flush.
        Runnable writer = new Runnable() {
            @Override
            public void run() {
                try {
                    Mutation m = new Mutation("row");
                    m.put("colf", "colq", "value");
                    bw.addMutation(m);
                    bw.flush();
                    flushed.set(true);
                } catch (Exception e) {
                    log.error("Got exception trying to flush mutation", e);
                }

                log.info("Exiting batchwriter thread");
            }
        };

        Runnable restarter = new Runnable() {
            @Override
            public void run() {
                try {
                    for (ProcessReference proc : cluster.getProcesses().get(ServerType.TABLET_SERVER)) {
                        cluster.killProcess(ServerType.TABLET_SERVER, proc);
                    }
                    cluster.exec(TabletServer.class);
                } catch (Exception e) {
                    log.error("Caught exception restarting tabletserver", e);
                }
                log.info("Exiting restart thread");
            }
        };

        svc.execute(writer);
        svc.execute(restarter);

        log.info("Waiting for shutdown");
        svc.shutdown();
        if (!svc.awaitTermination(120, TimeUnit.SECONDS)) {
            log.info("Timeout on shutdown exceeded");
            svc.shutdownNow();
        } else {
            log.info("Cleanly shutdown");
            log.info("Threadpool is terminated? " + svc.isTerminated());
        }

        if (flushed.get()) {
            log.info("****** BatchWriter was flushed *********");
        } else {
            log.info("****** BatchWriter was NOT flushed *********");
        }

        bw.close();

        log.info("Got record {}", Iterables.getOnlyElement(conn.createScanner(table, Authorizations.EMPTY)));
    } finally {
        cluster.stop();
    }
}

From source file:azkaban.migration.schedule2trigger.Schedule2Trigger.java

public static void main(String[] args) throws Exception {
    if (args.length < 1) {
        printUsage();
        return;
    }

    File confFile = new File(args[0]);
    try {
        logger.info("Trying to load config from " + confFile.getAbsolutePath());
        props = loadAzkabanConfig(confFile);
    } catch (Exception e) {
        e.printStackTrace();
        logger.error(e);
        return;
    }

    try {
        // createTempFile is used only to reserve a unique name; the file is
        // then replaced by a directory of the same name.
        outputDir = File.createTempFile("schedules", null);
        logger.info("Creating temp dir for dumping existing schedules.");
        outputDir.delete();
        outputDir.mkdir();
    } catch (Exception e) {
        e.printStackTrace();
        logger.error(e);
        return;
    }

    try {
        schedule2File();
    } catch (Exception e) {
        e.printStackTrace();
        logger.error(e);
        return;
    }

    try {
        file2ScheduleTrigger();
    } catch (Exception e) {
        e.printStackTrace();
        logger.error(e);
        return;
    }

    logger.info("Uploaded all schedules. Removing temp dir.");
    FileUtils.deleteDirectory(outputDir);
    System.exit(0);
}

From source file:edu.indiana.d2i.sloan.internal.DeleteVMSimulator.java

public static void main(String[] args) {
    DeleteVMSimulator simulator = new DeleteVMSimulator();

    CommandLineParser parser = new PosixParser();

    try {
        CommandLine line = simulator.parseCommandLine(parser, args);
        String wdir = line.getOptionValue(CMD_FLAG_VALUE.get(CMD_FLAG_KEY.WORKING_DIR));

        if (!HypervisorCmdSimulator.resourceExist(wdir)) {
            logger.error(String.format("Cannot find VM working dir: %s", wdir));
            System.exit(ERROR_CODE.get(ERROR_STATE.VM_NOT_EXIST));
        }

        Properties prop = new Properties();
        String filename = HypervisorCmdSimulator.cleanPath(wdir) + HypervisorCmdSimulator.VM_INFO_FILE_NAME;

        try (FileInputStream in = new FileInputStream(filename)) {
            prop.load(in);
        }

        // the VM cannot be deleted unless it has been shut down
        VMState currentState = VMState.valueOf(prop.getProperty(CMD_FLAG_VALUE.get(CMD_FLAG_KEY.VM_STATE)));

        if (!currentState.equals(VMState.SHUTDOWN)) {
            logger.error("Cannot perform delete when VM is not shut down");
            System.exit(ERROR_CODE.get(ERROR_STATE.VM_NOT_SHUTDOWN));
        }

        // delete working directory
        FileUtils.deleteDirectory(new File(wdir));

        // success
        System.exit(0);

    } catch (ParseException e) {
        logger.error(String.format("Cannot parse input arguments: %s%n, expected:%n%s",
                StringUtils.join(args, " "), simulator.getUsage(100, "", 5, 5, "")));

        System.exit(ERROR_CODE.get(ERROR_STATE.INVALID_INPUT_ARGS));
    } catch (IOException e) {
        logger.error(e.getMessage(), e);
        System.exit(ERROR_CODE.get(ERROR_STATE.IO_ERR));
    }

}

From source file:com.foudroyantfactotum.mod.fousarchive.utility.midi.FileSupporter.java

public static void main(String[] args) throws InterruptedException, IOException {
    for (int i = 0; i < noOfWorkers; ++i)
        pool.submit(new ConMidiDetailsPuller());

    final File sourceDir = new File(source);
    final File outputDir = new File(output);

    Logger.info(UserLogger.GENERAL, "source directory: " + sourceDir.getAbsolutePath());
    Logger.info(UserLogger.GENERAL, "output directory: " + outputDir.getAbsolutePath());
    Logger.info(UserLogger.GENERAL, "processing midi files using " + noOfWorkers + " cores");

    FileUtils.deleteDirectory(outputDir);
    FileUtils.touch(new File(outputDir + "/master.json.gz"));

    for (File sfile : sourceDir.listFiles()) {
        recFile(sfile, files);
    }

    for (int i = 0; i < noOfWorkers; ++i)
        files.put(TERMINATOR);

    pool.shutdown();
    pool.awaitTermination(Long.MAX_VALUE, TimeUnit.DAYS);//just get all the work done first.

    try (final OutputStream fstream = new FileOutputStream(outputDir + "/master.json.gz")) {
        try (final GZIPOutputStream gzstream = new GZIPOutputStream(fstream)) {
            final OutputStreamWriter osw = new OutputStreamWriter(gzstream);

            osw.write(JSON.toJson(processedMidiFiles));
            osw.flush();
        }
    } catch (IOException e) {
        Logger.info(UserLogger.GENERAL, e.toString());
    }

    Logger.info(UserLogger.GENERAL, "Processed " + processedMidiFiles.size() + " midi files out of " + fileCount
            + " files. " + (fileCount - processedMidiFiles.size()) + " removed");
}

From source file:com.sanaldiyar.projects.nanohttpd.nanoinstaller.App.java

public static void main(String[] args) {
    try {
        String executableName = new File(
                App.class.getProtectionDomain().getCodeSource().getLocation().getPath()).getName();
        Options options = new Options();

        Option destination = OptionBuilder.withArgName("folder").withLongOpt("destination").hasArgs(1)
                .withDescription("destionation folder").withType(String.class).create("d");

        Option lrfolder = OptionBuilder.withArgName("folder").withLongOpt("localrepo").hasArgs(1)
                .withDescription("local repository folder").withType(String.class).create("lr");

        Option rmlrfolder = OptionBuilder.withLongOpt("deletelocalrepo").hasArg(false)
                .withDescription("delete local repository after installation").create("dlr");

        Option help = OptionBuilder.withLongOpt("help").withDescription("print this help").create("h");

        options.addOption(destination);
        options.addOption(lrfolder);
        options.addOption(rmlrfolder);
        options.addOption(help);

        HelpFormatter helpFormatter = new HelpFormatter();

        CommandLineParser commandLineParser = new PosixParser();
        CommandLine commands;
        try {
            commands = commandLineParser.parse(options, args);
        } catch (ParseException ex) {
            System.out.println("Error at parsing arguments");
            helpFormatter.printHelp("java -jar " + executableName, options);
            return;
        }

        if (commands.hasOption("h")) {
            helpFormatter.printHelp("java -jar " + executableName, options);
            return;
        }

        String sdest = commands.getOptionValue("d", "./nanosystem");
        System.out.println("The nano system will be installed into " + sdest);
        File dest = new File(sdest);
        if (dest.exists()) {
            FileUtils.deleteDirectory(dest);
        }
        dest.mkdirs();
        File bin = new File(dest, "bin");
        bin.mkdir();
        File bundle = new File(dest, "bundle");
        bundle.mkdir();
        File conf = new File(dest, "conf");
        conf.mkdir();
        File core = new File(dest, "core");
        core.mkdir();
        File logs = new File(dest, "logs");
        logs.mkdir();
        File nanohttpdcore = new File(dest, "nanohttpd-core");
        nanohttpdcore.mkdir();
        File nanohttpdservices = new File(dest, "nanohttpd-services");
        nanohttpdservices.mkdir();
        File temp = new File(dest, "temp");
        temp.mkdir();
        File apps = new File(dest, "apps");
        apps.mkdir();

        File local = new File(commands.getOptionValue("lr", "./local-repository"));
        Collection<RemoteRepository> repositories = Arrays.asList(
                new RemoteRepository("sanaldiyar-snap", "default", "http://maven2.sanaldiyar.com/snap-repo"),
                new RemoteRepository("central", "default", "http://repo1.maven.org/maven2/"));
        Aether aether = new Aether(repositories, local);

        //Copy core felix main
        System.out.println("Downloading Felix main executable");
        List<Artifact> felixmain = aether.resolve(
                new DefaultArtifact("org.apache.felix", "org.apache.felix.main", "jar", "LATEST"), "runtime");
        for (Artifact artifact : felixmain) {
            if (artifact.getArtifactId().equals("org.apache.felix.main")) {
                FileUtils.copyFile(artifact.getFile(), new File(bin, "felix-main.jar"));
                System.out.println(artifact.getArtifactId());
                break;
            }
        }
        System.out.println("OK");

        //Copy core felix bundles
        System.out.println("Downloading Felix core bundles");
        Collection<String> felixcorebundles = Arrays.asList("fileinstall", "bundlerepository", "gogo.runtime",
                "gogo.shell", "gogo.command");
        for (String felixCoreBundle : felixcorebundles) {
            List<Artifact> felixcore = aether.resolve(new DefaultArtifact("org.apache.felix",
                    "org.apache.felix." + felixCoreBundle, "jar", "LATEST"), "runtime");
            for (Artifact artifact : felixcore) {
                if (artifact.getArtifactId().equals("org.apache.felix." + felixCoreBundle)) {
                    FileUtils.copyFileToDirectory(artifact.getFile(), core);
                    System.out.println(artifact.getArtifactId());
                }
            }
        }
        System.out.println("OK");

        //Copy nanohttpd core bundles
        System.out.println("Downloading nanohttpd core bundles and configurations");
        List<Artifact> nanohttpdcorebundle = aether.resolve(
                new DefaultArtifact("com.sanaldiyar.projects.nanohttpd", "nanohttpd", "jar", "LATEST"),
                "runtime");
        for (Artifact artifact : nanohttpdcorebundle) {
            if (!artifact.getArtifactId().equals("org.osgi.core")) {
                FileUtils.copyFileToDirectory(artifact.getFile(), nanohttpdcore);
                System.out.println(artifact.getArtifactId());
            }
        }

        nanohttpdcorebundle = aether.resolve(
                new DefaultArtifact("com.sanaldiyar.projects", "engender", "jar", "LATEST"), "runtime");
        for (Artifact artifact : nanohttpdcorebundle) {
            FileUtils.copyFileToDirectory(artifact.getFile(), nanohttpdcore);
            System.out.println(artifact.getArtifactId());
        }

        nanohttpdcorebundle = aether.resolve(
                new DefaultArtifact("org.codehaus.jackson", "jackson-mapper-asl", "jar", "1.9.5"), "runtime");
        for (Artifact artifact : nanohttpdcorebundle) {
            FileUtils.copyFileToDirectory(artifact.getFile(), nanohttpdcore);
            System.out.println(artifact.getArtifactId());
        }

        nanohttpdcorebundle = aether
                .resolve(new DefaultArtifact("org.mongodb", "mongo-java-driver", "jar", "LATEST"), "runtime");
        for (Artifact artifact : nanohttpdcorebundle) {
            FileUtils.copyFileToDirectory(artifact.getFile(), nanohttpdcore);
            System.out.println(artifact.getArtifactId());
        }

        //Copy nanohttpd conf
        FileUtils.copyInputStreamToFile(App.class.getResourceAsStream("/nanohttpd.conf"),
                new File(dest, "nanohttpd.conf"));
        System.out.println("Configuration: nanohttpd.conf");

        //Copy nanohttpd start script
        File startsh = new File(dest, "start.sh");
        FileUtils.copyInputStreamToFile(App.class.getResourceAsStream("/start.sh"), startsh);
        startsh.setExecutable(true);
        System.out.println("Script: start.sh");

        System.out.println("OK");

        //Copy nanohttpd service bundles
        System.out.println("Downloading nanohttpd service bundles");
        List<Artifact> nanohttpdservicebundle = aether
                .resolve(new DefaultArtifact("com.sanaldiyar.projects.nanohttpd", "mongodbbasedsessionhandler",
                        "jar", "1.0-SNAPSHOT"), "runtime");
        for (Artifact artifact : nanohttpdservicebundle) {
            if (artifact.getArtifactId().equals("mongodbbasedsessionhandler")) {
                FileUtils.copyFileToDirectory(artifact.getFile(), nanohttpdservices);
                System.out.println(artifact.getArtifactId());
                break;
            }
        }

        //Copy nanohttpd mongodbbasedsessionhandler conf
        FileUtils.copyInputStreamToFile(App.class.getResourceAsStream("/mdbbasedsh.conf"),
                new File(dest, "mdbbasedsh.conf"));
        System.out.println("Configuration: mdbbasedsh.conf");

        System.out.println("OK");

        if (commands.hasOption("dlr")) {
            System.out.println("Local repository is deleting");
            FileUtils.deleteDirectory(local);
            System.out.println("OK");
        }

        System.out.println("You can reconfigure nanohttpd and services. To start system run start.sh script");

    } catch (Exception ex) {
        System.out.println("Error during installation: " + ex.getMessage());
    }
}

From source file:cn.lhfei.spark.streaming.NginxlogSorterApp.java

public static void main(String[] args) {
    JavaSparkContext sc = null;
    try {
        SparkConf conf = new SparkConf().setMaster("local").setAppName("NginxlogSorterApp");
        sc = new JavaSparkContext(conf);
        JavaRDD<String> lines = sc.textFile(ORIGIN_PATH);

        JavaRDD<NginxLog> items = lines.map(new Function<String, NginxLog>() {
            private static final long serialVersionUID = -1530783780334450383L;

            @Override
            public NginxLog call(String v1) throws Exception {
                NginxLog item = new NginxLog();
                String[] arrays = v1.split("[\\t]");

                if (arrays.length == 3) {
                    item.setIp(arrays[0]);
                    item.setLiveTime(Long.parseLong(arrays[1]));
                    item.setAgent(arrays[2]);
                }
                return item;
            }
        });

        log.info("=================================Length: [{}]", items.count());

        JavaPairRDD<String, Iterable<NginxLog>> keyMaps = items.groupBy(new Function<NginxLog, String>() {

            @Override
            public String call(NginxLog v1) throws Exception {
                return v1.getIp();
            }
        });

        log.info("=================================Group by Key Length: [{}]", keyMaps.count());

        keyMaps.foreach(new VoidFunction<Tuple2<String, Iterable<NginxLog>>>() {

            @Override
            public void call(Tuple2<String, Iterable<NginxLog>> t) throws Exception {
                log.info("++++++++++++++++++++++++++++++++ key: {}", t._1);

                Iterator<NginxLog> ts = t._2().iterator();

                while (ts.hasNext()) {
                    log.info("=====================================[{}]", ts.next().toString());
                }
            }

        });

        FileUtils.deleteDirectory(new File(DESTI_PATH));
        keyMaps.saveAsTextFile(DESTI_PATH);

    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        // guard against a NullPointerException when context creation itself failed
        if (sc != null) {
            sc.close();
        }
    }
}

From source file:ee.ria.xroad.asyncdb.AsyncDBMemoryUsageTest.java

/**
 * @param args - arguments of main method, here not used.
 * @throws Exception - thrown when memory usage test fails.
 */
public static void main(String[] args) throws Exception {
    File logDir = null;

    try {
        SoapMessageImpl requestMessage = AsyncDBTestUtil.getFirstSoapRequest();
        File logFile = new File(AsyncDBTestUtil.getAsyncLogFilePath());
        logDir = logFile.getParentFile();

        long previousFreeFileDescriptorCount = 0;
        boolean first = true;

        for (int i = 0; i < ITERATIONS; i++) {
            LOG.info("Adding request number {}...", i);

            WritingCtx writingCtx = QUEUE.startWriting();
            writingCtx.getConsumer().soap(requestMessage);
            writingCtx.commit();

            long freeFileDescriptorCount = SystemMetrics.getFreeFileDescriptorCount();
            LOG.info("Free file descriptor count: {}", freeFileDescriptorCount);

            // a drop in the free descriptor count means file descriptors are
            // being leaked as requests are added
            if (!first && freeFileDescriptorCount < previousFreeFileDescriptorCount) {
                throw new RuntimeException("File descriptor count must not increase as requests are added!");
            }

            previousFreeFileDescriptorCount = freeFileDescriptorCount;
            first = false;
        }

        LOG.info("Async DB memory usage test accomplished successfully");
    } finally {
        FileUtils.deleteDirectory(new File(AsyncDBTestUtil.getProviderDirPath()));

        if (logDir != null) {
            FileUtils.deleteDirectory(logDir);
        }
    }
}

From source file:com.bluexml.tools.miscellaneous.PrepareSIDEModulesMigration.java

/**
 * @param args
 */
public static void main(String[] args) {
    boolean inplace = false;

    String workspace = "/Users/davidabad/workspaces/SIDE-Modules/";
    String frameworkmodulesPath = "/Volumes/Data/SVN/side/HEAD/S-IDE/FrameworksModules/trunk/";
    String classifier_base = "enterprise";
    String version_base = "3.4.6";
    String classifier_target = "enterprise";
    String version_target = "3.4.11";
    String frameworkmodulesInplace = "/Volumes/Data/SVN/projects/Ifremer/IfremerV5/src/modules/mavenProjects";

    Properties props = new Properties();
    try {
        InputStream resourceAsStream = PrepareSIDEModulesMigration.class
                .getResourceAsStream("config.properties");
        if (resourceAsStream != null) {
            props.load(resourceAsStream);

            inplace = Boolean.parseBoolean(props.getProperty("inplace", Boolean.toString(inplace)));
            workspace = props.getProperty("workspace", workspace);
            frameworkmodulesPath = props.getProperty("frameworkmodulesPath", frameworkmodulesPath);
            classifier_base = props.getProperty("classifier_base", classifier_base);
            version_base = props.getProperty("version_base", version_base);
            classifier_target = props.getProperty("classifier_target", classifier_target);
            version_target = props.getProperty("version_target", version_target);
            frameworkmodulesInplace = props.getProperty("frameworkmodulesInplace", frameworkmodulesInplace);
        } else {
            System.out.println("no configuration founded in classpath config.properties");
        }

    } catch (IOException e) {
        e.printStackTrace();
        return;
    }

    System.out.println("properties :");
    Enumeration<?> propertyNames = props.propertyNames();
    while (propertyNames.hasMoreElements()) {
        String nextElement = propertyNames.nextElement().toString();
        System.out.println("\t " + nextElement + " : " + props.getProperty(nextElement));
    }

    File workspaceFile = new File(workspace);

    File targetHome = new File(workspaceFile, MIGRATION_FOLDER);
    if (targetHome.exists()) {
        try {
            FileUtils.deleteDirectory(targetHome);
        } catch (IOException e) {
            e.printStackTrace();
            throw new RuntimeException(e);
        }
    }

    final String versionInProjectName = getVersionInProjectName(classifier_base, version_base);
    String versionInProjectName2 = getVersionInProjectName(classifier_target, version_target);

    if (frameworkmodulesPath.contains(",")) {
        // this is a list of paths
        String[] split = frameworkmodulesPath.split(",");
        for (String string : split) {
            if (StringUtils.trimToNull(string) != null) {
                executeInpath(inplace, string, classifier_base, version_base, classifier_target, version_target,
                        frameworkmodulesInplace, workspaceFile, versionInProjectName, versionInProjectName2);
            }
        }
    } else {
        executeInpath(inplace, frameworkmodulesPath, classifier_base, version_base, classifier_target,
                version_target, frameworkmodulesInplace, workspaceFile, versionInProjectName,
                versionInProjectName2);
    }

    System.out.println("Job's done !");
    System.out.println("Please check " + MIGRATION_FOLDER);
    System.out.println(
            "If all is ok you can use commit.sh in a terminal do : cd " + MIGRATION_FOLDER + "; sh commit.sh");
    System.out.println(
            "This script will create new svn projet and commit resources, add 'target' to svn:ignore ...");

}

From source file:com.mycompany.hdp.hdp.java

public static void main(String[] args) throws IOException, FileNotFoundException, URISyntaxException {
    Configuration configuration = new Configuration();
    configuration.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
    configuration.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
    hdfs = FileSystem.get(new URI("hdfs://104.236.110.203:9000"), configuration);
    date = dt.toString().replaceAll(" ", "_").replaceAll(":", "-");
    crawl("iphone");
    crawl("ipad");
    crawl("samsung phone");
    crawl("samsung tab");
    if (success > 0) {
        try {
            FileUtils.deleteDirectory(new File("data"));
            System.out.println("DONE");

        } catch (Exception e) {
            System.out.println(e.getMessage());
        }
    } else {
        System.out.println("Something Went Wrong Try to Fix the Error:::::");
    }

}

From source file:com.sangupta.keepwalking.MergeRepo.java

/**
 * @param args
 * @throws IOException 
 */
public static void main(String[] args) throws IOException {
    if (args.length != 3) {
        usage();
        return;
    }

    final String previousRepo = args[0];
    final String newerRepo = args[1];
    final String mergedRepo = args[2];

    final File previous = new File(previousRepo);
    final File newer = new File(newerRepo);
    final File merged = new File(mergedRepo);

    if (!(previous.exists() && previous.isDirectory())) {
        System.out.println("The previous version does not exists or is not a directory.");
        return;
    }

    if (!(newer.exists() && newer.isDirectory())) {
        System.out.println("The newer version does not exists or is not a directory.");
        return;
    }

    final IOFileFilter directoryFilter = FileFilterUtils.makeCVSAware(FileFilterUtils.makeSVNAware(null));

    final Collection<File> olderFiles = FileUtils.listFiles(previous, TrueFileFilter.TRUE, directoryFilter);
    final Collection<File> newerFiles = FileUtils.listFiles(newer, TrueFileFilter.TRUE, directoryFilter);

    // build a list of unique paths
    System.out.println("Reading files from older version...");
    List<String> olderPaths = new ArrayList<String>();
    for (File oldFile : olderFiles) {
        olderPaths.add(getRelativePath(oldFile, previous));
    }

    System.out.println("Reading files from newer version...");
    List<String> newerPaths = new ArrayList<String>();
    for (File newerFile : newerFiles) {
        newerPaths.add(getRelativePath(newerFile, newer));
    }

    // find which files have been removed from Perforce depot
    List<String> filesRemoved = new ArrayList<String>(olderPaths);
    filesRemoved.removeAll(newerPaths);
    System.out.println("Files removed in newer version: " + filesRemoved.size());
    for (String removed : filesRemoved) {
        System.out.print("    ");
        System.out.println(removed);
    }

    // find which files have been added in Perforce depot
    List<String> filesAdded = new ArrayList<String>(newerPaths);
    filesAdded.removeAll(olderPaths);
    System.out.println("Files added in newer version: " + filesAdded.size());
    for (String added : filesAdded) {
        System.out.print("    ");
        System.out.println(added);
    }

    // find which files are common 
    // now check if they have modified or not
    newerPaths.retainAll(olderPaths);
    List<String> modified = checkModifiedFiles(newerPaths, previous, newer);
    System.out.println("Files modified in newer version: " + modified.size());
    for (String modify : modified) {
        System.out.print("    ");
        System.out.println(modify);
    }

    // clean any previous existence of merged repo
    System.out.println("Cleaning any previous merged repositories...");
    if (merged.exists() && merged.isDirectory()) {
        FileUtils.deleteDirectory(merged);
    }

    System.out.println("Merging from newer to older repository...");
    // copy the original SVN repo to merged
    FileUtils.copyDirectory(previous, merged);

    // now remove all files that need to be
    for (String removed : filesRemoved) {
        File toRemove = new File(merged, removed);
        toRemove.delete();
    }

    // now add all files that are new in perforce
    for (String added : filesAdded) {
        File toAdd = new File(newer, added);
        File destination = new File(merged, added);
        FileUtils.copyFile(toAdd, destination);
    }

    // now over-write modified files
    for (String changed : modified) {
        File change = new File(newer, changed);
        File destination = new File(merged, changed);
        destination.delete();
        FileUtils.copyFile(change, destination);
    }

    System.out.println("Done merging.");
}