Example usage for org.apache.commons.io FileUtils moveFile

List of usage examples for org.apache.commons.io FileUtils moveFile

Introduction

On this page you can find example usage of org.apache.commons.io FileUtils moveFile.

Prototype

public static void moveFile(File srcFile, File destFile) throws IOException 

Source Link

Document

Moves a file.

Usage

From source file:org.apache.sqoop.orm.CompilationManager.java

/**
 * Compile the .java files into .class files via an embedded javac call.
 * On success, move the .java files to the code output dir.
 *
 * @throws IOException if no compiler or hadoop core jar can be found,
 *                     or if javac reports a compilation error.
 */
public void compile() throws IOException {
    List<String> args = new ArrayList<String>();

    // ensure that the jar output dir exists.
    String jarOutDir = options.getJarOutputDir();
    File jarOutDirObj = new File(jarOutDir);
    if (!jarOutDirObj.exists()) {
        boolean mkdirSuccess = jarOutDirObj.mkdirs();
        if (!mkdirSuccess) {
            // NOTE(review): mkdir failure is only logged at debug level here;
            // if the dir really is missing, javac fails later with a less
            // obvious error.
            LOG.debug("Warning: Could not make directories for " + jarOutDir);
        }
    } else if (LOG.isDebugEnabled()) {
        LOG.debug("Found existing " + jarOutDir);
    }

    // Make sure jarOutDir ends with a '/' so it can be used as a path prefix.
    if (!jarOutDir.endsWith(File.separator)) {
        jarOutDir = jarOutDir + File.separator;
    }

    // find hadoop-*-core.jar for classpath.
    String coreJar = findHadoopCoreJar();
    if (null == coreJar) {
        // Couldn't find a core jar to insert into the CP for compilation.  If,
        // however, we're running this from a unit test, then the path to the
        // .class files might be set via the hadoop.alt.classpath property
        // instead. Check there first.
        String coreClassesPath = System.getProperty("hadoop.alt.classpath");
        if (null == coreClassesPath) {
            // no -- we're out of options. Fail.
            throw new IOException("Could not find hadoop core jar!");
        } else {
            coreJar = coreClassesPath;
        }
    }

    // find sqoop jar for compilation classpath
    String sqoopJar = Jars.getSqoopJarPath();
    if (null != sqoopJar) {
        sqoopJar = File.pathSeparator + sqoopJar;
    } else {
        LOG.warn("Could not find sqoop jar; child compilation may fail");
        sqoopJar = "";
    }

    String curClasspath = System.getProperty("java.class.path");

    args.add("-sourcepath");
    args.add(jarOutDir);

    args.add("-d");
    args.add(jarOutDir);

    args.add("-classpath");
    args.add(curClasspath + File.pathSeparator + coreJar + sqoopJar);

    // Use the JDK's in-process compiler rather than forking a javac process.
    JavaCompiler compiler = ToolProvider.getSystemJavaCompiler();
    if (null == compiler) {
        LOG.error("It seems as though you are running sqoop with a JRE.");
        LOG.error("Sqoop requires a JDK that can compile Java code.");
        LOG.error("Please install a JDK and set $JAVA_HOME to use it.");
        throw new IOException("Could not start Java compiler.");
    }
    StandardJavaFileManager fileManager = compiler.getStandardFileManager(null, null, null);

    // All generated sources live under jarOutDir; build their full paths.
    ArrayList<String> srcFileNames = new ArrayList<String>();
    for (String srcfile : sources) {
        srcFileNames.add(jarOutDir + srcfile);
        LOG.debug("Adding source file: " + jarOutDir + srcfile);
    }

    if (LOG.isDebugEnabled()) {
        LOG.debug("Invoking javac with args:");
        for (String arg : args) {
            LOG.debug("  " + arg);
        }
    }

    Iterable<? extends JavaFileObject> srcFileObjs = fileManager.getJavaFileObjectsFromStrings(srcFileNames);
    JavaCompiler.CompilationTask task = compiler.getTask(null, // Write to stderr
            fileManager, null, // No special diagnostic handling
            args, null, // Compile all classes in the source compilation units
            srcFileObjs);

    boolean result = task.call();
    if (!result) {
        throw new IOException("Error returned by javac");
    }

    // Where we should move source files after compilation.
    String srcOutDir = new File(options.getCodeOutputDir()).getAbsolutePath();
    if (!srcOutDir.endsWith(File.separator)) {
        srcOutDir = srcOutDir + File.separator;
    }

    // Move these files to the srcOutDir, creating parent dirs as needed.
    for (String srcFileName : sources) {
        String orig = jarOutDir + srcFileName;
        String dest = srcOutDir + srcFileName;
        File fOrig = new File(orig);
        File fDest = new File(dest);
        File fDestParent = fDest.getParentFile();
        if (null != fDestParent && !fDestParent.exists()) {
            if (!fDestParent.mkdirs()) {
                LOG.error("Could not make directory: " + fDestParent);
            }
        }
        try {
            FileUtils.moveFile(fOrig, fDest);
        } catch (IOException e) {
            // NOTE(review): a failed move is swallowed and only logged at
            // debug level; the compiled output is still usable, but the
            // source is left behind in jarOutDir.
            LOG.debug("Could not rename " + orig + " to " + dest, e);
        }
    }
}

From source file:org.apache.synapse.deployers.AbstractSynapseArtifactDeployer.java

/**
 * Moves the given artifact file aside to a ".back" backup before redeployment.
 * Picks the first unused name in the sequence {@code <path>.back},
 * {@code <path>.1.back}, {@code <path>.2.back}, ... and records the artifact
 * in the deployment store as backed up.
 *
 * @param file artifact file to back up
 * @return the backup file path, or a sentinel string when skipped / failed
 */
private String backupFile(File file) throws DeploymentException {

    //TODO: WSO2 Specific hack to prevent creating backup files in worker nodes
    if (System.getProperty("workerNode") != null) {
        return "NO_BACKUP_ON_WORKER.INFO";
    }

    String sourcePath = SynapseArtifactDeploymentStore.getNormalizedAbsolutePath(file.getAbsolutePath());
    getSynapseConfiguration().getArtifactDeploymentStore().addBackedUpArtifact(sourcePath);

    // Probe for a backup name that is not already taken.
    String candidate = sourcePath + ".back";
    for (int suffix = 1; new File(candidate).exists(); suffix++) {
        candidate = sourcePath + "." + suffix + ".back";
    }

    try {
        FileUtils.moveFile(file, new File(candidate));
    } catch (IOException e) {
        log.warn("Error while backing up the artifact: ", e);
        return "ERROR_WHILE_BACKING_UP_ARTIFACT";
    }
    return candidate;
}

From source file:org.apache.synapse.deployers.ImportDeployer.java

/**
 * Moves the given artifact file aside to a ".back" backup. Picks the first
 * unused name in the sequence {@code <path>.back}, {@code <path>.1.back},
 * {@code <path>.2.back}, ...
 *
 * @param file artifact file to back up
 * @return the chosen backup file path
 */
private String backupFile(File file) throws DeploymentException {
    String sourcePath = FilenameUtils.normalize(file.getAbsolutePath());

    // Probe for a backup name that is not already taken.
    String candidate = sourcePath + ".back";
    for (int suffix = 1; new File(candidate).exists(); suffix++) {
        candidate = sourcePath + "." + suffix + ".back";
    }

    try {
        FileUtils.moveFile(file, new File(candidate));
    } catch (IOException e) {
        // Delegates error reporting; candidate path is still returned.
        handleSynapseArtifactDeploymentError("Error while backing up the artifact: " + file.getName(),
                e);
    }
    return candidate;
}

From source file:org.apache.zeppelin.helium.HeliumBundleFactory.java

/**
 * Builds a single javascript bundle containing all given Helium packages via
 * npm + webpack run in {@code workingDirectory}. Reuses the previously built
 * bundle when the package set is unchanged, unless {@code forceRefresh} is set.
 *
 * @param pkgs         packages to include in the bundle
 * @param forceRefresh rebuild even if the cached bundle appears up to date
 * @return the cached bundle file
 * @throws IOException if npm/webpack fails or the bundle file is not produced
 */
public synchronized File buildBundle(List<HeliumPackage> pkgs, boolean forceRefresh) throws IOException {
    // package.json template shipped as a resource; DEPENDENCIES placeholder
    // is replaced with the computed dependency list below.
    URL pkgUrl = Resources.getResource("helium/package.json");
    String pkgJson = Resources.toString(pkgUrl, Charsets.UTF_8);
    StringBuilder dependencies = new StringBuilder();
    StringBuilder cacheKeyBuilder = new StringBuilder();

    // Skip editor/VCS junk (dotfiles, '#...', '~...') when copying local packages.
    FileFilter npmPackageCopyFilter = new FileFilter() {
        @Override
        public boolean accept(File pathname) {
            String fileName = pathname.getName();
            if (fileName.startsWith(".") || fileName.startsWith("#") || fileName.startsWith("~")) {
                return false;
            } else {
                return true;
            }
        }
    };

    for (HeliumPackage pkg : pkgs) {
        String[] moduleNameVersion = getNpmModuleNameAndVersion(pkg);
        if (moduleNameVersion == null) {
            logger.error("Can't get module name and version of package " + pkg.getName());
            continue;
        }
        if (dependencies.length() > 0) {
            dependencies.append(",\n");
        }
        // "name": "version" entry for package.json's dependencies section.
        dependencies.append("\"" + moduleNameVersion[0] + "\": \"" + moduleNameVersion[1] + "\"");
        cacheKeyBuilder.append(pkg.getName() + pkg.getArtifact());

        File pkgInstallDir = new File(workingDirectory, "node_modules/" + pkg.getName());
        if (pkgInstallDir.exists()) {
            FileUtils.deleteDirectory(pkgInstallDir);
        }

        // Local (on-disk) packages are copied into node_modules directly.
        if (isLocalPackage(pkg)) {
            FileUtils.copyDirectory(new File(pkg.getArtifact()), pkgInstallDir, npmPackageCopyFilter);
        }
    }
    pkgJson = pkgJson.replaceFirst("DEPENDENCIES", dependencies.toString());

    // check if we can use previous buildBundle or not
    if (cacheKeyBuilder.toString().equals(bundleCacheKey) && currentCacheBundle.isFile() && !forceRefresh) {
        return currentCacheBundle;
    }

    // webpack.config.js
    URL webpackConfigUrl = Resources.getResource("helium/webpack.config.js");
    String webpackConfig = Resources.toString(webpackConfigUrl, Charsets.UTF_8);

    // generate load.js: import each module, then register it in the bundles var.
    StringBuilder loadJsImport = new StringBuilder();
    StringBuilder loadJsRegister = new StringBuilder();

    long idx = 0;
    for (HeliumPackage pkg : pkgs) {
        String[] moduleNameVersion = getNpmModuleNameAndVersion(pkg);
        if (moduleNameVersion == null) {
            continue;
        }

        String className = "bundles" + idx++;
        loadJsImport.append("import " + className + " from \"" + moduleNameVersion[0] + "\"\n");

        loadJsRegister.append(HELIUM_BUNDLES_VAR + ".push({\n");
        loadJsRegister.append("id: \"" + moduleNameVersion[0] + "\",\n");
        loadJsRegister.append("name: \"" + pkg.getName() + "\",\n");
        loadJsRegister.append("icon: " + gson.toJson(pkg.getIcon()) + ",\n");
        loadJsRegister.append("type: \"" + pkg.getType() + "\",\n");
        loadJsRegister.append("class: " + className + "\n");
        loadJsRegister.append("})\n");
    }

    // Materialize the npm project files in the working directory.
    FileUtils.write(new File(workingDirectory, "package.json"), pkgJson);
    FileUtils.write(new File(workingDirectory, "webpack.config.js"), webpackConfig);
    FileUtils.write(new File(workingDirectory, "load.js"), loadJsImport.append(loadJsRegister).toString());

    copyFrameworkModuleToInstallPath(npmPackageCopyFilter);

    try {
        out.reset();
        npmCommand("install --loglevel=error");
    } catch (TaskRunnerException e) {
        // ignore `(empty)` warning
        String cause = new String(out.toByteArray());
        if (!cause.contains("(empty)")) {
            throw new IOException(cause);
        }
    }

    try {
        out.reset();
        npmCommand("run bundle");
    } catch (TaskRunnerException e) {
        throw new IOException(new String(out.toByteArray()));
    }

    String bundleStdoutResult = new String(out.toByteArray());

    File heliumBundle = new File(workingDirectory, HELIUM_BUNDLE);
    if (!heliumBundle.isFile()) {
        throw new IOException("Can't create bundle: \n" + bundleStdoutResult);
    }

    // webpack may exit 0 yet still report errors; parse its output to check.
    WebpackResult result = getWebpackResultFromOutput(bundleStdoutResult);
    if (result.errors.length > 0) {
        heliumBundle.delete();
        throw new IOException(result.errors[0]);
    }

    // Replace the cached bundle and remember the cache key.
    // NOTE(review): the method is already synchronized, so this inner
    // synchronized block is redundant (but harmless).
    synchronized (this) {
        currentCacheBundle.delete();
        FileUtils.moveFile(heliumBundle, currentCacheBundle);
        bundleCacheKey = cacheKeyBuilder.toString();
    }
    return currentCacheBundle;
}

From source file:org.apache.zeppelin.helium.HeliumVisualizationFactory.java

/**
 * Builds a single visualization bundle (vis.bundle.js) for the given Helium
 * packages via npm + webpack in {@code workingDirectory}. Reuses the previous
 * bundle when the package set is unchanged, unless {@code forceRefresh} is set.
 *
 * @param pkgs         packages to include in the bundle
 * @param forceRefresh rebuild even if the cached bundle appears up to date
 * @return the cached bundle file
 * @throws IOException if npm/webpack fails or the bundle file is not produced
 */
public synchronized File bundle(List<HeliumPackage> pkgs, boolean forceRefresh) throws IOException {
    // package.json template shipped as a resource; DEPENDENCIES placeholder
    // is replaced with the computed dependency list below.
    URL pkgUrl = Resources.getResource("helium/package.json");
    String pkgJson = Resources.toString(pkgUrl, Charsets.UTF_8);
    StringBuilder dependencies = new StringBuilder();
    StringBuilder cacheKeyBuilder = new StringBuilder();

    // Skip editor/VCS junk (dotfiles, '#...', '~...') when copying local packages.
    FileFilter npmPackageCopyFilter = new FileFilter() {
        @Override
        public boolean accept(File pathname) {
            String fileName = pathname.getName();
            if (fileName.startsWith(".") || fileName.startsWith("#") || fileName.startsWith("~")) {
                return false;
            } else {
                return true;
            }
        }
    };

    for (HeliumPackage pkg : pkgs) {
        String[] moduleNameVersion = getNpmModuleNameAndVersion(pkg);
        if (moduleNameVersion == null) {
            logger.error("Can't get module name and version of package " + pkg.getName());
            continue;
        }
        if (dependencies.length() > 0) {
            dependencies.append(",\n");
        }
        // "name": "version" entry for package.json's dependencies section.
        dependencies.append("\"" + moduleNameVersion[0] + "\": \"" + moduleNameVersion[1] + "\"");
        cacheKeyBuilder.append(pkg.getName() + pkg.getArtifact());

        File pkgInstallDir = new File(workingDirectory, "node_modules/" + pkg.getName());
        if (pkgInstallDir.exists()) {
            FileUtils.deleteDirectory(pkgInstallDir);
        }

        // Local (on-disk) packages are copied into node_modules directly.
        if (isLocalPackage(pkg)) {
            FileUtils.copyDirectory(new File(pkg.getArtifact()), pkgInstallDir, npmPackageCopyFilter);
        }
    }
    pkgJson = pkgJson.replaceFirst("DEPENDENCIES", dependencies.toString());

    // check if we can use previous bundle or not
    if (cacheKeyBuilder.toString().equals(bundleCacheKey) && currentBundle.isFile() && !forceRefresh) {
        return currentBundle;
    }

    // webpack.config.js
    URL webpackConfigUrl = Resources.getResource("helium/webpack.config.js");
    String webpackConfig = Resources.toString(webpackConfigUrl, Charsets.UTF_8);

    // generate load.js: import each module, then register it as a visualization.
    StringBuilder loadJsImport = new StringBuilder();
    StringBuilder loadJsRegister = new StringBuilder();

    long idx = 0;
    for (HeliumPackage pkg : pkgs) {
        String[] moduleNameVersion = getNpmModuleNameAndVersion(pkg);
        if (moduleNameVersion == null) {
            continue;
        }

        String className = "vis" + idx++;
        loadJsImport.append("import " + className + " from \"" + moduleNameVersion[0] + "\"\n");

        loadJsRegister.append("visualizations.push({\n");
        loadJsRegister.append("id: \"" + moduleNameVersion[0] + "\",\n");
        loadJsRegister.append("name: \"" + pkg.getName() + "\",\n");
        loadJsRegister.append("icon: " + gson.toJson(pkg.getIcon()) + ",\n");
        loadJsRegister.append("class: " + className + "\n");
        loadJsRegister.append("})\n");
    }

    // Materialize the npm project files in the working directory.
    FileUtils.write(new File(workingDirectory, "package.json"), pkgJson);
    FileUtils.write(new File(workingDirectory, "webpack.config.js"), webpackConfig);
    FileUtils.write(new File(workingDirectory, "load.js"), loadJsImport.append(loadJsRegister).toString());

    // install tabledata module
    File tabledataModuleInstallPath = new File(workingDirectory, "node_modules/zeppelin-tabledata");
    if (tabledataModulePath != null) {
        if (tabledataModuleInstallPath.exists()) {
            FileUtils.deleteDirectory(tabledataModuleInstallPath);
        }
        FileUtils.copyDirectory(tabledataModulePath, tabledataModuleInstallPath, npmPackageCopyFilter);
    }

    // install visualization module
    File visModuleInstallPath = new File(workingDirectory, "node_modules/zeppelin-vis");
    if (visualizationModulePath != null) {
        if (visModuleInstallPath.exists()) {
            // when zeppelin-vis and zeppelin-table package is published to npm repository
            // we don't need to remove module because npm install command will take care
            // dependency version change. However, when two dependencies are copied manually
            // into node_modules directory, changing vis package version results inconsistent npm
            // install behavior.
            //
            // Remove the vis package every time and let npm re-download it as a workaround.
            FileUtils.deleteDirectory(visModuleInstallPath);
        }
        FileUtils.copyDirectory(visualizationModulePath, visModuleInstallPath, npmPackageCopyFilter);
    }

    out.reset();
    try {
        npmCommand("install");
        npmCommand("run bundle");
    } catch (TaskRunnerException e) {
        throw new IOException(new String(out.toByteArray()));
    }

    File visBundleJs = new File(workingDirectory, "vis.bundle.js");
    if (!visBundleJs.isFile()) {
        throw new IOException("Can't create visualization bundle : \n" + new String(out.toByteArray()));
    }

    // webpack may exit 0 yet still report errors; parse its output to check.
    WebpackResult result = getWebpackResultFromOutput(new String(out.toByteArray()));
    if (result.errors.length > 0) {
        visBundleJs.delete();
        throw new IOException(result.errors[0]);
    }

    // Replace the cached bundle and remember the cache key.
    // NOTE(review): the method is already synchronized, so this inner
    // synchronized block is redundant (but harmless).
    synchronized (this) {
        currentBundle.delete();
        FileUtils.moveFile(visBundleJs, currentBundle);
        bundleCacheKey = cacheKeyBuilder.toString();
    }
    return currentBundle;
}

From source file:org.apereo.portal.io.xml.JaxbPortalDataHandlerService.java

/**
 * Exports the entity identified by typeId/dataId into the given directory.
 * The data is first serialized to a temp file in that directory and then
 * renamed to its final name, so readers never observe a half-written file.
 *
 * @return true if the entity was exported, false if the export was skipped
 */
@Override
public boolean exportData(String typeId, String dataId, File directory) {
    directory.mkdirs();

    final File tempFile;
    try {
        final String prefix = SafeFilenameUtils.makeSafeFilename(StringUtils.rightPad(dataId, 2, '-') + "-");
        final String suffix = SafeFilenameUtils.makeSafeFilename("." + typeId);
        tempFile = File.createTempFile(prefix, suffix, directory);
    } catch (IOException e) {
        throw new RuntimeException("Could not create temp file to export " + typeId + " " + dataId, e);
    }

    try {
        // Delegate serialization; a null file name means "nothing to export".
        final String fileName = this.exportData(typeId, dataId, new StreamResult(tempFile));
        if (fileName == null) {
            logger.info("Skipped: type={} id={}", typeId, dataId);
            return false;
        }

        final File target = new File(directory, fileName + "." + typeId + ".xml");
        if (target.exists()) {
            logger.warn("Exporting " + typeId + " " + dataId
                    + " but destination file already exists, it will be overwritten: " + target);
            target.delete();
        }
        FileUtils.moveFile(tempFile, target);
        logger.info("Exported: {}", target);

        return true;
    } catch (Exception e) {
        if (e instanceof RuntimeException) {
            throw (RuntimeException) e;
        }

        throw new RuntimeException("Failed to export " + typeId + " " + dataId, e);
    } finally {
        // No-op when the move succeeded; cleans up on skip or failure.
        FileUtils.deleteQuietly(tempFile);
    }
}

From source file:org.archive.crawler.framework.ActionDirectory.java

/**
 * Process an individual action file found in the action directory.
 * The file's (core) name extension selects the action; afterwards the file
 * is moved to the 'done' area with a timestamp prefix.
 *
 * @param actionFile File to process
 */
protected void actOn(File actionFile) {
    LOGGER.info("processing action file: " + actionFile);
    String filename = actionFile.getName();
    boolean isGzip = filename.endsWith(".gz");
    // Strip a trailing ".gz" so the action suffix can be examined.
    String corename = isGzip ? filename.substring(0, filename.length() - 3) : filename;
    String timestamp = ArchiveUtils.get17DigitDate();

    if (corename.endsWith(".seeds")) {
        // import seeds
        getSeeds().actOn(actionFile);
    } else if (corename.endsWith(".recover")) {
        // apply recovery-log; ".s.recover" also applies scope
        boolean alsoScope = corename.endsWith(".s.recover");
        try {
            // consider-included all successes and explicit-includes...
            getFrontier().importRecoverFormat(actionFile, alsoScope, true, false, "F[si] ");
            // then retry all adds...
            getFrontier().importRecoverFormat(actionFile, alsoScope, false, false, "F\\+ ");
        } catch (IOException ioe) {
            LOGGER.log(Level.SEVERE, "problem with action file: " + actionFile, ioe);
        }
    } else if (corename.endsWith(".include")) {
        // consider-included-only (do not schedule)
        boolean alsoScope = corename.endsWith(".s.include");
        try {
            getFrontier().importRecoverFormat(actionFile, alsoScope, true, false, ".*");
        } catch (IOException ioe) {
            LOGGER.log(Level.SEVERE, "problem with action file: " + actionFile, ioe);
        }
    } else if (corename.endsWith(".schedule")) {
        // schedule to queues
        boolean alsoScope = corename.endsWith(".s.schedule");
        try {
            getFrontier().importRecoverFormat(actionFile, alsoScope, false, false, ".*");
        } catch (IOException ioe) {
            LOGGER.log(Level.SEVERE, "problem with action file: " + actionFile, ioe);
        }
    } else if (corename.endsWith(".force")) {
        // schedule to queues, forcing re-fetch
        boolean alsoScope = corename.endsWith(".s.force");
        try {
            getFrontier().importRecoverFormat(actionFile, alsoScope, false, true, ".*");
        } catch (IOException ioe) {
            LOGGER.log(Level.SEVERE, "problem with action file: " + actionFile, ioe);
        }
        //        } else if (filename.endsWith(".robots")) {
        //            // force refresh of robots
        //            // TODO
    } else if (!tryAsScript(actionFile, timestamp)) {
        LOGGER.warning("action file ignored: " + actionFile);
    }

    // move file to 'done' area with timestamp prefix
    // NOTE(review): this loop retries until the file no longer exists; if the
    // move fails persistently (e.g. file locked), it will spin logging SEVERE
    // each iteration — presumably intentional retry behavior, but confirm.
    while (actionFile.exists()) {
        try {
            File doneFile = new File(doneDir.getFile(), timestamp + "." + actionFile.getName());
            FileUtils.moveFile(actionFile, doneFile);

            // attempt to symlink from action/done/ to done file
            File actionDoneDirFile = new File(actionDir.getFile(), "done");
            if (!actionDoneDirFile.equals(doneDir.getFile())) {
                actionDoneDirFile.mkdirs();
                File doneSymlinkFile = new File(actionDoneDirFile, doneFile.getName());
                boolean success = FilesystemLinkMaker.makeSymbolicLink(doneFile.getPath(),
                        doneSymlinkFile.getPath());
                if (!success) {
                    LOGGER.warning("failed to create symlink from " + doneSymlinkFile + " to " + doneFile);
                }
            }
        } catch (IOException e) {
            LOGGER.log(Level.SEVERE, "unable to move " + actionFile, e);
        }
    }
}

From source file:org.archive.crawler.framework.Engine.java

/**
 * Copy a job to a new location, possibly making a job
 * a profile or a profile a runnable job.
 *
 * @param orig CrawlJob representing source
 * @param destDir File location destination
 * @param asProfile true if destination should become a profile
 * @throws IOException if the destination is not empty or a copy/move fails
 */
public synchronized void copy(CrawlJob orig, File destDir, boolean asProfile) throws IOException {
    org.archive.util.FileUtils.ensureWriteableDirectory(destDir);
    if (destDir.list().length > 0) {
        throw new IOException("destination dir not empty");
    }
    final File sourceDir = orig.getPrimaryConfig().getParentFile();

    // FIXME: Add option for only copying history DB
    // FIXME: Don't hardcode these names
    // FIXME: (?) copy any referenced file (ConfigFile/ConfigPath),
    // even outside the job directory?

    // Copy all plain files, skipping 'job.log' and anything sharing that
    // prefix (such as its '.lck' lock file).
    FileUtils.copyDirectory(sourceDir, destDir,
            FileFilterUtils.andFileFilter(FileFilterUtils.fileFileFilter(),
                    FileFilterUtils.notFileFilter(FileFilterUtils.prefixFileFilter("job.log"))));

    // ...and the full contents of the 'resources' subdir, if present.
    final File resourcesDir = new File(sourceDir, "resources");
    if (resourcesDir.isDirectory()) {
        FileUtils.copyDirectory(resourcesDir, new File(destDir, "resources"));
    }

    // Adjust the primary cxml's name to match the destination kind.
    final File copiedConfig = new File(destDir, orig.getPrimaryConfig().getName());
    final String profilePrefix = "profile-";
    if (asProfile && !orig.isProfile()) {
        // job -> profile: add the 'profile-' prefix
        FileUtils.moveFile(copiedConfig, new File(destDir, profilePrefix + copiedConfig.getName()));
    } else if (!asProfile && orig.isProfile()) {
        // profile -> job: strip the 'profile-' prefix
        FileUtils.moveFile(copiedConfig,
                new File(destDir, copiedConfig.getName().substring(profilePrefix.length())));
    }
    findJobConfigs();
}

From source file:org.artifactory.converters.LoggingConverter.java

/**
 * Restores the logback config from its ".back" backup, replacing the current
 * config file if one exists. Does nothing when no backup is present.
 */
@Override
public void revert() {
    File configFile = new File(path, ArtifactoryHome.LOGBACK_CONFIG_FILE_NAME);
    File backupFile = new File(path, ArtifactoryHome.LOGBACK_CONFIG_FILE_NAME + ".back");
    try {
        if (!backupFile.exists()) {
            return; // nothing to restore
        }
        if (configFile.exists()) {
            FileUtils.forceDelete(configFile);
        }
        FileUtils.moveFile(backupFile, configFile);
    } catch (Exception e) {
        throw new RuntimeException("Fail to revert conversion", e);
    }
}

From source file:org.artifactory.converters.MimeTypeConverter.java

/**
 * Restores the mime-types file from its ".back" backup, replacing the current
 * file if one exists. Does nothing when no backup is present.
 */
@Override
public void revert() {
    File currentFile = path;
    File backupFile = new File(path.getAbsolutePath() + ".back");
    try {
        if (!backupFile.exists()) {
            return; // nothing to restore
        }
        if (currentFile.exists()) {
            FileUtils.forceDelete(currentFile);
        }
        FileUtils.moveFile(backupFile, currentFile);
    } catch (Exception e) {
        throw new RuntimeException("Fail to revert conversion", e);
    }
}