Example usage for org.apache.commons.io FileUtils moveFile

Introduction

On this page you can find example usages of org.apache.commons.io.FileUtils.moveFile, collected from open-source projects.

Prototype

public static void moveFile(File srcFile, File destFile) throws IOException 

Document

Moves a file to a new location, falling back to a copy-and-delete when a plain rename is not possible (for example across file systems). The move fails with an IOException if the destination file already exists.
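Before the project examples below, here is a minimal, self-contained sketch of a typical call; the class name and file paths are hypothetical, chosen only for illustration.

import java.io.File;
import java.io.IOException;

import org.apache.commons.io.FileUtils;

public class MoveFileExample {
    public static void main(String[] args) throws IOException {
        // Hypothetical paths, for illustration only.
        File source = new File("build/report.txt");
        File destination = new File("archive/report.txt");

        // moveFile throws an IOException if the destination already exists,
        // so quietly remove any stale copy first.
        FileUtils.deleteQuietly(destination);

        // Moves the file, falling back to copy-and-delete when a plain
        // rename is not possible (for example across file systems).
        FileUtils.moveFile(source, destination);
    }
}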

Usage

From source file:org.ambraproject.admin.action.AdminTopActionTest.java

@Test(dataProviderClass = SampleArticleData.class, dataProvider = "sampleArticle")
public void testIngest(ZipFile archive, Article article) throws Exception {
    String zipFileName = new File(archive.getName()).getName();
    File crossref_file = new File(ingestedDir, crossrefFileName(article.getDoi()));
    File articleDir = new File(filestoreDir,
            article.getDoi().replaceAll("info:doi/10.1371/journal.", "10.1371/"));

    //delete the article in case it's still in the database from the ingester test
    try {
        documentManagementService.delete(article.getDoi(), DEFAULT_ADMIN_AUTHID);
    } catch (NoSuchArticleIdException e) {
        //ignore
    }

    try {
        action.setFilesToIngest(new String[] { zipFileName });
        String result = action.ingest();
        assertEquals(result, Action.SUCCESS, "Action didn't return success");
        assertTrue(new File(ingestedDir, zipFileName).exists(),
                "Zip file didn't get moved to ingested directory");
        assertFalse(new File(ingestDir, zipFileName).exists(),
                "Zip file didn't get removed from the ingest directory");
        assertTrue(crossref_file.exists(), "crossref file didn't get created");
        assertTrue(articleDir.exists(), "Article didn't get written to the file store");

        try {
            articleService.getArticle(article.getDoi(), DEFAULT_ADMIN_AUTHID);
        } catch (NoSuchArticleIdException e) {
            fail("Article didn't get written to the database");
        }

        //cleanup
    } finally {
        FileUtils.deleteQuietly(crossref_file);
        try {
            //move the archive back
            FileUtils.moveFile(new File(ingestedDir, zipFileName), new File(ingestDir, zipFileName));
        } catch (IOException e) {
            //suppress
        }
        try {
            //delete the files that got written to the filestore
            FileUtils.deleteDirectory(articleDir);
        } catch (IOException e) {
            //suppress
        }
        //delete the article
        documentManagementService.delete(article.getDoi(), DEFAULT_ADMIN_AUTHID);
    }
}

From source file:org.ambraproject.admin.action.AdminTopActionTest.java

@Test(dataProviderClass = SampleArticleData.class, dataProvider = "sampleArticle")
public void testIngestDuplicateArticle(ZipFile archive, Article article) throws Exception {
    String zipFileName = new File(archive.getName()).getName();

    Article articleToStore = new Article();
    articleToStore.setDoi(article.getDoi());
    dummyDataStore.store(articleToStore);

    File destinationFile = new File(ingestedDir, zipFileName);
    File crossrefFile = new File(ingestedDir, crossrefFileName(article.getDoi()));
    try {
        dummyDataStore.store(article);
        action.setFilesToIngest(new String[] { zipFileName });
        action.ingest();
        assertNotNull(action.getActionErrors(), "null list of errors");
        assertTrue(action.getActionErrors().size() > 0, "didn't add an error for ingest");
        assertFalse(destinationFile.exists(), "Zip file got moved to ingested dir");
        assertFalse(crossrefFile.exists(), "Crossref file got created in ingest dir");

    } finally {
        if (destinationFile.exists()) {
            try {
                FileUtils.moveFile(destinationFile, new File(ingestDir, zipFileName));
            } catch (IOException e) {
                //ignore
            }
        }
        documentManagementService.delete(article.getDoi(), DEFAULT_ADMIN_AUTHID);
    }
}

From source file:org.ambraproject.admin.service.impl.DocumentManagementServiceImpl.java

/**
 * Revert the data out of the ingested queue
 *
 * @param uri the article uri
 * @throws java.io.IOException on an error
 */
@Override
public void revertIngestedQueue(String uri) throws IOException {
    // delete any crossref submission file
    File queueDir = new File(documentDirectory);
    File ingestedDir = new File(ingestedDocumentDirectory);
    File ingestedXmlFile = new File(ingestedDir, uri.replaceAll("[:/.]", "_") + ".xml");

    if (log.isDebugEnabled())
        log.debug("Deleting '" + ingestedXmlFile + "'");

    try {
        FileUtils.forceDelete(ingestedXmlFile);
    } catch (FileNotFoundException fnfe) {
        log.info("'" + ingestedXmlFile + "' does not exist - cannot delete: ", fnfe);
    }

    // move zip back to ingestion queue
    if (!queueDir.equals(ingestedDir)) {
        // strip 'info:doi/10.1371/journal.'
        String fname = uri.substring(documentPrefix.length()) + ".zip";
        File fromFile = new File(ingestedDir, fname);
        File toFile = new File(queueDir, fname);

        try {
            if (log.isDebugEnabled())
                log.debug("Moving '" + fromFile + "' to '" + toFile + "'");
            FileUtils.moveFile(fromFile, toFile);
        } catch (FileNotFoundException fnfe) {
            log.info("Could not move '" + fromFile + "' to '" + toFile + "': ", fnfe);
        }
    }
}

From source file:org.apache.archiva.consumers.core.AutoRenameConsumer.java

@Override
public void processFile(String path) throws ConsumerException {
    File file = new File(this.repositoryDir, path);
    if (file.exists()) {
        Iterator<String> itExtensions = this.extensionRenameMap.keySet().iterator();
        while (itExtensions.hasNext()) {
            String extension = itExtensions.next();
            if (path.endsWith(extension)) {
                String fixedExtension = this.extensionRenameMap.get(extension);
                String correctedPath = path.substring(0, path.length() - extension.length()) + fixedExtension;
                File to = new File(this.repositoryDir, correctedPath);
                try {
                    // Rename the file.
                    FileUtils.moveFile(file, to);
                } catch (IOException e) {
                    log.warn("Unable to rename {} to {} :", path, correctedPath, e);
                    triggerConsumerWarning(RENAME_FAILURE,
                            "Unable to rename " + path + " to " + correctedPath + ": " + e.getMessage());
                }
            }
        }

        log.info("(Auto) Removing File: {} ", file.getAbsolutePath());
        triggerConsumerInfo("(Auto) Removing File: " + file.getAbsolutePath());
        file.delete();
    }
}

From source file:org.apache.archiva.webdav.ArchivaDavResource.java

@Override
public void move(DavResource destination) throws DavException {
    if (!exists()) {
        throw new DavException(HttpServletResponse.SC_NOT_FOUND, "Resource to copy does not exist.");
    }

    try {
        ArchivaDavResource resource = checkDavResourceIsArchivaDavResource(destination);
        if (isCollection()) {
            FileUtils.moveDirectory(getLocalResource(), resource.getLocalResource());

            triggerAuditEvent(remoteAddr, locator.getRepositoryId(), logicalResource,
                    AuditEvent.MOVE_DIRECTORY);
        } else {
            FileUtils.moveFile(getLocalResource(), resource.getLocalResource());

            triggerAuditEvent(remoteAddr, locator.getRepositoryId(), logicalResource, AuditEvent.MOVE_FILE);
        }

        log.debug("{}{}' moved to '{}' (current user '{}')", (isCollection() ? "Directory '" : "File '"),
                getLocalResource().getName(), destination, this.principal);

    } catch (IOException e) {
        throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e);
    }
}

From source file:org.apache.drill.fmpp.mojo.FMPPMojo.java

private Report moveIfChanged(File root, String tmpPath) throws MojoFailureException, IOException {
    Report report = new Report();
    for (File file : root.listFiles()) {
        if (file.isDirectory()) {
            report.add(moveIfChanged(file, tmpPath));
            if (!file.delete()) {
                throw new MojoFailureException(format("can not delete %s", file));
            }
        } else {
            String absPath = file.getAbsolutePath();
            if (!absPath.startsWith(tmpPath)) {
                throw new MojoFailureException(format("%s should start with %s", absPath, tmpPath));
            }
            String relPath = absPath.substring(tmpPath.length());
            File outputFile = new File(output, relPath);
            if (!outputFile.exists()) {
                report.addNew();
            } else if (!FileUtils.contentEquals(file, outputFile)) {
                getLog().info(format("%s has changed", relPath));
                if (!outputFile.delete()) {
                    throw new MojoFailureException(format("can not delete %s", outputFile));
                }
                report.addChanged();
            } else {
                report.addUnchanged();
            }
            if (!outputFile.exists()) {
                File parentDir = outputFile.getParentFile();
                if (parentDir.exists() && !parentDir.isDirectory()) {
                    throw new MojoFailureException(
                            format("can not move %s to %s as %s is not a dir", file, outputFile, parentDir));
                }
                if (!parentDir.exists() && !parentDir.mkdirs()) {
                    throw new MojoFailureException(format("can not move %s to %s as dir %s can not be created",
                            file, outputFile, parentDir));
                }
                FileUtils.moveFile(file, outputFile);
            } else {
                if (!file.delete()) {
                    throw new MojoFailureException(format("can not delete %s", file));
                }
            }
        }
    }
    return report;
}

From source file:org.apache.flex.utilities.converter.retrievers.download.DownloadRetriever.java

public File retrieve(SdkType type, String version, PlatformType platformType) throws RetrieverException {
    try {
        if (type.equals(SdkType.FLASH) || type.equals(SdkType.AIR) || type.equals(SdkType.FONTKIT)) {
            confirmLicenseAcceptance(type);
        }

        if (type.equals(SdkType.FONTKIT)) {
            File tmpTargetFile = File.createTempFile(UUID.randomUUID().toString(), "");
            String tempSuffix = tmpTargetFile.getName().substring(tmpTargetFile.getName().lastIndexOf("-"));
            if (!(tmpTargetFile.delete())) {
                throw new IOException("Could not delete temp file: " + tmpTargetFile.getAbsolutePath());
            }

            File targetRootDir = new File(tmpTargetFile.getParentFile(), type.toString() + tempSuffix);
            File targetDir = new File(targetRootDir, "/lib/external/optional");
            if (!(targetDir.mkdirs())) {
                throw new IOException("Could not create temp directory: " + targetDir.getAbsolutePath());
            }

            final URI afeUri = new URI(
                    "http://sourceforge.net/adobe/flexsdk/code/HEAD/tree/trunk/lib/afe.jar?format=raw");
            final File afeFile = new File(targetDir, "afe.jar");
            performSafeDownload(afeUri, afeFile);

            final URI aglj40Uri = new URI(
                    "http://sourceforge.net/adobe/flexsdk/code/HEAD/tree/trunk/lib/aglj40.jar?format=raw");
            final File aglj40File = new File(targetDir, "aglj40.jar");
            performSafeDownload(aglj40Uri, aglj40File);

            final URI rideauUri = new URI(
                    "http://sourceforge.net/adobe/flexsdk/code/HEAD/tree/trunk/lib/rideau.jar?format=raw");
            final File rideauFile = new File(targetDir, "rideau.jar");
            performSafeDownload(rideauUri, rideauFile);

            final URI flexFontkitUri = new URI(
                    "http://sourceforge.net/adobe/flexsdk/code/HEAD/tree/trunk/lib/flex-fontkit.jar?format=raw");
            final File flexFontkitFile = new File(targetDir, "flex-fontkit.jar");
            performSafeDownload(flexFontkitUri, flexFontkitFile);

            return targetRootDir;
        } else {
            final URL sourceUrl = new URL(getBinaryUrl(type, version, platformType));
            final File targetFile = File.createTempFile(
                    type.toString() + "-" + version + ((platformType != null) ? "-" + platformType : "") + "-",
                    sourceUrl.getFile().substring(sourceUrl.getFile().lastIndexOf(".")));
            performFastDownload(sourceUrl, targetFile);

            ////////////////////////////////////////////////////////////////////////////////
            // Do the extracting.
            ////////////////////////////////////////////////////////////////////////////////

            if (type.equals(SdkType.FLASH)) {
                final File targetDirectory = new File(targetFile.getParent(),
                        targetFile.getName().substring(0, targetFile.getName().lastIndexOf(".") - 1));
                final File libDestFile = new File(targetDirectory,
                        "frameworks/libs/player/" + version + "/playerglobal.swc");
                if (!libDestFile.getParentFile().exists()) {
                    libDestFile.getParentFile().mkdirs();
                }
                FileUtils.moveFile(targetFile, libDestFile);
                return targetDirectory;
            } else {
                System.out.println("Extracting archive to temp directory.");
                File targetDirectory = new File(targetFile.getParent(),
                        targetFile.getName().substring(0, targetFile.getName().lastIndexOf(".") - 1));
                if (type.equals(SdkType.SWFOBJECT)) {
                    unpack(targetFile, new File(targetDirectory, "templates"));
                } else {
                    unpack(targetFile, targetDirectory);
                }
                System.out.println();
                System.out.println("Finished extracting.");
                System.out.println("===========================================================");

                // In case of the swfobject, delete some stuff we don't want in there.
                if (type.equals(SdkType.SWFOBJECT)) {
                    File delFile = new File(targetDirectory, "templates/swfobject/index_dynamic.html");
                    FileUtils.deleteQuietly(delFile);
                    delFile = new File(targetDirectory, "templates/swfobject/index.html");
                    FileUtils.deleteQuietly(delFile);
                    delFile = new File(targetDirectory, "templates/swfobject/test.swf");
                    FileUtils.deleteQuietly(delFile);
                    delFile = new File(targetDirectory, "templates/swfobject/src");
                    FileUtils.deleteDirectory(delFile);
                }

                return targetDirectory;
            }
        }
    } catch (MalformedURLException e) {
        throw new RetrieverException("Error downloading archive.", e);
    } catch (FileNotFoundException e) {
        throw new RetrieverException("Error downloading archive.", e);
    } catch (IOException e) {
        throw new RetrieverException("Error downloading archive.", e);
    } catch (URISyntaxException e) {
        throw new RetrieverException("Error downloading archive.", e);
    }
}

From source file:org.apache.flink.hdfstests.ContinuousFileProcessingFrom12MigrationTest.java

/**
 * Create a file with pre-determined String format of the form:
 * {@code fileIdx +": "+ sampleLine +" "+ lineNo}.
 */
private Tuple2<File, String> createFileAndFillWithData(File base, String fileName, int fileIdx,
        String sampleLine) throws IOException {

    File file = new File(base, fileName + fileIdx);
    Assert.assertFalse(file.exists());

    File tmp = new File(base, "." + fileName + fileIdx);
    FileOutputStream stream = new FileOutputStream(tmp);
    StringBuilder str = new StringBuilder();
    for (int i = 0; i < LINES_PER_FILE; i++) {
        String line = fileIdx + ": " + sampleLine + " " + i + "\n";
        str.append(line);
        stream.write(line.getBytes());
    }
    stream.close();

    FileUtils.moveFile(tmp, file);

    Assert.assertTrue("No result file present", file.exists());
    return new Tuple2<>(file, str.toString());
}

From source file:org.apache.flink.test.checkpointing.utils.SavepointMigrationTestBase.java

@SafeVarargs
protected final void executeAndSavepoint(StreamExecutionEnvironment env, String savepointPath,
        Tuple2<String, Integer>... expectedAccumulators) throws Exception {

    // Retrieve the job manager
    ActorGateway jobManager = Await.result(cluster.leaderGateway().future(), DEADLINE.timeLeft());

    // Submit the job
    JobGraph jobGraph = env.getStreamGraph().getJobGraph();

    JobSubmissionResult jobSubmissionResult = cluster.submitJobDetached(jobGraph);

    LOG.info("Submitted job {} and waiting...", jobSubmissionResult.getJobID());

    StandaloneClusterClient clusterClient = new StandaloneClusterClient(cluster.configuration());

    boolean done = false;
    while (DEADLINE.hasTimeLeft()) {
        Thread.sleep(100);
        Map<String, Object> accumulators = clusterClient.getAccumulators(jobSubmissionResult.getJobID());

        boolean allDone = true;
        for (Tuple2<String, Integer> acc : expectedAccumulators) {
            Integer numFinished = (Integer) accumulators.get(acc.f0);
            if (numFinished == null) {
                allDone = false;
                break;
            }
            if (!numFinished.equals(acc.f1)) {
                allDone = false;
                break;
            }
        }
        if (allDone) {
            done = true;
            break;
        }
    }

    if (!done) {
        fail("Did not see the expected accumulator results within time limit.");
    }

    LOG.info("Triggering savepoint.");
    // Flink 1.2
    final Future<Object> savepointResultFuture = jobManager.ask(
            new JobManagerMessages.TriggerSavepoint(jobSubmissionResult.getJobID(), Option.<String>empty()),
            DEADLINE.timeLeft());

    // Flink 1.1
    //      final Future<Object> savepointResultFuture =
    //            jobManager.ask(new JobManagerMessages.TriggerSavepoint(jobSubmissionResult.getJobID()), DEADLINE.timeLeft());

    Object savepointResult = Await.result(savepointResultFuture, DEADLINE.timeLeft());

    if (savepointResult instanceof JobManagerMessages.TriggerSavepointFailure) {
        fail("Error drawing savepoint: "
                + ((JobManagerMessages.TriggerSavepointFailure) savepointResult).cause());
    }

    // jobmanager will store savepoint in heap, we have to retrieve it
    final String jobmanagerSavepointPath = ((JobManagerMessages.TriggerSavepointSuccess) savepointResult)
            .savepointPath();
    LOG.info("Saved savepoint: " + jobmanagerSavepointPath);

    // Flink 1.2
    FileUtils.moveFile(new File(new URI(jobmanagerSavepointPath).getPath()), new File(savepointPath));

    // Flink 1.1
    // Retrieve the savepoint from the testing job manager
    //      LOG.info("Requesting the savepoint.");
    //      Future<Object> savepointFuture = jobManager.ask(new TestingJobManagerMessages.RequestSavepoint(jobmanagerSavepointPath), DEADLINE.timeLeft());
    //
    //      Savepoint savepoint = ((TestingJobManagerMessages.ResponseSavepoint) Await.result(savepointFuture, DEADLINE.timeLeft())).savepoint();
    //      LOG.info("Retrieved savepoint: " + jobmanagerSavepointPath + ".");
    //
    //      LOG.info("Storing savepoint to file.");
    //      Configuration config = new Configuration();
    //      config.setString(org.apache.flink.runtime.checkpoint.savepoint.SavepointStoreFactory.SAVEPOINT_BACKEND_KEY, "filesystem");
    //      config.setString(org.apache.flink.runtime.checkpoint.savepoint.SavepointStoreFactory.SAVEPOINT_DIRECTORY_KEY, "file:///Users/aljoscha/Downloads");
    //      String path = org.apache.flink.runtime.checkpoint.savepoint.SavepointStoreFactory.createFromConfig(config).storeSavepoint(savepoint);
    //
    //      FileUtils.moveFile(new File(new URI(path).getPath()), new File(savepointPath));
}

From source file:org.apache.geode.internal.JarDeployer.java

private void renameJarWithOldNamingConvention(File oldJar) throws IOException {
    Matcher matcher = oldNamingPattern.matcher(oldJar.getName());
    if (!matcher.matches()) {
        throw new IllegalArgumentException(
                "The given jar " + oldJar.getCanonicalPath() + " does not match the old naming convention");
    }

    String unversionedJarNameWithoutExtension = matcher.group(1);
    String jarVersion = matcher.group(2);
    String newJarName = unversionedJarNameWithoutExtension + ".v" + jarVersion + ".jar";

    File newJar = new File(this.deployDirectory, newJarName);
    logger.debug("Renaming deployed jar from {} to {}", oldJar.getCanonicalPath(), newJar.getCanonicalPath());

    FileUtils.moveFile(oldJar, newJar);
}