Example usage for org.apache.commons.io FileUtils forceMkdir

List of usage examples for org.apache.commons.io FileUtils forceMkdir

Introduction

This page lists example usages of the org.apache.commons.io FileUtils.forceMkdir method.

Prototype

public static void forceMkdir(File directory) throws IOException 

Document

Makes a directory, including any necessary but nonexistent parent directories. If the directory already exists, nothing happens; if a file already exists with the specified name but is not a directory, or the directory cannot be created, an IOException is thrown.
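
A minimal, self-contained sketch of typical usage; the class name and path below are purely illustrative and not taken from the examples that follow.

import java.io.File;
import java.io.IOException;

import org.apache.commons.io.FileUtils;

public class ForceMkdirSketch {
    public static void main(String[] args) throws IOException {
        // Illustrative target; forceMkdir creates "reports" and any missing parents.
        File outputDir = new File("build/example/reports");
        FileUtils.forceMkdir(outputDir);

        // A second call on the now-existing directory is a no-op.
        FileUtils.forceMkdir(outputDir);
    }
}

Unlike File.mkdirs(), which reports failure only through its boolean return value, forceMkdir signals problems by throwing an IOException, which is why every caller below either declares or handles that exception.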

Usage

From source file:eu.planets_project.services.utils.ZipUtilsTest.java

/**
 * Test method for {@link eu.planets_project.services.utils.ZipUtils#createZip(java.io.File, java.io.File, java.lang.String, boolean)}.
 * @throws IOException
 */
@Test
public void testCreateZipAndUnzipTo() throws IOException {
    FileUtils.cleanDirectory(outputFolder);
    int inputFileCount = ZipUtils.listAllFilesAndFolders(TEST_FILE_FOLDER, new ArrayList<File>()).size();
    File zip = ZipUtils.createZip(TEST_FILE_FOLDER, outputFolder, "zipUtilsTest.zip", true);
    System.out.println("Zip created. Please find it here: " + zip.getAbsolutePath());
    String folderName = zip.getName().substring(0, zip.getName().lastIndexOf("."));
    File extract = new File(outputFolder, folderName);
    FileUtils.forceMkdir(extract);
    List<File> extracted = ZipUtils.unzipTo(zip, extract);
    System.out.println("Extracted files:" + System.getProperty("line.separator"));
    for (File file : extracted) {
        System.out.println(file.getAbsolutePath());
    }
    System.out.println("input file-count:  " + inputFileCount);
    System.out.println("output file-count: " + extracted.size());
}

From source file:com.hs.mail.smtp.spool.FileWatcher.java

public void start() {
    if (targetDir != null) {
        try {
            if (null == failureDirFile) {
                failureDirFile = new File(targetDir.getParent(), "failure");
            }
            FileUtils.forceMkdir(failureDirFile);
        } catch (Exception e) {
            logger.error("Cannot create failure directory " + failureDirFile);
            return;
        }
        mainJob = new MainJob(this);
        new Thread(mainJob).start();
    }
}

From source file:at.uni_salzburg.cs.ros.viewer.services.BigraphReactionRuleArchiveImpl.java

/**
 * @param imageRenderer the BigraphImageRenderer instance.
 * @throws IOException
 */
public BigraphReactionRuleArchiveImpl(BigraphImageRenderer imageRenderer) throws IOException {
    this.imageRenderer = imageRenderer;
    String brrArchiveDirName = System.getProperty(BRR_ARCHIVE_PROP, BRR_ARCHIVE_DEFAULT_DIR);
    brrArchive = new File(brrArchiveDirName);
    FileUtils.forceMkdir(brrArchive);
    LOG.info("Using archive directory {}", brrArchive.getAbsolutePath());

    String cleanUp = System.getProperty(BRR_ARCHIVE_CLEANUP_PROP, "true");
    if ("true".equalsIgnoreCase(cleanUp)) {
        LOG.info("Cleaning up bigraph image archive folder '{}'", brrArchive.getAbsolutePath());
        FileUtils.cleanDirectory(brrArchive);
    } else {
        LOG.info("Cleaning up bigraph image archive folder '{}' declined.", brrArchive.getAbsolutePath());
    }
}

From source file:edu.ur.ir.ir_export.service.DefaultContributorTypeExportService.java

/** 
 * Create the xml file for the set of collections.
 *
 * @param xmlFile - file to write the xml to
 * @param contributorTypes - set of contributor types to export
 * 
 * @throws IOException - if writing to the file fails.
 */
public void createXmlFile(File xmlFile, Collection<ContributorType> contributorTypes) throws IOException {
    DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
    DocumentBuilder builder;

    String path = FilenameUtils.getPath(xmlFile.getCanonicalPath());
    if (!path.equals("")) {
        File pathOnly = new File(FilenameUtils.getFullPath(xmlFile.getCanonicalPath()));
        FileUtils.forceMkdir(pathOnly);
    }

    if (!xmlFile.exists()) {
        if (!xmlFile.createNewFile()) {
            throw new IllegalStateException("could not create file");
        }
    }

    try {
        builder = factory.newDocumentBuilder();
    } catch (ParserConfigurationException e) {
        throw new IllegalStateException(e);
    }

    DOMImplementation impl = builder.getDOMImplementation();
    DOMImplementationLS domLs = (DOMImplementationLS) impl.getFeature("LS", "3.0");
    LSSerializer serializer = domLs.createLSSerializer();
    LSOutput lsOut = domLs.createLSOutput();

    Document doc = impl.createDocument(null, "contributor_types", null);
    Element root = doc.getDocumentElement();

    FileOutputStream fos;
    OutputStreamWriter outputStreamWriter;
    BufferedWriter writer;

    try {
        fos = new FileOutputStream(xmlFile);

        try {
            outputStreamWriter = new OutputStreamWriter(fos, "UTF-8");
        } catch (UnsupportedEncodingException e) {
            throw new IllegalStateException(e);
        }
        writer = new BufferedWriter(outputStreamWriter);
        lsOut.setCharacterStream(writer);
    } catch (FileNotFoundException e) {
        throw new IllegalStateException(e);
    }

    // create XML for the child collections
    for (ContributorType ct : contributorTypes) {
        Element contributorType = doc.createElement("contributor_type");

        this.addIdElement(contributorType, ct.getId().toString(), doc);
        this.addNameElement(contributorType, ct.getName(), doc);
        this.addDescription(contributorType, ct.getDescription(), doc);
        this.addSystemCode(contributorType, ct.getUniqueSystemCode(), doc);

        // attach the populated element to the document root so it is serialized
        root.appendChild(contributorType);
    }
    serializer.write(root, lsOut);

    try {
        // close the writer first so buffered output is flushed before the
        // underlying streams are closed
        writer.close();
        outputStreamWriter.close();
        fos.close();
    } catch (Exception e) {
        throw new IllegalStateException(e);
    }

}

From source file:cec.easyshop.storefront.filters.AbstractAddOnFilterTest.java

@Before
public void prepare() throws IOException {
    //log("Starting ...");
    MockitoAnnotations.initMocks(this);

    webTargetResource = createWebTargetDir();
    addOnSourceResource = new File(rootSandboxDir, ADDONTWO_NAME + "/acceleratoraddon" + getFolder());

    FileUtils.forceMkdir(webTargetResource);
    FileUtils.forceMkdir(addOnSourceResource);

    LOG.info("Created sandbox dirs");

    setAddOnFilterActive(true);

    final File addOnExtensionPhysicalPath = new File(rootSandboxDir, ADDONTWO_NAME);
    webExtensionPhysicalPath = createWebCtxPhysicalPath();

    Mockito.doReturn(webExtensionPhysicalPath).when(extensionAccessor).getExtensionDir(STOREFRONT_NAME);
    Mockito.doReturn(addOnExtensionPhysicalPath).when(extensionAccessor).getExtensionDir(ADDONTWO_NAME);
    Mockito.doReturn("/addons/").when(request).getAttribute("javax.servlet.include.servlet_path");
}

From source file:ml.shifu.shifu.fs.ShifuFileUtilsTest.java

@Test
public void copyDataTest() throws IOException {
    File file = new File("common-utils/from_data");
    if (!file.exists()) {
        FileUtils.forceMkdir(file);
    }

    ShifuFileUtils.copy("common-utils/from_data", "common-utils/to_data", SourceType.LOCAL);

    file = new File("common-utils/to_data");

    Assert.assertTrue(file.exists());
}
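
As a side note, the exists() guard in the test above is not strictly required: forceMkdir does nothing when the target already exists as a directory. A minimal sketch of that behavior (the class name is hypothetical; the path is the one used in the test):

import java.io.File;
import java.io.IOException;

import org.apache.commons.io.FileUtils;

public class ForceMkdirIdempotenceSketch {
    public static void main(String[] args) throws IOException {
        File dir = new File("common-utils/from_data");
        FileUtils.forceMkdir(dir); // creates the directory if it is missing
        FileUtils.forceMkdir(dir); // a repeated call on an existing directory does nothing
    }
}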

From source file:blocks.CreateJavaFiles.java

/**
 * Creates a directory with the same name as the model for all the relevant
 * files; if the directory already exists, then deletes the directory and
 * all the files within it before re-creating the directory. (neha)
 */
public void createDirectory() {
    try {
        File directory = new File(destination + modelName);
        // neha: 10/09/2015 if the directory already exists, delete it first.
        // File.delete() cannot remove a non-empty directory, so rather than
        // write a recursive method, this is done with FileUtils.deleteDirectory
        // from the Apache Commons IO library.
        if (directory.exists()) {
            FileUtils.deleteDirectory(directory); // directory.delete();
        }

        // Makes a directory, including any necessary but nonexistent
        // parent directories.
        FileUtils.forceMkdir(directory);
        System.out.println("Directory: " + destination + modelName + " created");

    } catch (Exception e) { // Catch exception if any
        System.err.println("Error creating directory: " + destination + modelName + ": " + e.getMessage());
    }
}
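
An equivalent reset can keep the directory in place and only empty its contents. A minimal sketch under an illustrative, fixed path (FileUtils.cleanDirectory fails if the directory does not exist, hence the guard):

import java.io.File;
import java.io.IOException;

import org.apache.commons.io.FileUtils;

public class ResetDirectorySketch {
    public static void main(String[] args) throws IOException {
        File directory = new File("build/example/model"); // illustrative path
        if (directory.exists()) {
            FileUtils.cleanDirectory(directory); // empty it but keep the directory itself
        } else {
            FileUtils.forceMkdir(directory);     // create it, including missing parents
        }
    }
}

Both branches end with an existing, empty directory.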

From source file:com.alexholmes.hadooputils.test.HadoopTestCaseFixer.java

/**
 * Performs some additional setup required before the
 * {@link org.apache.hadoop.mapred.HadoopTestCase#setUp()} method can be called.
 *
 * @throws Exception if something goes wrong
 */
@Override
@Before
public void setUp() throws Exception {
    // this path is used by
    File f = new File("build/test/mapred/local").getAbsoluteFile();
    if (f.exists()) {
        FileUtils.forceDelete(f);
    }
    FileUtils.forceMkdir(f);

    // required by JobHistory.initLogDir
    System.setProperty("hadoop.log.dir", f.getAbsolutePath());

    super.setUp();
}

From source file:com.alexholmes.hadooputils.test.MiniHadoop.java

/**
 * Creates a {@link MiniMRCluster} and {@link MiniDFSCluster} all working within
 * the directory supplied in {@code tmpDir}.
 *
 * The DFS will be formatted regardless of whether one already existed in the
 * given location.
 *
 * @param config the Hadoop configuration
 * @param taskTrackers number of task trackers to start
 * @param dataNodes number of data nodes to start
 * @param tmpDir the temporary directory which the Hadoop cluster will use for storage
 * @throws IOException thrown if the base directory cannot be set.
 */
public MiniHadoop(final Configuration config, final int taskTrackers, final int dataNodes, final File tmpDir)
        throws IOException {

    if (taskTrackers < 1) {
        throw new IllegalArgumentException("Invalid taskTrackers value, must be greater than 0");
    }
    if (dataNodes < 1) {
        throw new IllegalArgumentException("Invalid dataNodes value, must be greater than 0");
    }

    config.set("hadoop.tmp.dir", tmpDir.getAbsolutePath());

    if (tmpDir.exists()) {
        FileUtils.forceDelete(tmpDir);
    }
    FileUtils.forceMkdir(tmpDir);

    // used by MiniDFSCluster for DFS storage
    System.setProperty("test.build.data", new File(tmpDir, "data").getAbsolutePath());

    // required by JobHistory.initLogDir
    System.setProperty("hadoop.log.dir", new File(tmpDir, "logs").getAbsolutePath());

    JobConf jobConfig = new JobConf(config);

    dfsCluster = new MiniDFSCluster(jobConfig, dataNodes, true, null);
    fileSystem = dfsCluster.getFileSystem();
    mrCluster = new MiniMRCluster(0, 0, taskTrackers, fileSystem.getUri().toString(), 1, null, null, null,
            jobConfig);
}

From source file:it.marcoberri.mbmeteo.action.Commons.java

/**
 *
 * @param log
 */
public static void importLogEasyWeather(Logger log) {

    try {

        final File importPath = new File(
                ConfigurationHelper.prop.getProperty("import.loggerEasyWeather.filepath"));
        FileUtils.forceMkdir(importPath);
        final File importPathBackup = new File(
                ConfigurationHelper.prop.getProperty("import.loggerEasyWeather.filepath") + File.separator
                        + "old" + File.separator);
        FileUtils.forceMkdir(importPathBackup);
        boolean hasHeader = Default
                .toBoolean(ConfigurationHelper.prop.getProperty("import.loggerEasyWeather.hasHeader"), true);

        final String[] extension = { "csv", "txt" };
        Collection<File> files = FileUtils.listFiles(importPath, extension, false);

        if (files.isEmpty()) {
            log.debug("No file to inport: " + importPath);
            return;
        }

        for (File f : files) {

            log.debug("read file:" + f);

            final List<String> l = FileUtils.readLines(f, "UTF-8");
            log.debug("tot line:" + l.size());

            final Datastore ds = MongoConnectionHelper.ds;

            log.debug("hasHeader: " + hasHeader);
            int lineCount = 0;
            int lineDouble = 0;
            for (String s : l) {

                if (hasHeader) {
                    hasHeader = false;
                    continue;
                }

                final String[] columns = s.split(";");
                List<Meteolog> check = ds.createQuery(Meteolog.class).field("time")
                        .equal(getDate("dd-MM-yyyy HH:mm", columns[1], log)).asList();

                if (check != null && !check.isEmpty()) {
                    log.debug("data exist, continue");
                    lineDouble++;
                    continue;
                }

                final Meteolog meteoLog = new Meteolog();
                try {
                    meteoLog.setN(Integer.valueOf(columns[0]));
                } catch (Exception e) {
                    log.error("line skipped " + lineCount, e);
                    continue;
                }

                try {
                    meteoLog.setTime(getDate("dd-MM-yyyy HH:mm", columns[1], log));
                } catch (Exception e) {
                    log.error("line skipped" + lineCount, e);
                    continue;
                }

                try {
                    meteoLog.setInterval(Integer.valueOf(columns[2]));
                } catch (Exception e) {
                    log.error("line " + lineCount, e);
                }

                try {
                    meteoLog.setIndoorHumidity(Double.valueOf(columns[3]));
                } catch (Exception e) {
                    log.error("line " + lineCount, e);
                }

                try {
                    meteoLog.setIndoorTemperature(Double.valueOf(columns[4]));
                } catch (Exception e) {
                    log.error("line " + lineCount, e);
                }

                try {
                    meteoLog.setOutdoorHumidity(Double.valueOf(columns[5]));
                } catch (Exception e) {
                    log.error("line " + lineCount, e);
                }

                try {
                    meteoLog.setOutdoorTemperature(Double.valueOf(columns[6]));
                } catch (Exception e) {
                    log.error("line " + lineCount, e);
                }

                try {
                    meteoLog.setAbsolutePressure(Double.valueOf(columns[7]));
                } catch (Exception e) {
                    log.error("line " + lineCount, e);
                }

                try {
                    meteoLog.setWind(Double.valueOf(columns[8]));
                } catch (Exception e) {
                    log.error("line " + lineCount, e);
                }

                try {
                    meteoLog.setGust(Double.valueOf(columns[9]));
                } catch (Exception e) {
                    log.error("line " + lineCount, e);
                }

                try {
                    meteoLog.setDirection(columns[10]);
                } catch (Exception e) {
                    log.error("line " + lineCount, e);
                }

                try {
                    meteoLog.setRelativePressure(Double.valueOf(columns[11]));
                } catch (Exception e) {
                    log.error("line " + lineCount, e);
                }

                try {
                    meteoLog.setDewpoint(Double.valueOf(columns[12]));
                } catch (Exception e) {
                    log.error("line " + lineCount, e);
                }

                try {
                    meteoLog.setWindChill(Double.valueOf(columns[13]));
                } catch (Exception e) {
                    log.error("line " + lineCount, e);
                }

                try {
                    meteoLog.setHourRainfall(Double.valueOf(columns[14]));
                } catch (Exception e) {
                    log.error("line " + lineCount, e);
                }

                try {
                    meteoLog.setDayRainfall(Double.valueOf(columns[15]));
                } catch (Exception e) {
                    log.error("line " + lineCount, e);
                }

                try {
                    meteoLog.setWeekRainfall(Double.valueOf(columns[16]));
                } catch (Exception e) {
                    log.error("line " + lineCount, e);
                }

                try {
                    meteoLog.setMonthRainfall(Double.valueOf(columns[17]));
                } catch (Exception e) {
                    log.error("line " + lineCount, e);
                }

                try {
                    meteoLog.setTotalRainfall(Double.valueOf(columns[18]));
                } catch (Exception e) {
                    log.error("line " + lineCount, e);
                }

                try {
                    meteoLog.setWindLevel(Double.valueOf(columns[19]));
                } catch (Exception e) {
                    log.error("line " + lineCount, e);
                }

                try {
                    meteoLog.setGustLevel(Double.valueOf(columns[20]));
                } catch (Exception e) {
                    log.error("line " + lineCount, e);
                }

                ds.save(meteoLog, WriteConcern.NORMAL);
                lineCount++;

            }

            log.debug("Tot line insert:" + lineCount);
            log.debug("Tot line scarted:" + lineDouble);

            //move file to backup dir with rename
            final File toMove = new File(
                    importPathBackup + "/" + f.getName() + "_" + System.currentTimeMillis());

            log.debug("Move File to Backup dir" + toMove);
            f.renameTo(toMove);

        }
    } catch (IOException ex) {
        log.fatal(ex);

    }
}