Example usage for org.apache.commons.io FileUtils forceMkdir

Introduction

On this page you can find example usages of org.apache.commons.io FileUtils forceMkdir.

Prototype

public static void forceMkdir(File directory) throws IOException 

Document

Makes a directory, including any necessary but nonexistent parent directories.
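
For reference, here is a minimal, self-contained sketch of the call before the project examples below; the class name and the data/cache/images path are only illustrative. Unlike File.mkdirs(), forceMkdir signals failure by throwing an IOException, for instance when a regular file already occupies the target path.

import java.io.File;
import java.io.IOException;

import org.apache.commons.io.FileUtils;

public class ForceMkdirExample {
    public static void main(String[] args) throws IOException {
        // Any missing intermediate directories ("data" and "cache") are created as well.
        File dir = new File("data/cache/images");

        // Throws IOException if the directory cannot be created,
        // e.g. because a regular file already exists at that path.
        FileUtils.forceMkdir(dir);

        System.out.println("Directory exists: " + dir.isDirectory());
    }
}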

Usage

From source file:info.donsun.cache.filecache.FileCache.java

@Override
public void put(Object key, Object value) throws CacheException {
    synchronized (LOCK) {
        // remove any existing entry for this key
        remove(key);

        // derive the cache file name from the key
        String fileName = formatId(key);

        // make sure the cache directory exists before writing
        File cacheDir = new File(config.getDir());
        if (!cacheDir.exists() || !cacheDir.isDirectory()) {
            try {
                FileUtils.forceMkdir(cacheDir);
            } catch (IOException e) {
                throw new CacheException(String.format("Create cacheDir %s fail.", cacheDir.getPath()), e);
            }
        }

        // create the cache file if it does not exist yet
        File file = FileUtils.getFile(config.getDir(), fileName);
        if (!file.exists()) {
            try {
                file.createNewFile();
            } catch (IOException e) {
                throw new CacheException("Create new file " + file.getName() + " fail.", e);
            }
        }

        try {
            // serialize the cached object to the file
            FileOutputStream fileStream = new FileOutputStream(file);
            ObjectOutputStream out = new ObjectOutputStream(fileStream);

            try {
                CachedObject cachedObject = new CachedObject();
                if (config.getExpireTime() != null) {
                    cachedObject.setExpire(System.currentTimeMillis() + config.getExpireTime());
                }
                cachedObject.setKey(key);
                cachedObject.setData(value);
                out.writeObject(cachedObject);
            } finally {
                out.close();
                fileStream.close();

            }
        } catch (Exception e) {
            throw new CacheException("Save cache file fail.", e);
        }
    }
}

From source file:com.esri.geoevent.test.performance.report.AbstractFileRollOverReportWriter.java

protected void createParentDirectoriesIfNeeded(String fileName) throws IOException {
    final File file = new File(fileName);
    final File parent = file.getParentFile();
    FileUtils.forceMkdir(parent);
}

From source file:ch.algotrader.starter.GoogleDailyDownloader.java

private void retrieve(HttpClient httpclient, String symbol, String startDate, String endDate, String exchange)
        throws IOException, HttpException, FileNotFoundException, ParseException {

    GetMethod fileGet = new GetMethod("https://www.google.com/finance/historical?q=" + exchange + ":" + symbol
            + "&output=csv&startdate=" + startDate + (endDate == null ? "" : "&endDate=" + endDate));

    fileGet.getParams().setCookiePolicy(CookiePolicy.BROWSER_COMPATIBILITY);

    try {
        int status = httpclient.executeMethod(fileGet);

        if (status == HttpStatus.SC_OK) {

            BufferedReader reader = new BufferedReader(
                    new InputStreamReader(fileGet.getResponseBodyAsStream()));

            File parent = new File("files" + File.separator + "google");
            if (!parent.exists()) {
                FileUtils.forceMkdir(parent);
            }

            Writer writer = new OutputStreamWriter(new FileOutputStream(new File(parent, symbol + ".csv")));

            try {

                reader.readLine();

                String line;
                List<String> lines = new ArrayList<String>();
                while ((line = reader.readLine()) != null) {

                    String[] tokens = line.split(",");

                    Date dateTime = fileFormat.parse(tokens[0]);

                    StringBuffer buffer = new StringBuffer();
                    buffer.append(
                            DateTimePatterns.LOCAL_DATE_TIME.format(DateTimeLegacy.toLocalDateTime(dateTime)));
                    buffer.append(",");
                    buffer.append(tokens[1].equals("-") ? "" : tokens[1]);
                    buffer.append(",");
                    buffer.append(tokens[2].equals("-") ? "" : tokens[2]);
                    buffer.append(",");
                    buffer.append(tokens[3].equals("-") ? "" : tokens[3]);
                    buffer.append(",");
                    buffer.append(tokens[4].equals("-") ? "" : tokens[4]);
                    buffer.append(",");
                    buffer.append(tokens[5]);
                    buffer.append("\n");

                    lines.add(buffer.toString());
                }

                writer.write("dateTime,open,high,low,close,vol\n");

                // write in reverse order
                for (int i = lines.size() - 1; i > 0; i--) {
                    writer.append(lines.get(i));
                }

            } finally {
                reader.close();
                writer.close();
            }
        }
    } finally {
        fileGet.releaseConnection();
    }
}

From source file:com.btoddb.chronicle.plunkers.FilePlunkerImpl.java

PrintWriter retrievePrintWriter(final String fn) {
    try {
        return printWriterCache.get(fn, new Callable<PrintWriter>() {
            @Override
            public PrintWriter call() throws IOException {
                File f = new File(fn);
                FileUtils.forceMkdir(f.getParentFile());
                return new PrintWriter(new FileWriter(f));
            }
        });
    } catch (ExecutionException e) {
        Utils.logAndThrow(logger, "exception while trying to retrieve PrintWriter from cache", e);
        return null;
    }
}

From source file:com.cdancy.artifactory.rest.util.ArtifactoryUtils.java

public static File getGradleHome() {
    String possibleGradleHome = System.getenv("GRADLE_HOME");
    if (possibleGradleHome == null) {
        possibleGradleHome = System.getProperty("user.home") + "/.gradle";
    }
    File gradleHome = new File(possibleGradleHome);
    if (!gradleHome.exists()) {
        try {
            FileUtils.forceMkdir(gradleHome);
        } catch (Exception e) {
            Throwables.propagate(e);
        }
    }
    return gradleHome;
}

From source file:it.marcoberri.mbmeteo.action.UploadFile.java

/**
 * Handles the HTTP <code>POST</code> method.
 *
 * @param request servlet request
 * @param response servlet response
 * @throws ServletException if a servlet-specific error occurs
 * @throws IOException if an I/O error occurs
 */
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {

    // checks if the request actually contains upload file
    if (!ServletFileUpload.isMultipartContent(request)) {
        return;
    }

    // configures some settings
    final DiskFileItemFactory factory = new DiskFileItemFactory();
    factory.setSizeThreshold(THRESHOLD_SIZE);
    factory.setRepository(new File(System.getProperty("java.io.tmpdir")));

    final ServletFileUpload upload = new ServletFileUpload(factory);
    upload.setFileSizeMax(MAX_FILE_SIZE);
    upload.setSizeMax(REQUEST_SIZE);

    // constructs the directory path to store upload file
    final String uploadPath = ConfigurationHelper.prop.getProperty("import.loggerEasyWeather.filepath");

    final File uploadDir = new File(uploadPath);

    if (!uploadDir.exists()) {
        FileUtils.forceMkdir(uploadDir);
    }

    try {
        // parses the request's content to extract file data
        final List formItems = upload.parseRequest(request);
        Iterator iter = formItems.iterator();

        // iterates over form's fields
        while (iter.hasNext()) {
            final FileItem item = (FileItem) iter.next();
            // processes only fields that are not form fields
            if (item.isFormField()) {
                continue;
            }

            final String fileName = new File(item.getName()).getName();
            final String filePath = uploadPath + File.separator + fileName;
            final File storeFile = new File(filePath);
            item.write(storeFile);
        }
        request.setAttribute("message", "Upload has been done successfully!");
    } catch (final Exception ex) {
        request.setAttribute("message", "There was an error: " + ex.getMessage());
    }

    final ExecuteImport i = new ExecuteImport();
    Thread t = new Thread(i);
    t.start();

}

From source file:com.github.jrh3k5.flume.mojo.plugin.AbstractFlumePluginMojoTest.java

/**
 * Set up the mojo for each test.
 * 
 * @throws Exception
 *             If any errors occur during the setup.
 */
@Before
public void setUp() throws Exception {
    mojo = new ConcreteMojo(getTestName());
    Whitebox.setInternalState(mojo, "artifactRepository", artifactRepository);
    Whitebox.setInternalState(mojo, "artifactResolver", artifactResolver);
    setAttach(mojo, true);
    Whitebox.setInternalState(mojo, "classifierSuffix", classifier);
    Whitebox.setInternalState(mojo, "dependencyGraphBuilder", dependencyGraphBuilder);

    outputDirectory = new File(getTestDirectory(), "target");
    FileUtils.forceMkdir(outputDirectory);
    Whitebox.setInternalState(mojo, "outputDirectory", outputDirectory);

    pluginsStagingDirectory = new File(outputDirectory, "flume-plugins");
    FileUtils.forceMkdir(pluginsStagingDirectory);
    Whitebox.setInternalState(mojo, "pluginsStagingDirectory", pluginsStagingDirectory);

    Whitebox.setInternalState(mojo, "project", project);
    Whitebox.setInternalState(mojo, "projectHelper", projectHelper);
    Whitebox.setInternalState(mojo, "remoteArtifactRepositories",
            Collections.singletonList(remoteArtifactRepository));
}

From source file:com.kylinolap.job.engine.GenericJobEngineTest.java

@BeforeClass
public static void beforeClass() throws Exception {

    FileUtils.forceMkdir(new File("/tmp/kylin/logs/"));

    FileUtils.deleteDirectory(new File(tempTestMetadataUrl));
    FileUtils.copyDirectory(new File("../examples/test_case_data"), new File(tempTestMetadataUrl));
    System.setProperty(KylinConfig.KYLIN_CONF, tempTestMetadataUrl);

    // deploy files to hdfs
    SSHClient hadoopCli = new SSHClient(getHadoopCliHostname(), getHadoopCliUsername(), getHadoopCliPassword(),
            null);
    scpFilesToHdfs(hadoopCli, new String[] { "src/test/resources/json/dummy_jobinstance.json" }, mrInputDir);
    // deploy sample java jar
    hadoopCli.scpFileToRemote("src/test/resources/jarfile/SampleJavaProgram.jarfile", "/tmp");
    hadoopCli.scpFileToRemote("src/test/resources/jarfile/SampleBadJavaProgram.jarfile", "/tmp");

    // create log dir
    hadoopCli.execCommand("mkdir -p /tmp/kylin/logs/");
    KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
    kylinConfig.setMetadataUrl(tempTestMetadataUrl);

    jobManager = new JobManager("GenericJobEngineTest", new JobEngineConfig(KylinConfig.getInstanceFromEnv()));

    jobDAO = JobDAO.getInstance(KylinConfig.getInstanceFromEnv());

    jobDAO.updateJobInstance(createARunningJobInstance("a_running_job"));

    jobManager.startJobEngine(2);
    Thread.sleep(2000);
}

From source file:com.stevpet.sonar.plugins.dotnet.mscover.vstest.clean.CleanTest.java

@Test
public void CorrectDirWithFiles_ExpectDeleted() throws IOException {
    VsTestRunner runner = givenANewRunner();
    File placeHolder = TestUtils.getResource(CLEAN_TEST_WITHFILES_PLACE_HOLDER_TXT);
    File testDir = new File(placeHolder.getParentFile(), ".sonar");
    File testResultsDir = new File(testDir, "TestResults");
    createFile(testDir, "test1");
    FileUtils.forceMkdir(testResultsDir);
    createFile(testResultsDir, "test1");
    createFile(testResultsDir, "test2");
    expectFilesInDir(testDir, 3);
    expectFilesInDir(testResultsDir, 2);

    runner.setSonarPath(testDir.getAbsolutePath());
    runner.clean();
    assertTrue(testDir.exists());
    expectFilesInDir(testDir, 1);
    assertFalse(testResultsDir.exists());
}

From source file:com.ikanow.aleph2.data_model.utils.TestPropertiesUtils.java

@Test
public void test_mergeProperties() throws IOException {

    final String temp_dir = System.getProperty("java.io.tmpdir") + File.separator;
    final String conf_dir_str = temp_dir + "test_merge_conf_dir";

    final File dir = new File(conf_dir_str);
    final File default_conf = new File(temp_dir + "default.properties");
    default_conf.delete();
    assertFalse(default_conf.exists());
    FileUtils.forceMkdir(dir);
    final Collection<File> conf_files = FileUtils.listFiles(dir,
            Arrays.asList("conf", "properties", "json").toArray(new String[0]), false);
    conf_files.forEach(f -> f.delete());
    final Collection<File> conf_files_deleted = FileUtils.listFiles(dir,
            Arrays.asList("conf", "properties", "json").toArray(new String[0]), false);
    assertTrue(conf_files_deleted.isEmpty());

    // Fallback file:
    FileUtils.write(default_conf, "a=test_1\nb=test2");

    final File merge1 = new File(conf_dir_str + File.separator + "a_test.properties");
    final File merge2 = new File(conf_dir_str + File.separator + "b_test.properties");
    final File merge3 = new File(conf_dir_str + File.separator + "c_test.properties");

    FileUtils.write(merge1, "b=test_2.1\nc=test3");
    FileUtils.write(merge2, "c=test_3.1\nd=test4");
    FileUtils.write(merge3, "d=test_4.1\ne=test5");

    final Config conf = PropertiesUtils.getMergedConfig(Optional.of(dir), default_conf);

    assertEquals(Arrays.asList("a", "b", "c", "d", "e"),
            conf.root().keySet().stream().sorted().collect(Collectors.toList()));
    assertEquals("test_1", conf.getString("a"));
    assertEquals("test_2.1", conf.getString("b"));
    assertEquals("test_3.1", conf.getString("c"));
    assertEquals("test_4.1", conf.getString("d"));
    assertEquals("test5", conf.getString("e"));

    final Config conf2 = PropertiesUtils.getMergedConfig(Optional.empty(), default_conf);
    assertEquals(Arrays.asList("a", "b"), conf2.root().keySet().stream().sorted().collect(Collectors.toList()));

}