Example usage for org.apache.hadoop.fs FileSystem mkdirs

Introduction

This page collects example usages of org.apache.hadoop.fs.FileSystem#mkdirs drawn from open-source projects.

Prototype

public boolean mkdirs(Path f) throws IOException 

Document

Calls #mkdirs(Path, FsPermission) with default permission. Like Unix mkdir -p, it creates all nonexistent parent directories, and an already existing directory hierarchy is not an error; the boolean result reports success.
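
For orientation, here is a minimal, self-contained sketch (the class name and target path are hypothetical, and it assumes the default Configuration resolves to a filesystem you can write to):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class MkdirsExample {
    public static void main(String[] args) throws IOException {
        // Resolves to the local filesystem unless fs.defaultFS points at an HDFS URI.
        Configuration conf = new Configuration();
        try (FileSystem fs = FileSystem.get(conf)) {
            // Hypothetical path; mkdirs also creates the missing parents.
            Path dir = new Path("/tmp/mkdirs-example/nested");
            boolean ok = fs.mkdirs(dir);
            System.out.println("mkdirs returned: " + ok); // true on success or if it already exists
        }
    }
}

Because failure is reported through the return value rather than only through exceptions, callers can branch on the result directly, a pattern several of the examples below rely on.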

Usage

From source file: fr.jetoile.hadoopunit.integrationtest.SparkSolrIntegrationTest.java

License: Apache License

@Before
public void before() throws IOException, URISyntaxException {
    FileSystem fileSystem = HdfsUtils.INSTANCE.getFileSystem();

    fileSystem.mkdirs(new Path("/khanh/test"));
    fileSystem.mkdirs(new Path("/khanh/test_parquet"));
    fileSystem.copyFromLocalFile(
            new Path(SparkSolrIntegrationTest.class.getClassLoader().getResource("test.csv").toURI()),
            new Path("/khanh/test/test.csv"));

    new HiveSetup(HiveConnectionUtils.INSTANCE.getDestination(), Operations.sequenceOf(CREATE_TABLES)).launch();
}

From source file: fr.jetoile.hadoopunit.sample.ParquetToSolrJobIntegrationTest.java

License: Apache License

@Before
public void before() throws IOException, URISyntaxException {
    FileSystem fileSystem = HdfsUtils.INSTANCE.getFileSystem();

    fileSystem.mkdirs(new Path("/khanh/test"));
    fileSystem.mkdirs(new Path("/khanh/test_parquet"));
    fileSystem.copyFromLocalFile(
            new Path(ParquetToSolrJobIntegrationTest.class.getClassLoader().getResource("test.csv").toURI()),
            new Path("/khanh/test/test.csv"));
}

From source file: fr.jetoile.hadoopunit.sample.ParquetToSolrJobTest.java

License: Apache License

@Before
public void before() throws IOException, URISyntaxException {
    FileSystem fileSystem = HdfsUtils.INSTANCE.getFileSystem();

    fileSystem.mkdirs(new Path("/khanh/test"));
    fileSystem.mkdirs(new Path("/khanh/test_parquet"));
    fileSystem.copyFromLocalFile(
            new Path(ParquetToSolrJobTest.class.getClassLoader().getResource("test.csv").toURI()),
            new Path("/khanh/test/test.csv"));

    new HiveSetup(HiveConnectionUtils.INSTANCE.getDestination(), Operations.sequenceOf(CREATE_TABLES)).launch();
}

From source file: fr.jetoile.hadoopunit.sample.SparkJobIntegrationTest.java

License: Apache License

@Before
public void before() throws IOException, URISyntaxException {
    FileSystem fileSystem = HdfsUtils.INSTANCE.getFileSystem();

    fileSystem.mkdirs(new Path("/khanh/test"));
    fileSystem.mkdirs(new Path("/khanh/test_parquet"));
    fileSystem.copyFromLocalFile(
            new Path(SparkJobIntegrationTest.class.getClassLoader().getResource("test.csv").toURI()),
            new Path("/khanh/test/test.csv"));

    new HiveSetup(HiveConnectionUtils.INSTANCE.getDestination(), Operations.sequenceOf(CREATE_TABLES)).launch();
}

From source file: fr.jetoile.hadoopunit.sample.SparkJobTest.java

License: Apache License

@Before
public void before() throws IOException, URISyntaxException {
    FileSystem fileSystem = HdfsUtils.INSTANCE.getFileSystem();

    fileSystem.mkdirs(new Path("/khanh/test"));
    fileSystem.mkdirs(new Path("/khanh/test_parquet"));
    fileSystem.copyFromLocalFile(new Path(SparkJobTest.class.getClassLoader().getResource("test.csv").toURI()),
            new Path("/khanh/test/test.csv"));

    new HiveSetup(HiveConnectionUtils.INSTANCE.getDestination(), Operations.sequenceOf(CREATE_TABLES)).launch();
}

From source file: fuse4j.hadoopfs.HdfsClientImpl.java

License: Apache License

@Override
public boolean mkdir(int uid, String path) {
    FileSystem dfs = null;
    try {
        dfs = getDfs(uid);
        return dfs.mkdirs(new Path(path));
    } catch (Exception e) {
        // Swallow the exception and fall through to the failure return value.
    }
    return false;
}

From source file: gaffer.accumulo.bulkimport.MoveIntoAccumulo.java

License: Apache License

public int run(String[] args) throws Exception {
    // Usage
    if (args.length < 3) {
        System.err.println("Usage: " + MoveIntoAccumulo.class.getName()
                + " <inputpath> <failurepath> <accumulo_properties_file>");
        return 1;
    }

    // Gets paths
    Path inputPath = new Path(args[0]);
    Path failurePath = new Path(args[1]);
    String accumuloPropertiesFile = args[2];

    // Hadoop configuration
    Configuration conf = getConf();
    FileSystem fs = FileSystem.get(conf);

    // Connect to Accumulo
    AccumuloConfig accConf = new AccumuloConfig(accumuloPropertiesFile);
    Connector conn = Accumulo.connect(accConf);
    String tableName = accConf.getTable();

    // Check if the table exists
    if (!conn.tableOperations().exists(tableName)) {
        System.err.println("Table " + tableName + " does not exist - create the table before running this");
        return 1;
    }

    // Make the failure directory
    fs.mkdirs(failurePath);
    fs.setPermission(failurePath, new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL));

    // Remove the _SUCCESS file to prevent a warning in Accumulo
    fs.delete(new Path(inputPath + "/_SUCCESS"), false);

    // Set all permissions
    IngestUtils.setDirectoryPermsForAccumulo(fs, inputPath);

    // Import the files
    conn.tableOperations().importDirectory(tableName, inputPath.toString(), failurePath.toString(), false);

    // Delete the temporary directories
    fs.delete(failurePath, true);

    return 0;
}

From source file: gaffer.accumulostore.integration.AddElementsFromHdfsIT.java

License: Apache License

@Test
public void shouldAddElementsFromHdfsWhenOutputDirectoryAlreadyExists() throws Exception {
    final FileSystem fs = FileSystem.getLocal(createLocalConf());
    fs.mkdirs(new Path(outputDir));

    addElementsFromHdfs(ByteEntityKeyPackage.class);
    addElementsFromHdfs(ClassicKeyPackage.class);
}

From source file: gaffer.accumulostore.integration.AddElementsFromHdfsIT.java

License: Apache License

@Test
public void shouldAddElementsFromHdfsWhenFailureDirectoryAlreadyExists() throws Exception {
    final FileSystem fs = FileSystem.getLocal(createLocalConf());
    fs.mkdirs(new Path(failureDir));

    addElementsFromHdfs(ByteEntityKeyPackage.class);
    addElementsFromHdfs(ClassicKeyPackage.class);
}

From source file: gaffer.accumulostore.integration.AddElementsFromHdfsIT.java

License: Apache License

@Test
public void shouldThrowExceptionWhenAddElementsFromHdfsWhenOutputDirectoryContainsFiles() throws Exception {
    final FileSystem fs = FileSystem.getLocal(createLocalConf());
    fs.mkdirs(new Path(outputDir));
    try (final BufferedWriter writer = new BufferedWriter(
            new OutputStreamWriter(fs.create(new Path(outputDir + "/someFile.txt"), true)))) {
        writer.write("Some content");
    }

    try {
        addElementsFromHdfs(ByteEntityKeyPackage.class);
        fail("Exception expected");
    } catch (final OperationException e) {
        assertEquals("Output directory exists and is not empty: " + outputDir, e.getCause().getMessage());
    }

    try {
        addElementsFromHdfs(ClassicKeyPackage.class);
        fail("Exception expected");
    } catch (final OperationException e) {
        assertEquals("Output directory exists and is not empty: " + outputDir, e.getCause().getMessage());
    }
}