Example usage for org.apache.hadoop.fs FileSystem DEFAULT_FS

List of usage examples for org.apache.hadoop.fs FileSystem DEFAULT_FS

Introduction

On this page you can find example usage of org.apache.hadoop.fs FileSystem DEFAULT_FS.

Prototype

String DEFAULT_FS
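
DEFAULT_FS is the built-in default value for the default file system setting (FS_DEFAULT_NAME_KEY), pointing at the local file:/// file system. Below is a minimal sketch of the common pattern used in the examples on this page: force the default file system to DEFAULT_FS and obtain a local FileSystem from the resulting Configuration. The DefaultFsExample class name is illustrative, not part of any of the projects quoted here.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class DefaultFsExample {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // Force the default file system to FileSystem.DEFAULT_FS ("file:///"),
        // i.e. the local file system, regardless of any cluster configuration.
        conf.set(FileSystem.FS_DEFAULT_NAME_KEY, FileSystem.DEFAULT_FS);

        FileSystem fs = FileSystem.get(conf);
        System.out.println("Default FS URI: " + fs.getUri());
        System.out.println("Current dir exists: " + fs.exists(new Path(".")));
    }
}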

Usage

From source file:com.datascience.hadoop.CsvHelper.java

License:Apache License

public Configuration buildConfiguration(String delimiter, String skipHeader, String recordSeparator,
        String[] columns) {
    Configuration conf = new Configuration();
    conf.set("fs.default.name", "file:///");
    conf.set(CsvInputFormat.CSV_READER_DELIMITER, delimiter);
    conf.set(CsvInputFormat.CSV_READER_SKIP_HEADER, skipHeader);
    conf.set(CsvInputFormat.CSV_READER_RECORD_SEPARATOR, recordSeparator);
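    // DEFAULT_FS ("file:///") keeps the default file system local, matching fs.default.name above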
    conf.set(FileSystem.FS_DEFAULT_NAME_KEY, FileSystem.DEFAULT_FS);
    conf.setStrings(CsvInputFormat.CSV_READER_COLUMNS, columns);
    conf.set("io.compression.codecs",
            "org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.DeflateCodec,org.apache.hadoop.io.compress.SnappyCodec,org.apache.hadoop.io.compress.Lz4Codec");
    return conf;
}

From source file:org.apache.crunch.kafka.offset.hdfs.HDFSOffsetReaderTest.java

License:Apache License

@Before
public void setup() throws IOException {
    Configuration config = new Configuration();
    config.set(FileSystem.DEFAULT_FS, tempFolder.newFolder().getAbsolutePath());

    fileSystem = FileSystem.newInstance(config);
    basePath = new Path(tempFolder.newFolder().toString(), testName.getMethodName());

    writer = new HDFSOffsetWriter(config, basePath);

    reader = new HDFSOffsetReader(config, basePath);
}

From source file:org.apache.crunch.kafka.offset.hdfs.HDFSOffsetWriterTest.java

License:Apache License

@Before
public void setup() throws IOException {
    config = new Configuration();
    config.set(FileSystem.DEFAULT_FS, tempFolder.newFolder().getAbsolutePath());

    fileSystem = FileSystem.newInstance(config);
    basePath = new Path(tempFolder.newFolder().toString(), testName.getMethodName());

    writer = new HDFSOffsetWriter(config, basePath);
}

From source file:org.apache.drill.exec.dotdrill.TestDotDrillUtil.java

License:Apache License

@BeforeClass
public static void setup() throws Exception {
    Configuration conf = new Configuration();
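    // Point fs.defaultFS at DEFAULT_FS ("file:///") so the test runs against the local file system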
    conf.set(FileSystem.FS_DEFAULT_NAME_KEY, FileSystem.DEFAULT_FS);
    dfs = new DrillFileSystem(conf);
    tempDir = dirTestWatcher.getTmpDir();
    tempPath = new Path(tempDir.getAbsolutePath());
}

From source file:org.apache.drill.exec.ExecTest.java

License:Apache License

/**
 * Creates instance of local file system.
 *
 * @return local file system
 */
public static FileSystem getLocalFileSystem() throws IOException {
    Configuration configuration = new Configuration();
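    // Setting fs.defaultFS to DEFAULT_FS yields a local (file:///) FileSystem instance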
    configuration.set(FileSystem.FS_DEFAULT_NAME_KEY, FileSystem.DEFAULT_FS);
    return FileSystem.get(configuration);
}

From source file:org.apache.drill.exec.physical.unit.TestMiniPlan.java

License:Apache License

@BeforeClass
public static void initFS() throws Exception {
    Configuration conf = new Configuration();
    conf.set(FileSystem.FS_DEFAULT_NAME_KEY, FileSystem.DEFAULT_FS);
    fs = new DrillFileSystem(conf);
}

From source file:org.apache.drill.exec.udf.dynamic.TestDynamicUDFSupport.java

License:Apache License

@Before
public void setupNewDrillbit() throws Exception {
    udfDir = dirTestWatcher.makeSubDir(Paths.get("udf"));
    Properties overrideProps = new Properties();
    overrideProps.setProperty(ExecConstants.UDF_DIRECTORY_ROOT, udfDir.getAbsolutePath());
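    // Keep the dynamic UDF directory on the default (local) file system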
    overrideProps.setProperty(ExecConstants.UDF_DIRECTORY_FS, FileSystem.DEFAULT_FS);
    updateTestCluster(1, DrillConfig.create(overrideProps));

    fsUri = getLocalFileSystem().getUri();
}

From source file:org.elasticsearch.hadoop.yarn.util.YarnUtils.java

License:Apache License

public static Map<String, String> setupEnv(Configuration cfg) {
    Map<String, String> env = new LinkedHashMap<String, String>(); // System.getenv()
    // add Hadoop Classpath
    for (String c : cfg.getStrings(YarnConfiguration.YARN_APPLICATION_CLASSPATH,
            YarnCompat.DEFAULT_PLATFORM_APPLICATION_CLASSPATH())) {
        addToEnv(env, Environment.CLASSPATH.name(), c.trim());
    }
    // add es-hadoop jar / current folder jars
    addToEnv(env, Environment.CLASSPATH.name(), "./*");

    //
    // some es-yarn constants
    //
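    // propagate the configured fs.defaultFS, falling back to DEFAULT_FS (file:///) when unset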
    addToEnv(env, EsYarnConstants.FS_URI, cfg.get(FileSystem.FS_DEFAULT_NAME_KEY, FileSystem.DEFAULT_FS));

    return env;
}