Example usage for org.apache.hadoop.fs FileSystem getLocal

Introduction

On this page you can find usage examples for org.apache.hadoop.fs FileSystem getLocal.

Prototype

public static LocalFileSystem getLocal(Configuration conf) throws IOException 

Document

Get the local FileSystem.
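
A minimal, self-contained sketch of how the method is typically called. The target path and file contents below are illustrative only and are not taken from the examples that follow.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;

public class GetLocalExample {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // Obtain the FileSystem implementation backed by the local disk.
        LocalFileSystem fs = FileSystem.getLocal(conf);

        // Write a small file through the FileSystem API (illustrative path).
        Path path = new Path("target/getlocal-example.txt");
        try (FSDataOutputStream out = fs.create(path, true)) {
            out.writeUTF("Hello, local file system!");
        }

        // Read it back through the same API.
        try (FSDataInputStream in = fs.open(path)) {
            System.out.println(in.readUTF());
        }
    }
}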

Usage

From source file: com.asakusafw.runtime.directio.hadoop.SequenceFileFormatTest.java

License: Apache License

/**
 * Test for input of an invalid file.
 * @throws Exception if failed
 */
@Test(expected = IOException.class)
public void input_invalid() throws Exception {
    LocalFileSystem fs = FileSystem.getLocal(conf);
    Path path = new Path(folder.newFile("testing").toURI());
    try (FSDataOutputStream output = fs.create(path)) {
        output.writeUTF("Hello, world!");
    }
    try (ModelInput<StringOption> in = format.createInput(StringOption.class, fs, path, 0,
            fs.getFileStatus(path).getLen(), new Counter())) {
        // do nothing
    }
}

From source file: com.asakusafw.runtime.directio.hadoop.SequenceFileFormatTest.java

License: Apache License

/**
 * Test method for output.
 * @throws Exception if failed
 */
@SuppressWarnings("deprecation")
@Test
public void output() throws Exception {
    final int count = 10000;
    LocalFileSystem fs = FileSystem.getLocal(conf);
    Path path = new Path(folder.newFile("testing").toURI());
    try (ModelOutput<StringOption> out = format.createOutput(StringOption.class, fs, path, new Counter())) {
        StringOption value = new StringOption();
        for (int i = 0; i < count; i++) {
            value.modify("Hello, world at " + i);
            out.write(value);
        }
    }
    try (SequenceFile.Reader reader = new SequenceFile.Reader(fs, path, conf)) {
        LongWritable k = new LongWritable();
        Text v = new Text();
        for (int i = 0; i < count; i++) {
            String answer = "Hello, world at " + i;
            assertThat(answer, reader.next(k, v), is(true));
            assertThat(answer, k.get(), is(1L));
            assertThat(answer, v.toString(), is(answer));
        }
        assertThat("eof", reader.next(k), is(false));
    }
}

From source file: com.asakusafw.runtime.directio.hadoop.SequenceFileFormatTest.java

License: Apache License

/**
 * compressed output.
 * @throws Exception if failed
 */
@Test
public void output_compressed() throws Exception {
    LocalFileSystem fs = FileSystem.getLocal(conf);
    Path path = new Path(folder.newFile("testing").toURI());
    try (ModelOutput<StringOption> out = format.codec(new DefaultCodec()).createOutput(StringOption.class, fs,
            path, new Counter())) {
        out.write(new StringOption("Hello, world!"));
    }

    try (SequenceFile.Reader reader = new SequenceFile.Reader(fs, path, conf)) {
        assertThat(reader.getCompressionCodec(), instanceOf(DefaultCodec.class));
    }
}

From source file: com.asakusafw.runtime.directio.hadoop.SequenceFileFormatTest.java

License: Apache License

/**
 * uncompressed output.
 * @throws Exception if failed
 */
@Test
public void output_no_compressed() throws Exception {
    LocalFileSystem fs = FileSystem.getLocal(conf);
    Path path = new Path(folder.newFile("testing.gz").toURI());
    try (ModelOutput<StringOption> out = format.codec(null).createOutput(StringOption.class, fs, path,
            new Counter())) {
        out.write(new StringOption("Hello, world!"));
    }
    try (SequenceFile.Reader reader = new SequenceFile.Reader(fs, path, conf)) {
        assertThat(reader.getCompressionCodec(), is(nullValue()));
    }
}

From source file: com.asakusafw.runtime.directio.hadoop.SequenceFileFormatTest.java

License: Apache License

/**
 * compressed output, with the codec specified via configuration.
 * @throws Exception if failed
 */
@Test
public void output_compressed_conf() throws Exception {
    LocalFileSystem fs = FileSystem.getLocal(conf);
    Path path = new Path(folder.newFile("testing").toURI());
    format.getConf().set(SequenceFileFormat.KEY_COMPRESSION_CODEC, DefaultCodec.class.getName());
    try (ModelOutput<StringOption> out = format.createOutput(StringOption.class, fs, path, new Counter())) {
        out.write(new StringOption("Hello, world!"));
    }
    try (SequenceFile.Reader reader = new SequenceFile.Reader(fs, path, conf)) {
        assertThat(reader.getCompressionCodec(), instanceOf(DefaultCodec.class));
    }
}

From source file: com.asakusafw.runtime.directio.hadoop.SequenceFileFormatTest.java

License: Apache License

/**
 * output with an invalid compression codec setting.
 * @throws Exception if failed
 */
@Test
public void output_compressed_invalid() throws Exception {
    LocalFileSystem fs = FileSystem.getLocal(conf);
    Path path = new Path(folder.newFile("testing").toURI());
    format.getConf().set(SequenceFileFormat.KEY_COMPRESSION_CODEC, "__INVALID__");
    try (ModelOutput<StringOption> out = format.createOutput(StringOption.class, fs, path, new Counter())) {
        out.write(new StringOption("Hello, world!"));
    }
    try (SequenceFile.Reader reader = new SequenceFile.Reader(fs, path, conf)) {
        assertThat(reader.getCompressionCodec(), is(nullValue()));
    }
}

From source file: com.asakusafw.runtime.io.sequencefile.SequenceFileUtilTest.java

License: Apache License

/**
 * Initializes the test.
 * @throws Exception if an error occurs
 */
@Before
public void setUp() throws Exception {
    conf = new Configuration();
    fs = FileSystem.getLocal(conf);
    workingDirectory = fs.getWorkingDirectory();
    fs.setWorkingDirectory(new Path(folder.getRoot().getAbsoluteFile().toURI()));
}

From source file: com.asakusafw.runtime.stage.launcher.LauncherOptionsParser.java

License: Apache License

private List<Path> consumeLibraryPaths(LinkedList<String> rest) throws IOException {
    List<String> names = consumeLibraryNames(rest);
    if (names.isEmpty()) {
        return Collections.emptyList();
    }
    List<Path> results = new ArrayList<>();
    LocalFileSystem local = FileSystem.getLocal(configuration);
    for (String name : names) {
        Path path = new Path(name);
        FileSystem fs;
        // paths without a scheme are resolved on the local file system
        if (path.toUri().getScheme() == null) {
            fs = local;
        } else {
            fs = path.getFileSystem(configuration);
        }
        path = fs.makeQualified(path);
        if (fs.exists(path) == false) {
            throw new FileNotFoundException(path.toString());
        }
        results.add(path);
    }
    return results;
}

From source file: com.asakusafw.runtime.stage.resource.StageResourceDriver.java

License: Apache License

/**
 * Creates a new instance.
 * @param configuration the current configuration
 * @throws IOException if failed to initialize this driver
 * @throws IllegalArgumentException if the parameter is {@code null}
 */
public StageResourceDriver(Configuration configuration) throws IOException {
    if (configuration == null) {
        throw new IllegalArgumentException("configuration must not be null"); //$NON-NLS-1$
    }
    this.configuration = configuration;
    this.localFileSystem = FileSystem.getLocal(configuration);
    this.accessMode = AccessMode.decode(configuration.get(KEY_ACCESS_MODE));
}

From source file: com.asakusafw.windgate.hadoopfs.ssh.AbstractSshHadoopFsMirrorTest.java

License: Apache License

private void put(FileList.Writer writer, String path, String... contents) throws IOException {
    Configuration conf = new Configuration();
    File temp = folder.newFile(path);
    FileSystem fs = FileSystem.getLocal(conf);
    try (ModelOutput<Text> output = TemporaryStorage.openOutput(conf, Text.class, new Path(temp.toURI()))) {
        for (String content : contents) {
            output.write(new Text(content));
        }
    }
    FileStatus status = fs.getFileStatus(new Path(temp.toURI()));
    // copy the temporary file's contents into the FileList entry
    try (FSDataInputStream src = fs.open(status.getPath());
            OutputStream dst = writer.openNext(status.getPath())) {
        byte[] buf = new byte[256];
        while (true) {
            int read = src.read(buf);
            if (read < 0) {
                break;
            }
            dst.write(buf, 0, read);
        }
    }
}