Example usage for org.apache.hadoop.fs RawLocalFileSystem RawLocalFileSystem

List of usage examples for org.apache.hadoop.fs RawLocalFileSystem RawLocalFileSystem

Introduction

In this page you can find the example usage for org.apache.hadoop.fs RawLocalFileSystem RawLocalFileSystem.

Prototype

public RawLocalFileSystem() 

Source Link

Usage

From source file:com.dasasian.chok.testutil.GenerateMapFiles.java

License:Apache License

/**
 * This generates the very simple MapFiles in chok/src/test/testMapFile[AB]/.
 * These files are supposed to simulate taking 2 large MapFiles and splitting the first one
 * into 4 shards, the second into 2 shards. We do not provide such a tool yet.
 * The results are checked in, so you should not need to run this. Is is provided
 * as a reference./*from   w w  w  .j  a  v a 2  s . c  o  m*/
 * @param args the arguments
 * @throws java.lang.Exception when and error occurs
 */
public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    conf.set("io.file.buffer.size", "4096");
    FileSystem fs = new RawLocalFileSystem();
    fs.setConf(conf);
    //
    File f = new File("src/test/testMapFileA/a1");
    MapFile.Writer w = new MapFile.Writer(conf, fs, f.getAbsolutePath(), Text.class, Text.class);
    write(w, "a.txt", "This is a test");
    write(w, "b.xml", "<name>test</name>");
    write(w, "c.log", "1/1/2009: test");
    w.close();
    //
    f = new File("src/test/testMapFileA/a2");
    w = new MapFile.Writer(conf, fs, f.getAbsolutePath(), Text.class, Text.class);
    write(w, "d.html", "<b>test</b>");
    write(w, "e.txt", "An e test");
    write(w, "f.log", "1/2/2009: test2");
    w.close();
    //
    f = new File("src/test/testMapFileA/a3");
    w = new MapFile.Writer(conf, fs, f.getAbsolutePath(), Text.class, Text.class);
    write(w, "g.log", "1/3/2009: more test");
    write(w, "h.txt", "Test in part 3");
    w.close();
    //
    f = new File("src/test/testMapFileA/a4");
    w = new MapFile.Writer(conf, fs, f.getAbsolutePath(), Text.class, Text.class);
    write(w, "i.xml", "<i>test</i>");
    write(w, "j.log", "1/4/2009: 4 test");
    write(w, "k.out", "test data");
    write(w, "l.txt", "line 4");
    w.close();
    //
    //
    f = new File("src/test/testMapFileB/b1");
    w = new MapFile.Writer(conf, fs, f.getAbsolutePath(), Text.class, Text.class);
    write(w, "u.txt", "Test U text");
    write(w, "v.xml", "<victor>foo</victor>");
    write(w, "w.txt", "where is test");
    w.close();
    //
    f = new File("src/test/testMapFileB/b2");
    w = new MapFile.Writer(conf, fs, f.getAbsolutePath(), Text.class, Text.class);
    write(w, "x.txt", "xrays ionize");
    write(w, "y.xml", "<yankee>foo</yankee>");
    write(w, "z.xml", "<zed>foo</zed>");
    w.close();
}

From source file:com.datatorrent.stram.util.FSUtil.java

License:Apache License

/**
 * Downloads a file from DFS to the local file system.
 *
 * @param fs the source (DFS) file system
 * @param destinationPath local directory to copy into; created if it does not exist
 * @param destinationFile name of the file to create inside {@code destinationPath}
 * @param dfsFile path of the source file on {@code fs}
 * @param conf Hadoop configuration used for the copy
 * @return the local file that was written
 * @throws IOException if the copy or permission change fails
 */
public static File copyToLocalFileSystem(FileSystem fs, String destinationPath, String destinationFile,
        String dfsFile, Configuration conf) throws IOException {
    File destinationDir = new File(destinationPath);
    if (!destinationDir.exists() && !destinationDir.mkdirs()) {
        throw new RuntimeException("Unable to create local directory");
    }
    // try-with-resources closes the local FS handle even if the copy throws
    try (RawLocalFileSystem localFileSystem = new RawLocalFileSystem()) {
        // allow app user to access local dir
        FsPermission permissions = new FsPermission(FsAction.ALL, FsAction.NONE, FsAction.NONE);
        localFileSystem.setPermission(new Path(destinationDir.getAbsolutePath()), permissions);

        Path dfsFilePath = new Path(dfsFile);
        File localFile = new File(destinationDir, destinationFile);
        FileUtil.copy(fs, dfsFilePath, localFile, false, conf);
        // set permissions on actual file to be read-only for user
        permissions = new FsPermission(FsAction.READ, FsAction.NONE, FsAction.NONE);
        localFileSystem.setPermission(new Path(localFile.getAbsolutePath()), permissions);
        return localFile;
    }
}

From source file:com.ebay.erl.mobius.core.fs.MobiusLocalFileSystem.java

License:Apache License

/**
 * Default constructor: delegates all file-system operations to a fresh
 * {@link RawLocalFileSystem}.
 */
public MobiusLocalFileSystem() {
    super(new RawLocalFileSystem());
}

From source file:com.lightboxtechnologies.spectrum.FsEntryHBaseCommonTest.java

License:Apache License

/**
 * Unmarshalling a BUFFER_STREAM column spec must yield a StreamProxy whose
 * stream replays exactly the original payload bytes.
 */
@Test
public void testUnmarshallBufferStream() throws IOException {
    // Column spec = the BUFFER_STREAM type marker followed by the byte 's'.
    ByteArrayOutputStream specBytes = new ByteArrayOutputStream();
    specBytes.write(BUFFER_STREAM);
    specBytes.write('s');

    byte[] payload = new byte[] { 0x01, 0x02, 0x03, 0x04 };
    StreamProxy proxy = (StreamProxy) unmarshall(specBytes.toByteArray(), payload);

    InputStream stream = proxy.open(new RawLocalFileSystem(), null, null);
    byte[] readBack = new byte[4];
    assertEquals(4, stream.read(readBack));
    assertArrayEquals(readBack, payload);
    assertEquals(0, stream.available());
}

From source file:com.lightboxtechnologies.spectrum.FsEntryHBaseCommonTest.java

License:Apache License

/**
 * Verifies that populate() splits a raw column map into scalar values and
 * stream proxies, preserving keys, values, and proxy payloads.
 */
@Test
public void testPopulate() throws IOException {
    // (removed an unused ByteArrayOutputStream local that was never written to)
    Map<byte[], byte[]> inMap = new HashMap<byte[], byte[]>();
    Map<String, Object> actualMap = new HashMap<String, Object>(), expectedMap = new HashMap<String, Object>();

    Map<String, StreamProxy> actualStreams = new HashMap<String, StreamProxy>(),
            expectedStreams = new HashMap<String, StreamProxy>();

    Date ts = new Date();

    // Expected scalar columns.
    expectedMap.put("num", 17L);
    expectedMap.put("whatevs", "a string");
    expectedMap.put("exfiltrated", ts.clone());

    // Expected stream columns: one file-backed, one buffer-backed.
    expectedStreams.put("Content", new FileProxy("some/bullshit/file.dat"));
    expectedStreams.put("Slack", new BufferProxy(new byte[] { 0x02, 0x03, 0x05, 0x08, 0x13, 0x21 }));

    // Raw input map keyed by encoded column specs.
    inMap.put(createColSpec(17L, "num"), Bytes.toBytes(17L));
    inMap.put(createColSpec("a string", "whatevs"), Bytes.toBytes("a string"));
    inMap.put(createColSpec(ts, "exfiltrated"), Bytes.toBytes(ts.getTime()));
    inMap.put(createColSpec(new FileProxy(""), "Content"), Bytes.toBytes("some/bullshit/file.dat"));
    inMap.put(createColSpec(new BufferProxy(null), "Slack"), new byte[] { 0x02, 0x03, 0x05, 0x08, 0x13, 0x21 });

    populate(inMap, actualMap, actualStreams);
    assertEquals(expectedMap, actualMap);
    assertEquals(expectedStreams.size(), actualStreams.size());
    assertTrue(actualStreams.containsKey("Content"));
    assertTrue(actualStreams.containsKey("Slack"));
    assertEquals("some/bullshit/file.dat", ((FileProxy) actualStreams.get("Content")).getPath());

    // The buffered "Slack" stream must replay exactly the bytes that went in.
    InputStream str = actualStreams.get("Slack").open(new RawLocalFileSystem(), null, null);
    byte[] tempBuf = new byte[6];
    assertEquals(6, str.read(tempBuf));
    assertEquals(0, str.available());
    assertArrayEquals(new byte[] { 0x02, 0x03, 0x05, 0x08, 0x13, 0x21 }, tempBuf);
}

From source file:com.produban.openbus.persistence.HDFSUtils.java

License:Apache License

/**
 * Resolves the FileSystem for the given path. When the path lives on the
 * local file system, a RawLocalFileSystem is substituted so no checksum
 * (.crc) handling is involved.
 *
 * @param path path whose file system should be resolved
 * @param conf Hadoop configuration used to resolve the file system
 * @return the resolved file system (raw local when the path is local)
 */
public static FileSystem getFS(String path, Configuration conf) {
    try {
        FileSystem ret = new Path(path).getFileSystem(conf);

        if (ret instanceof LocalFileSystem) {
            LOG.info("Using local filesystem and disabling checksums");
            ret = new RawLocalFileSystem();

            try {
                // NOTE(review): initializes with a fresh Configuration rather than the
                // supplied conf — confirm this is intentional.
                ((RawLocalFileSystem) ret).initialize(new URI(URI_CONFIG), new Configuration());
            } catch (URISyntaxException e) {
                throw new RuntimeException(e);
            }
        } else {
            // was conf.getStrings(...), which logs a String[] identity hash
            // ("[Ljava.lang.String;@...") instead of the configured value
            LOG.info("No local filesystem " + conf.get("fs.defaultFS"));
        }

        return ret;
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}

From source file:com.splicemachine.fs.localfs.SpliceFileSystem.java

License:Apache License

/**
 * Default constructor: wraps a RawLocalFileSystem whose URI is pinned to
 * {@code NAME} and whose file statuses are adapted to SpliceFileStatus.
 */
public SpliceFileSystem() {
    this(new RawLocalFileSystem() {
        // Report this file system's own URI instead of the raw-local default.
        @Override
        public URI getUri() {
            return NAME;
        }

        // Adapt every status so callers see SpliceFileStatus instances.
        @Override
        public FileStatus getFileStatus(Path p) throws IOException {
            return new SpliceFileStatus(super.getFileStatus(p));
        }
    });
}

From source file:com.thinkbiganalytics.kylo.hadoop.FileSystemUtilTest.java

License:Apache License

/**
 * Verifies that registering file systems records each scheme's implementation
 * class in the Hadoop configuration.
 */
@Test
public void registerFileSystems() {
    final Configuration config = new Configuration(false);

    FileSystemUtil.registerFileSystems(
            Arrays.asList(new LocalFileSystem(), new MockFileSystem(), new RawLocalFileSystem()), config);

    // Each registered scheme maps to its implementing class.
    Assert.assertEquals(LocalFileSystem.class.getName(), config.get("fs.file.impl"));
    Assert.assertEquals(MockFileSystem.class.getName(), config.get("fs.mock.impl"));
}

From source file:gobblin.yarn.GobblinYarnAppLauncher.java

License:Apache License

/**
 * Builds a LogCopier that streams the application's stdout/stderr logs from
 * the source file system down to a local sink directory.
 *
 * @param config application config; optional copier tuning keys are read from it
 * @param sinkLogDir local directory the logs are written to
 * @param appWorkDir application work directory containing the HDFS log dir
 * @return the configured LogCopier
 * @throws IOException if the local file system cannot be initialized
 */
private LogCopier buildLogCopier(Config config, Path sinkLogDir, Path appWorkDir) throws IOException {
    // Destination is the raw local FS; registered with the closer so it is
    // shut down together with the launcher.
    FileSystem localFs = this.closer.register(new RawLocalFileSystem());
    localFs.initialize(URI.create(ConfigurationKeys.LOCAL_FS_URI), new Configuration());

    LogCopier.Builder builder = LogCopier.newBuilder()
            .useSrcFileSystem(this.fs)
            .useDestFileSystem(localFs)
            .readFrom(getHdfsLogDir(appWorkDir))
            .writeTo(sinkLogDir)
            .acceptsLogFileExtensions(ImmutableSet.of(ApplicationConstants.STDOUT, ApplicationConstants.STDERR));

    // Optional tuning: cap per-file size and/or override the copy scheduler.
    if (config.hasPath(GobblinYarnConfigurationKeys.LOG_COPIER_MAX_FILE_SIZE)) {
        builder.useMaxBytesPerLogFile(config.getBytes(GobblinYarnConfigurationKeys.LOG_COPIER_MAX_FILE_SIZE));
    }
    if (config.hasPath(GobblinYarnConfigurationKeys.LOG_COPIER_SCHEDULER)) {
        builder.useScheduler(config.getString(GobblinYarnConfigurationKeys.LOG_COPIER_SCHEDULER));
    }
    return builder.build();
}

From source file:it.crs4.pydoop.mapreduce.pipes.TestPipeApplication.java

License:Apache License

/**
 * Tests PipesMapRunner: verifies that the data produced by the record reader
 * is transferred to the (stubbed) pipes client and that the protocol header
 * (version, key/value classes) is emitted.
 *
 * @throws Exception when an error occurs
 */
@Test
public void testRunner() throws Exception {
    // clean old password files
    File[] psw = cleanTokenPasswordFile();
    try {
        JobID jobId = new JobID("201408272347", 0);
        TaskID taskId = new TaskID(jobId, TaskType.MAP, 0);
        TaskAttemptID taskAttemptid = new TaskAttemptID(taskId, 0);

        Job job = new Job(new Configuration());
        job.setJobID(jobId);
        Configuration conf = job.getConfiguration();
        conf.set(Submitter.IS_JAVA_RR, "true");
        conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskAttemptid.toString());
        job.setInputFormatClass(DummyInputFormat.class);
        // raw local FS so the test runs against the local disk without checksums
        FileSystem fs = new RawLocalFileSystem();
        fs.setConf(conf);

        DummyInputFormat input_format = new DummyInputFormat();
        List<InputSplit> isplits = input_format.getSplits(job);

        InputSplit isplit = isplits.get(0);

        TaskAttemptContextImpl tcontext = new TaskAttemptContextImpl(conf, taskAttemptid);

        RecordReader<FloatWritable, NullWritable> rReader = input_format.createRecordReader(isplit, tcontext);

        TestMapContext context = new TestMapContext(conf, taskAttemptid, rReader, null, null, null, isplit);
        // stub for client
        File fCommand = getFileCommand("it.crs4.pydoop.mapreduce.pipes.PipeApplicationRunnableStub");
        conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath());
        // token for authorization
        Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>("user".getBytes(),
                "password".getBytes(), new Text("kind"), new Text("service"));
        TokenCache.setJobToken(token, job.getCredentials());
        conf.setBoolean(MRJobConfig.SKIP_RECORDS, true);
        PipesMapper<FloatWritable, NullWritable, IntWritable, Text> mapper = new PipesMapper<FloatWritable, NullWritable, IntWritable, Text>(
                context);

        // redirect stdout before running so the stub's protocol output can be inspected
        initStdOut(conf);
        mapper.run(context);
        String stdOut = readStdOut(conf);

        // test part of translated data. As common file for client and test -
        // clients stdOut
        // check version
        assertTrue(stdOut.contains("CURRENT_PROTOCOL_VERSION:0"));
        // check key and value classes
        assertTrue(stdOut.contains("Key class:org.apache.hadoop.io.FloatWritable"));
        assertTrue(stdOut.contains("Value class:org.apache.hadoop.io.NullWritable"));
        // test have sent all data from reader
        assertTrue(stdOut.contains("value:0.0"));
        assertTrue(stdOut.contains("value:9.0"));

    } finally {
        if (psw != null) {
            // remove password files
            // NOTE(review): deleteOnExit defers removal to JVM shutdown, not
            // test completion — confirm this is intentional.
            for (File file : psw) {
                file.deleteOnExit();
            }
        }
    }
}