Example usage for org.apache.hadoop.fs FileContext getLocalFSFileContext

List of usage examples for org.apache.hadoop.fs FileContext getLocalFSFileContext

Introduction

This page presents example usages of the org.apache.hadoop.fs FileContext method getLocalFSFileContext.

Prototype

public static FileContext getLocalFSFileContext() throws UnsupportedFileSystemException 

Source Link

Usage

From source file:com.datatorrent.contrib.avro.AvroFileInputOperatorTest.java

License:Apache License

/**
 * Reads a single Avro file through the operator and verifies that all records
 * are emitted, no error records are reported, and exactly one completed-file
 * notification is produced.
 */
@Test
public void testSingleFileAvroReads() throws Exception {
    // Start from a clean test directory (recursive delete).
    FileContext.getLocalFSFileContext().delete(new Path(new File(testMeta.dir).getAbsolutePath()), true);

    int cnt = 7; // number of Avro records generated for this test
    createAvroInput(cnt);
    writeAvroFile(new File(FILENAME));

    // Attach collector sinks so emitted tuples can be asserted on.
    avroFileInput.output.setSink(output);
    avroFileInput.completedFilesPort.setSink(completedFilesPort);
    avroFileInput.errorRecordsPort.setSink(errorRecordsPort);
    avroFileInput.setDirectory(testMeta.dir);
    avroFileInput.setup(testMeta.context);

    avroFileInput.beginWindow(0);
    avroFileInput.emitTuples();
    avroFileInput.emitTuples(); // emit again within the same window
    Assert.assertEquals("Record count", cnt, avroFileInput.recordCount);
    avroFileInput.endWindow();
    Assert.assertEquals("number tuples", cnt, output.collectedTuples.size());
    Assert.assertEquals("Error tuples", 0, errorRecordsPort.collectedTuples.size());
    Assert.assertEquals("Completed File", 1, completedFilesPort.collectedTuples.size());
    avroFileInput.teardown();

}

From source file:com.datatorrent.contrib.avro.AvroFileInputOperatorTest.java

License:Apache License

/**
 * Reads two Avro files across two windows and verifies the cumulative tuple
 * count doubles, no error records are reported, and two completed-file
 * notifications are produced.
 */
@Test
public void testMultipleFileAvroReads() throws Exception {
    // Start from a clean test directory (recursive delete).
    FileContext.getLocalFSFileContext().delete(new Path(new File(testMeta.dir).getAbsolutePath()), true);

    int cnt = 7; // number of Avro records generated per file

    createAvroInput(cnt);

    // The same generated batch is written to two separate files.
    writeAvroFile(new File(FILENAME));
    writeAvroFile(new File(OTHER_FILE));

    // Attach collector sinks so emitted tuples can be asserted on.
    avroFileInput.output.setSink(output);
    avroFileInput.completedFilesPort.setSink(completedFilesPort);
    avroFileInput.errorRecordsPort.setSink(errorRecordsPort);
    avroFileInput.setDirectory(testMeta.dir);
    avroFileInput.setup(testMeta.context);

    avroFileInput.beginWindow(0);
    avroFileInput.emitTuples();
    // NOTE(review): window 0 is never closed — beginWindow(1) follows without an
    // intervening endWindow(). Confirm this matches the operator's expected lifecycle.
    avroFileInput.beginWindow(1);
    avroFileInput.emitTuples();

    Assert.assertEquals("number tuples after window 0", cnt, output.collectedTuples.size());

    avroFileInput.emitTuples();
    avroFileInput.endWindow();

    Assert.assertEquals("Error tuples", 0, errorRecordsPort.collectedTuples.size());
    Assert.assertEquals("number tuples after window 1", 2 * cnt, output.collectedTuples.size());
    Assert.assertEquals("Completed File", 2, completedFilesPort.collectedTuples.size());

    avroFileInput.teardown();

}

From source file:com.datatorrent.contrib.avro.AvroFileInputOperatorTest.java

License:Apache License

/**
 * Feeds the operator a file that is not valid Avro and verifies that no
 * tuples are emitted on the output port.
 */
@Test
public void testInvalidFormatFailure() throws Exception {
    // Start from a clean test directory (recursive delete).
    FileContext.getLocalFSFileContext().delete(new Path(new File(testMeta.dir).getAbsolutePath()), true);

    int cnt = 7; // number of malformed lines written to the error file
    writeErrorFile(cnt, new File(ERROR_FILE));

    avroFileInput.output.setSink(output);
    avroFileInput.setDirectory(testMeta.dir);
    avroFileInput.setup(testMeta.context);

    avroFileInput.beginWindow(0);
    avroFileInput.emitTuples();
    avroFileInput.emitTuples();
    avroFileInput.endWindow();

    // Invalid input must not surface on the regular output port.
    Assert.assertEquals("number tuples after window 1", 0, output.collectedTuples.size());
    avroFileInput.teardown();
}

From source file:com.datatorrent.contrib.avro.AvroFileInputOperatorTest.java

License:Apache License

/**
 * Launches {@code AvroReaderApplication} in local mode against two generated
 * Avro input files, and fails the test if DAG validation raises constraint
 * violations.
 */
@Test
public void testApplication() throws Exception { // Exception already covers IOException
    try {
        // Start from a clean test directory (recursive delete).
        FileContext.getLocalFSFileContext().delete(new Path(new File(testMeta.dir).getAbsolutePath()), true);
        int cnt = 7; // record count for the first input file
        createAvroInput(cnt);
        writeAvroFile(new File(FILENAME));
        createAvroInput(cnt - 2); // second file carries two fewer records
        writeAvroFile(new File(OTHER_FILE));
        avroFileInput.setDirectory(testMeta.dir);

        LocalMode lma = LocalMode.newInstance();
        Configuration conf = new Configuration(false);

        AvroReaderApplication avroReaderApplication = new AvroReaderApplication();
        avroReaderApplication.setAvroFileInputOperator(avroFileInput);
        lma.prepareDAG(avroReaderApplication, conf);

        LocalMode.Controller lc = lma.getController();
        lc.run(10000); // runs for 10 seconds and quits
    } catch (ConstraintViolationException e) {
        Assert.fail("constraint violations: " + e.getConstraintViolations());
    }
}

From source file:com.datatorrent.contrib.avro.AvroFileInputOperatorTest.java

License:Apache License

/**
 * Launches {@code AvroToPojoApplication} in local mode against two generated
 * Avro input files, converting records to {@code SimpleOrder} POJOs, and
 * fails the test if DAG validation raises constraint violations.
 */
@Test
public void testApplicationWithPojoConversion() throws Exception { // Exception already covers IOException
    try {
        // Start from a clean test directory (recursive delete).
        FileContext.getLocalFSFileContext().delete(new Path(new File(testMeta.dir).getAbsolutePath()), true);
        int cnt = 7; // record count for the first input file
        createAvroInput(cnt);
        writeAvroFile(new File(FILENAME));
        createAvroInput(cnt - 2); // second file carries two fewer records
        writeAvroFile(new File(OTHER_FILE));

        avroFileInput.setDirectory(testMeta.dir);

        AvroToPojo avroToPojo = new AvroToPojo();
        avroToPojo.setPojoClass(SimpleOrder.class);

        LocalMode lma = LocalMode.newInstance();
        Configuration conf = new Configuration(false);

        AvroToPojoApplication avroToPojoApplication = new AvroToPojoApplication();
        avroToPojoApplication.setAvroFileInputOperator(avroFileInput);
        avroToPojoApplication.setAvroToPojo(avroToPojo);

        lma.prepareDAG(avroToPojoApplication, conf);
        LocalMode.Controller lc = lma.getController();
        lc.run(10000); // runs for 10 seconds and quits
    } catch (ConstraintViolationException e) {
        Assert.fail("constraint violations: " + e.getConstraintViolations());
    }
}

From source file:com.datatorrent.contrib.parquet.ParquetFilePOJOReaderTest.java

License:Apache License

/**
 * Writes three records to a Parquet file and verifies the POJO reader emits
 * exactly three tuples.
 */
@Test
public void testParquetReading() throws Exception {
    // Start from a clean test directory (recursive delete).
    FileContext.getLocalFSFileContext().delete(new Path(new File(testMeta.dir).getAbsolutePath()), true);
    List<EventRecord> data = Lists.newArrayList();
    data.add(new EventRecord(1, "cust1", 12321L, true, 12.22f, 12.23));
    data.add(new EventRecord(2, "cust2", 12322L, true, 22.22f, 22.23));
    data.add(new EventRecord(3, "cust3", 12323L, true, 32.22f, 32.23));
    writeParquetFile(PARQUET_SCHEMA, new File(testMeta.dir, "data.parquet"), data);

    parquetFilePOJOReader.output.setSink(outputSink);
    parquetFilePOJOReader.setDirectory(testMeta.dir);
    parquetFilePOJOReader.setParquetSchema(PARQUET_SCHEMA);
    parquetFilePOJOReader.setup(testMeta.context);
    parquetFilePOJOReader.output.setup(testMeta.portContext);

    // NOTE(review): loop variable wid is unused — every iteration opens window id 0.
    // If distinct windows were intended this should be beginWindow(wid); confirm.
    for (long wid = 0; wid < 2; wid++) {
        parquetFilePOJOReader.beginWindow(0);
        parquetFilePOJOReader.emitTuples();
        parquetFilePOJOReader.endWindow();
    }

    Assert.assertEquals("number tuples", 3, outputSink.collectedTuples.size());
    parquetFilePOJOReader.teardown();

}

From source file:com.datatorrent.contrib.parquet.ParquetFilePOJOReaderTest.java

License:Apache License

/**
 * Writes three records to a Parquet file and verifies the POJO reader, given
 * an explicit field mapping onto {@code EventRecordV2}, emits exactly three
 * tuples.
 */
@Test
public void testParquetReadingWithParquetToPojoMapping() throws Exception {
    // Start from a clean test directory (recursive delete).
    FileContext.getLocalFSFileContext().delete(new Path(new File(testMeta.dir).getAbsolutePath()), true);
    List<EventRecord> data = Lists.newArrayList();
    data.add(new EventRecord(1, "cust1", 12321L, true, 12.22f, 12.23));
    data.add(new EventRecord(2, "cust2", 12322L, true, 22.22f, 22.23));
    data.add(new EventRecord(3, "cust3", 12323L, true, 32.22f, 32.23));
    writeParquetFile(PARQUET_SCHEMA, new File(testMeta.dir, "data.parquet"), data);

    parquetFilePOJOReader.output.setSink(outputSink);
    parquetFilePOJOReader.setDirectory(testMeta.dir);
    parquetFilePOJOReader.setParquetSchema(PARQUET_SCHEMA);
    // Mapping format: parquetField:pojoField:TYPE, comma-separated.
    parquetFilePOJOReader.setParquetToPOJOFieldsMapping(
            "event_id:event_id_v2:INTEGER,org_id:org_id_v2:STRING,long_id:long_id_v2:LONG,css_file_loaded:css_file_loaded_v2:BOOLEAN,float_val:float_val_v2:FLOAT,double_val:double_val_v2:DOUBLE");
    parquetFilePOJOReader.setup(testMeta.context);
    testMeta.portContext.getAttributes().put(Context.PortContext.TUPLE_CLASS, EventRecordV2.class);
    parquetFilePOJOReader.output.setup(testMeta.portContext);

    // NOTE(review): loop variable wid is unused — every iteration opens window id 0.
    // If distinct windows were intended this should be beginWindow(wid); confirm.
    for (long wid = 0; wid < 2; wid++) {
        parquetFilePOJOReader.beginWindow(0);
        parquetFilePOJOReader.emitTuples();
        parquetFilePOJOReader.endWindow();
    }

    Assert.assertEquals("number tuples", 3, outputSink.collectedTuples.size());
    parquetFilePOJOReader.teardown();

}

From source file:com.datatorrent.contrib.parquet.ParquetFilePOJOReaderTest.java

License:Apache License

/**
 * Writes an empty Parquet file (valid schema, zero records) and verifies the
 * POJO reader emits no tuples.
 */
@Test
public void testParquetEmptyFile() throws Exception {
    // Start from a clean test directory (recursive delete).
    FileContext.getLocalFSFileContext().delete(new Path(new File(testMeta.dir).getAbsolutePath()), true);
    List<EventRecord> data = Lists.newArrayList(); // intentionally left empty
    writeParquetFile(PARQUET_SCHEMA, new File(testMeta.dir, "data.parquet"), data);

    parquetFilePOJOReader.output.setSink(outputSink);
    parquetFilePOJOReader.setDirectory(testMeta.dir);
    parquetFilePOJOReader.setParquetSchema(PARQUET_SCHEMA);
    parquetFilePOJOReader.setup(testMeta.context);
    testMeta.portContext.getAttributes().put(Context.PortContext.TUPLE_CLASS, EventRecordV2.class);
    parquetFilePOJOReader.output.setup(testMeta.portContext);

    // NOTE(review): loop variable wid is unused — every iteration opens window id 0.
    // If distinct windows were intended this should be beginWindow(wid); confirm.
    for (long wid = 0; wid < 2; wid++) {
        parquetFilePOJOReader.beginWindow(0);
        parquetFilePOJOReader.emitTuples();
        parquetFilePOJOReader.endWindow();
    }

    Assert.assertEquals("number tuples", 0, outputSink.collectedTuples.size());
    parquetFilePOJOReader.teardown();

}

From source file:com.datatorrent.contrib.parquet.ParquetFilePOJOReaderTest.java

License:Apache License

/**
 * Feeds the POJO reader a plain-text file that is not valid Parquet and
 * verifies that no tuples are emitted on the output port.
 */
@Test
public void testParquetIncorrectFormat() throws Exception {
    // Start from a clean test directory (recursive delete).
    FileContext.getLocalFSFileContext().delete(new Path(new File(testMeta.dir).getAbsolutePath()), true);
    List<String> allLines = Lists.newArrayList();
    HashSet<String> lines = Sets.newHashSet();
    // Generate five distinct plain-text lines as the invalid payload.
    for (int line = 0; line < 5; line++) {
        lines.add("f0" + "l" + line);
    }
    allLines.addAll(lines);
    File testFile = new File(testMeta.dir, "file0");
    FileUtils.write(testFile, StringUtils.join(lines, '\n'));

    parquetFilePOJOReader.output.setSink(outputSink);
    parquetFilePOJOReader.setDirectory(testMeta.dir);
    parquetFilePOJOReader.setParquetSchema(PARQUET_SCHEMA);
    // Mapping format: parquetField:pojoField:TYPE, comma-separated.
    parquetFilePOJOReader.setParquetToPOJOFieldsMapping(
            "event_id:event_id_v2:INTEGER,org_id:org_id_v2:STRING,long_id:long_id_v2:LONG,css_file_loaded:css_file_loaded_v2:BOOLEAN,float_val:float_val_v2:FLOAT,double_val:double_val_v2:DOUBLE");
    parquetFilePOJOReader.setup(testMeta.context);
    testMeta.portContext.getAttributes().put(Context.PortContext.TUPLE_CLASS, EventRecordV2.class);
    parquetFilePOJOReader.output.setup(testMeta.portContext);

    // NOTE(review): loop variable wid is unused — every iteration opens window id 0.
    // If distinct windows were intended this should be beginWindow(wid); confirm.
    for (long wid = 0; wid < 2; wid++) {
        parquetFilePOJOReader.beginWindow(0);
        parquetFilePOJOReader.emitTuples();
        parquetFilePOJOReader.endWindow();
    }

    Assert.assertEquals("number tuples", 0, outputSink.collectedTuples.size());
    parquetFilePOJOReader.teardown();

}

From source file:com.datatorrent.contrib.parquet.ParquetFilePOJOReaderTest.java

License:Apache License

/**
 * Launches {@code ParquetReaderApplication} in local mode against a generated
 * three-record Parquet file, and fails the test if DAG validation raises
 * constraint violations.
 */
@Test
public void testApplication() throws Exception { // Exception already covers IOException
    try {
        // Start from a clean test directory (recursive delete).
        FileContext.getLocalFSFileContext().delete(new Path(new File(testMeta.dir).getAbsolutePath()), true);
        List<EventRecord> data = Lists.newArrayList();
        data.add(new EventRecord(1, "cust1", 12321L, true, 12.22f, 12.23));
        data.add(new EventRecord(2, "cust2", 12322L, true, 22.22f, 22.23));
        data.add(new EventRecord(3, "cust3", 12323L, true, 32.22f, 32.23));
        writeParquetFile(PARQUET_SCHEMA, new File(testMeta.dir, "data.parquet"), data);
        parquetFilePOJOReader.setDirectory(testMeta.dir);
        parquetFilePOJOReader.setParquetSchema(PARQUET_SCHEMA);
        LocalMode lma = LocalMode.newInstance();
        Configuration conf = new Configuration(false);
        ParquetReaderApplication parquetReaderApplication = new ParquetReaderApplication();
        parquetReaderApplication.setParquetFilePOJOReader(parquetFilePOJOReader);
        lma.prepareDAG(parquetReaderApplication, conf);
        LocalMode.Controller lc = lma.getController();
        lc.run(10000); // runs for 10 seconds and quits
    } catch (ConstraintViolationException e) {
        Assert.fail("constraint violations: " + e.getConstraintViolations());
    }
}