Example usage for org.apache.hadoop.fs RawLocalFileSystem setConf

List of usage examples for org.apache.hadoop.fs RawLocalFileSystem setConf

Introduction

On this page you can find example usages of org.apache.hadoop.fs.RawLocalFileSystem.setConf, collected from open-source projects.

Prototype

@Override
public void setConf(Configuration conf)

Usage
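
Before the project-specific examples, here is a minimal, self-contained sketch (not taken from any of the projects below; class name and file name are invented) of the typical call sequence: a RawLocalFileSystem should be given a Configuration via setConf() before it is used, since several FileSystem operations read settings such as the I/O buffer size and permission umask from it.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RawLocalFileSystem;

public class RawLocalFileSystemDemo {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();

        RawLocalFileSystem fs = new RawLocalFileSystem();
        fs.setConf(conf); // configure before any filesystem operation

        // Resolve a path relative to the local working directory.
        Path p = new Path(fs.getWorkingDirectory(), "demo.txt");
        System.out.println("exists: " + fs.exists(p));
    }
}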

From source file:org.apache.accumulo.examples.wikisearch.ingest.WikipediaMapperTest.java

License:Apache License

@Before
public void setup() throws Exception {

    conf.set(AggregatingRecordReader.START_TOKEN, "<page>");
    conf.set(AggregatingRecordReader.END_TOKEN, "</page>");
    conf.set(WikipediaConfiguration.TABLE_NAME, TABLE_NAME);
    conf.set(WikipediaConfiguration.NUM_PARTITIONS, "1");
    conf.set(WikipediaConfiguration.NUM_GROUPS, "1");

    MockInstance i = new MockInstance();
    c = i.getConnector("root", "pass");
    c.tableOperations().delete(METADATA_TABLE_NAME);
    c.tableOperations().delete(TABLE_NAME);
    c.tableOperations().delete(INDEX_TABLE_NAME);
    c.tableOperations().delete(RINDEX_TABLE_NAME);
    c.tableOperations().create(METADATA_TABLE_NAME);
    c.tableOperations().create(TABLE_NAME);
    c.tableOperations().create(INDEX_TABLE_NAME);
    c.tableOperations().create(RINDEX_TABLE_NAME);

    writerMap.put(new Text(METADATA_TABLE_NAME), c.createBatchWriter(METADATA_TABLE_NAME, 1000L, 1000L, 1));
    writerMap.put(new Text(TABLE_NAME), c.createBatchWriter(TABLE_NAME, 1000L, 1000L, 1));
    writerMap.put(new Text(INDEX_TABLE_NAME), c.createBatchWriter(INDEX_TABLE_NAME, 1000L, 1000L, 1));
    writerMap.put(new Text(RINDEX_TABLE_NAME), c.createBatchWriter(RINDEX_TABLE_NAME, 1000L, 1000L, 1));

    TaskAttemptID id = new TaskAttemptID();
    TaskAttemptContext context = new TaskAttemptContext(conf, id);

    RawLocalFileSystem fs = new RawLocalFileSystem();
    fs.setConf(conf);

    URL url = ClassLoader.getSystemResource("enwiki-20110901-001.xml");
    Assert.assertNotNull(url);
    File data = new File(url.toURI());
    Path tmpFile = new Path(data.getAbsolutePath());

    // Setup the Mapper
    InputSplit split = new FileSplit(tmpFile, 0, fs.pathToFile(tmpFile).length(), null);
    AggregatingRecordReader rr = new AggregatingRecordReader();
    Path ocPath = new Path(tmpFile, "oc");
    OutputCommitter oc = new FileOutputCommitter(ocPath, context);
    fs.deleteOnExit(ocPath);
    StandaloneStatusReporter sr = new StandaloneStatusReporter();
    rr.initialize(split, context);
    MockAccumuloRecordWriter rw = new MockAccumuloRecordWriter();
    WikipediaMapper mapper = new WikipediaMapper();

    // Load data into Mock Accumulo
    Mapper<LongWritable, Text, Text, Mutation>.Context con = mapper.new Context(conf, id, rr, rw, oc, sr,
            split);
    mapper.run(con);

    // Flush and close record writers.
    rw.close(context);

}
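The setup above boils down to one pattern around setConf: the filesystem must be configured before pathToFile() is called to size the FileSplit. A condensed, hypothetical sketch of just that part (class name and input path are invented):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RawLocalFileSystem;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;

public class SplitFromLocalPath {
    public static void main(String[] args) throws Exception {
        RawLocalFileSystem fs = new RawLocalFileSystem();
        fs.setConf(new Configuration()); // required before pathToFile()

        Path input = new Path("/tmp/sample.xml"); // hypothetical input
        // pathToFile() maps a Hadoop Path to a java.io.File, so the
        // split length can be read straight off the local file.
        long length = fs.pathToFile(input).length();
        FileSplit split = new FileSplit(input, 0, length, (String[]) null);
        System.out.println(split);
    }
}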

From source file:org.apache.accumulo.examples.wikisearch.logic.TestQueryLogic.java

License:Apache License

@Before
public void setup() throws Exception {

    Logger.getLogger(AbstractQueryLogic.class).setLevel(Level.DEBUG);
    Logger.getLogger(QueryLogic.class).setLevel(Level.DEBUG);
    Logger.getLogger(RangeCalculator.class).setLevel(Level.DEBUG);

    conf.set(AggregatingRecordReader.START_TOKEN, "<page>");
    conf.set(AggregatingRecordReader.END_TOKEN, "</page>");
    conf.set(WikipediaConfiguration.TABLE_NAME, TABLE_NAME);
    conf.set(WikipediaConfiguration.NUM_PARTITIONS, "1");
    conf.set(WikipediaConfiguration.NUM_GROUPS, "1");

    MockInstance i = new MockInstance();
    c = i.getConnector("root", new PasswordToken(""));
    WikipediaIngester.createTables(c.tableOperations(), TABLE_NAME, false);
    for (String table : TABLE_NAMES) {
        writerMap.put(new Text(table), c.createBatchWriter(table, 1000L, 1000L, 1));
    }

    TaskAttemptID id = new TaskAttemptID("fake", 1, TaskType.MAP, 1, 1);
    TaskAttemptContext context = new TaskAttemptContextImpl(conf, id);

    RawLocalFileSystem fs = new RawLocalFileSystem();
    fs.setConf(conf);

    URL url = ClassLoader.getSystemResource("enwiki-20110901-001.xml");
    Assert.assertNotNull(url);
    File data = new File(url.toURI());
    Path tmpFile = new Path(data.getAbsolutePath());

    // Setup the Mapper
    WikipediaInputSplit split = new WikipediaInputSplit(
            new FileSplit(tmpFile, 0, fs.pathToFile(tmpFile).length(), null), 0);
    AggregatingRecordReader rr = new AggregatingRecordReader();
    Path ocPath = new Path(tmpFile, "oc");
    OutputCommitter oc = new FileOutputCommitter(ocPath, context);
    fs.deleteOnExit(ocPath);
    StandaloneStatusReporter sr = new StandaloneStatusReporter();
    rr.initialize(split, context);
    MockAccumuloRecordWriter rw = new MockAccumuloRecordWriter();
    WikipediaMapper mapper = new WikipediaMapper();

    // there are times I wonder, "Why do Java people think this is good?" then I drink more whiskey
    final MapContextImpl<LongWritable, Text, Text, Mutation> mapContext = new MapContextImpl<LongWritable, Text, Text, Mutation>(
            conf, id, rr, rw, oc, sr, split);
    // Load data into Mock Accumulo
    Mapper<LongWritable, Text, Text, Mutation>.Context con = mapper.new Context() {
        /**
         * Get the input split for this map.
         */
        public InputSplit getInputSplit() {
            return mapContext.getInputSplit();
        }

        @Override
        public LongWritable getCurrentKey() throws IOException, InterruptedException {
            return mapContext.getCurrentKey();
        }

        @Override
        public Text getCurrentValue() throws IOException, InterruptedException {
            return mapContext.getCurrentValue();
        }

        @Override
        public boolean nextKeyValue() throws IOException, InterruptedException {
            return mapContext.nextKeyValue();
        }

        @Override
        public Counter getCounter(Enum<?> counterName) {
            return mapContext.getCounter(counterName);
        }

        @Override
        public Counter getCounter(String groupName, String counterName) {
            return mapContext.getCounter(groupName, counterName);
        }

        @Override
        public OutputCommitter getOutputCommitter() {
            return mapContext.getOutputCommitter();
        }

        @Override
        public void write(Text key, Mutation value) throws IOException, InterruptedException {
            mapContext.write(key, value);
        }

        @Override
        public String getStatus() {
            return mapContext.getStatus();
        }

        @Override
        public TaskAttemptID getTaskAttemptID() {
            return mapContext.getTaskAttemptID();
        }

        @Override
        public void setStatus(String msg) {
            mapContext.setStatus(msg);
        }

        @Override
        public Path[] getArchiveClassPaths() {
            return mapContext.getArchiveClassPaths();
        }

        @Override
        public String[] getArchiveTimestamps() {
            return mapContext.getArchiveTimestamps();
        }

        @Override
        public URI[] getCacheArchives() throws IOException {
            return mapContext.getCacheArchives();
        }

        @Override
        public URI[] getCacheFiles() throws IOException {
            return mapContext.getCacheFiles();
        }

        @Override
        public Class<? extends Reducer<?, ?, ?, ?>> getCombinerClass() throws ClassNotFoundException {
            return mapContext.getCombinerClass();
        }

        @Override
        public Configuration getConfiguration() {
            return mapContext.getConfiguration();
        }

        @Override
        public Path[] getFileClassPaths() {
            return mapContext.getFileClassPaths();
        }

        @Override
        public String[] getFileTimestamps() {
            return mapContext.getFileTimestamps();
        }

        @Override
        public RawComparator<?> getGroupingComparator() {
            return mapContext.getGroupingComparator();
        }

        @Override
        public Class<? extends InputFormat<?, ?>> getInputFormatClass() throws ClassNotFoundException {
            return mapContext.getInputFormatClass();
        }

        @Override
        public String getJar() {
            return mapContext.getJar();
        }

        @Override
        public JobID getJobID() {
            return mapContext.getJobID();
        }

        @Override
        public String getJobName() {
            return mapContext.getJobName();
        }

        /*@Override
        public boolean userClassesTakesPrecedence() {
          return mapContext.userClassesTakesPrecedence();
        }*/

        @Override
        public boolean getJobSetupCleanupNeeded() {
            return mapContext.getJobSetupCleanupNeeded();
        }

        @Override
        public boolean getTaskCleanupNeeded() {
            return mapContext.getTaskCleanupNeeded();
        }

        @Override
        public Path[] getLocalCacheArchives() throws IOException {
            return mapContext.getLocalCacheArchives();
        }

        @Override
        public Path[] getLocalCacheFiles() throws IOException {
            return mapContext.getLocalCacheFiles();
        }

        @Override
        public Class<?> getMapOutputKeyClass() {
            return mapContext.getMapOutputKeyClass();
        }

        @Override
        public Class<?> getMapOutputValueClass() {
            return mapContext.getMapOutputValueClass();
        }

        @Override
        public Class<? extends Mapper<?, ?, ?, ?>> getMapperClass() throws ClassNotFoundException {
            return mapContext.getMapperClass();
        }

        @Override
        public int getMaxMapAttempts() {
            return mapContext.getMaxMapAttempts();
        }

        @Override
        public int getMaxReduceAttempts() {
            return mapContext.getMaxReduceAttempts();
        }

        @Override
        public int getNumReduceTasks() {
            return mapContext.getNumReduceTasks();
        }

        @Override
        public Class<? extends OutputFormat<?, ?>> getOutputFormatClass() throws ClassNotFoundException {
            return mapContext.getOutputFormatClass();
        }

        @Override
        public Class<?> getOutputKeyClass() {
            return mapContext.getOutputKeyClass();
        }

        @Override
        public Class<?> getOutputValueClass() {
            return mapContext.getOutputValueClass();
        }

        @Override
        public Class<? extends Partitioner<?, ?>> getPartitionerClass() throws ClassNotFoundException {
            return mapContext.getPartitionerClass();
        }

        @Override
        public Class<? extends Reducer<?, ?, ?, ?>> getReducerClass() throws ClassNotFoundException {
            return mapContext.getReducerClass();
        }

        @Override
        public RawComparator<?> getSortComparator() {
            return mapContext.getSortComparator();
        }

        @Override
        public boolean getSymlink() {
            return mapContext.getSymlink();
        }

        @Override
        public Path getWorkingDirectory() throws IOException {
            return mapContext.getWorkingDirectory();
        }

        @Override
        public void progress() {
            mapContext.progress();
        }

        @Override
        public boolean getProfileEnabled() {
            return mapContext.getProfileEnabled();
        }

        @Override
        public String getProfileParams() {
            return mapContext.getProfileParams();
        }

        @Override
        public IntegerRanges getProfileTaskRange(boolean isMap) {
            return mapContext.getProfileTaskRange(isMap);
        }

        @Override
        public String getUser() {
            return mapContext.getUser();
        }

        @Override
        public Credentials getCredentials() {
            return mapContext.getCredentials();
        }

        @Override
        public float getProgress() {
            return mapContext.getProgress();
        }
    };

    mapper.run(con);

    // Flush and close record writers.
    rw.close(context);

    table = new QueryLogic();
    table.setMetadataTableName(METADATA_TABLE_NAME);
    table.setTableName(TABLE_NAME);
    table.setIndexTableName(INDEX_TABLE_NAME);
    table.setReverseIndexTableName(RINDEX_TABLE_NAME);
    table.setUseReadAheadIterator(false);
    table.setUnevaluatedFields(Collections.singletonList("TEXT"));
}

From source file:org.apache.lucene.cassandra.HadoopFile.java

License:Apache License

public HadoopFile(String canonicalPath) {
    // canonicalPath = "index1/thisfile";
    // TODO Auto-generated constructor stub
    logger.trace("File(String canonicalPath: {})", canonicalPath);

    // set default file system to local file system
    conf.set("fs.file.impl", "org.apache.hadoop.fs.LocalFileSystem");

    // must set a conf here to the underlying FS, or it barks
    RawLocalFileSystem rawLFS = new RawLocalFileSystem();
    rawLFS.setConf(conf);
    thePrivateFile = new LocalFileSystem(rawLFS);
    thePrivateFile.setVerifyChecksum(false);

    path = new Path(thePrivateFile.getWorkingDirectory(), canonicalPath);

    long length;
    try {
        length = thePrivateFile.getFileStatus(path).getLen();
        logger.info("length() {}", length);
        System.out.println("output file: " + path);
        System.out.println("exist: " + thePrivateFile.exists(path));
        logger.trace("thePrivateFile.getFileStatus {}", thePrivateFile.getFileStatus(path));

        if (!thePrivateFile.exists(path)) {
            thePrivateFile.create(path, true);
        } else {
            // thePrivateFile.create(path, true);
        }
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }

    System.out.println("output file: " + path);
    System.out.println("output file: " + thePrivateFile.getWorkingDirectory());

}
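The constructor above uses a common wrapping pattern, and its own comment explains the setConf call: the raw filesystem "barks" if it consults its Configuration before one is set. A condensed sketch of the pattern (class name and file name are invented):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RawLocalFileSystem;

public class WrappedLocalFs {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.file.impl", "org.apache.hadoop.fs.LocalFileSystem");

        RawLocalFileSystem raw = new RawLocalFileSystem();
        raw.setConf(conf); // configure the underlying FS first

        LocalFileSystem fs = new LocalFileSystem(raw);
        fs.setVerifyChecksum(false); // skip .crc checksum files

        Path p = new Path(fs.getWorkingDirectory(), "demo.txt");
        System.out.println("exists: " + fs.exists(p));
    }
}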

From source file:org.apache.lucene.cassandra.HadoopFile.java

License:Apache License

public HadoopFile(File dir, String file) { // TODO create file in a directory.
    logger.trace("File(File dir  {}, String file {})", dir.getPath(), file);

    // set default file system to local file system
    conf.set("fs.file.impl", "org.apache.hadoop.fs.LocalFileSystem");

    // must set a conf here to the underlying FS, or it barks
    RawLocalFileSystem rawLFS = new RawLocalFileSystem();
    rawLFS.setConf(conf);
    thePrivateFile = new LocalFileSystem(rawLFS);
    thePrivateFile.setVerifyChecksum(false);
    path = new Path(dir.getPath() + "/" + file);
    try {
        if (!thePrivateFile.exists(path)) {
            thePrivateFile.create(path, true);
        } else {
            // thePrivateFile.create(path, true);
        }
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    this.exists();

}

From source file:org.apache.lucene.cassandra.HadoopRandomAccessFile.java

License:Apache License

public HadoopRandomAccessFile(File dir, String mode) throws IOException {
    logger.trace("RandomAccessFile(File dir {}, String mode {})", dir.getPath(), mode);

    // set default file system to local file system
    conf.set("fs.file.impl", "org.apache.hadoop.fs.LocalFileSystem");

    // must set a conf here to the underlying FS, or it barks
    RawLocalFileSystem rawLFS = new RawLocalFileSystem();
    rawLFS.setConf(conf);
    thePrivateFile = new LocalFileSystem(rawLFS);
    thePrivateFile.setVerifyChecksum(false);

    path = new Path(thePrivateFile.getWorkingDirectory(), dir.getPath());
    in = thePrivateFile.open(path);
    //        out = thePrivateFile.create(path, true);
    isOpen = true;

}
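This constructor relies on the stream returned by open() for random access. A hypothetical standalone sketch (class name and file path are invented): the FSDataInputStream that a configured RawLocalFileSystem returns is seekable, which is what lets a Hadoop stream back a RandomAccessFile-style class.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RawLocalFileSystem;

public class SeekDemo {
    public static void main(String[] args) throws Exception {
        RawLocalFileSystem fs = new RawLocalFileSystem();
        fs.setConf(new Configuration()); // open() reads the buffer size from conf

        Path p = new Path("/tmp/demo.bin"); // hypothetical existing file
        try (FSDataInputStream in = fs.open(p)) {
            in.seek(128);      // jump to byte offset 128
            int b = in.read(); // read one byte at that position
            System.out.println(b);
        }
    }
}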

From source file:org.icgc.dcc.submission.sftp.SftpServerServiceTest.java

License:Open Source License

private static RawLocalFileSystem fileSystem() {
    RawLocalFileSystem localFileSystem = new RawLocalFileSystem();
    localFileSystem.setConf(new Configuration());

    return localFileSystem;
}