Example usage for org.apache.hadoop.fs FileSystem getLocal

Introduction

This page collects example usages of org.apache.hadoop.fs.FileSystem.getLocal, drawn from open-source projects. Each snippet is an excerpt from a larger class, so fields such as fs, conf, and streamDir are defined elsewhere in the source file.

Prototype

public static LocalFileSystem getLocal(Configuration conf) throws IOException 

Document

Get the local FileSystem.
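
Before the project examples below, here is a minimal, self-contained sketch of a typical call. It is not drawn from any of those usages: the class name, file path, and written payload are illustrative, and it assumes only a default Configuration.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;

public class GetLocalExample {
    public static void main(String[] args) throws IOException {
        // getLocal returns a FileSystem implementation backed by the
        // local disk, configured from the given Configuration.
        LocalFileSystem fs = FileSystem.getLocal(new Configuration());

        // Write a small file (the path is hypothetical).
        Path file = new Path("/tmp/getlocal-example.txt");
        try (FSDataOutputStream out = fs.create(file, true)) {
            out.writeUTF("hello, local fs");
        }

        // Inspect and clean up; the second delete argument is the
        // recursive flag, false here since this is a single file.
        System.out.println("length: " + fs.getFileStatus(file).getLen());
        fs.delete(file, false);
    }
}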

Usage

From source file: com.inmobi.databus.partition.TestClusterReaderEmptyStream.java

License: Apache License

@BeforeTest
public void setup() throws Exception {
    // setup cluster
    consumerNumber = 1;
    fs = FileSystem.getLocal(conf);
    fsUri = fs.getUri().toString();
    streamDir = new Path(new Path(TestUtil.getConfiguredRootDir(), this.getClass().getSimpleName()), testStream)
            .makeQualified(fs);
    HadoopUtil.setupHadoopCluster(conf, null, null, null, streamDir, false);
    inputFormatClass = SequenceFileInputFormat.class.getName();
    partitionMinList = new TreeSet<Integer>();
    for (int i = 0; i < 60; i++) {
        partitionMinList.add(i);
    }
    chkPoints = new TreeMap<Integer, PartitionCheckpoint>();
    partitionCheckpointList = new PartitionCheckpointList(chkPoints);
}

From source file: com.inmobi.databus.partition.TestLeastCheckpoint.java

License: Apache License

@BeforeTest
public void setup() throws Exception {
    fs = FileSystem.getLocal(new Configuration());
    rootDir = new Path(TestUtil.getConfiguredRootDir(), this.getClass().getSimpleName());
    chkPoints = new HashMap<Integer, PartitionCheckpoint>();
    createCheckpointList();
}

From source file: com.inmobi.databus.partition.TestPartitionReaderHadoopStream.java

License: Apache License

@BeforeTest
public void setup() throws Exception {
    consumerNumber = 1;
    // setup fs
    files = new String[] { HadoopUtil.files[1], HadoopUtil.files[3], HadoopUtil.files[5] };
    fs = FileSystem.getLocal(conf);
    streamDir = new Path(new Path(TestUtil.getConfiguredRootDir(), this.getClass().getSimpleName()), testStream)
            .makeQualified(fs);
    HadoopUtil.setupHadoopCluster(conf, files, null, databusFiles, streamDir, false);
    inputFormatClass = SequenceFileInputFormat.class.getName();
    partitionMinList = new TreeSet<Integer>();
    for (int i = 0; i < 60; i++) {
        partitionMinList.add(i);
    }
    Map<Integer, PartitionCheckpoint> chkpoints = new TreeMap<Integer, PartitionCheckpoint>();
    partitionCheckpointList = new PartitionCheckpointList(chkpoints);
}

From source file: com.inmobi.databus.partition.TestPartitionReaderWaitingHadoopStream.java

License: Apache License

@BeforeMethod
public void setup() throws Exception {
    consumerNumber = 1;
    conf = new Configuration();
    files = new String[] { HadoopUtil.files[1], HadoopUtil.files[3], HadoopUtil.files[5] };
    newFiles = new String[] { HadoopUtil.files[6], HadoopUtil.files[7], HadoopUtil.files[8] };
    // setup fs
    fs = FileSystem.getLocal(conf);
    streamDir = new Path(new Path(TestUtil.getConfiguredRootDir(), this.getClass().getSimpleName()), testStream)
            .makeQualified(fs);
    HadoopUtil.setupHadoopCluster(conf, files, null, databusFiles, streamDir, false);
    inputFormatClass = SequenceFileInputFormat.class.getName();
    partitionMinList = new HashSet<Integer>();
    for (int i = 0; i < 60; i++) {
        partitionMinList.add(i);
    }
    Map<Integer, PartitionCheckpoint> list = new HashMap<Integer, PartitionCheckpoint>();
    partitionCheckpointlist = new PartitionCheckpointList(list);
}

From source file: com.inmobi.databus.partition.TestPartitionReaderWithLeastFullCheckpoint.java

License: Apache License

@BeforeMethod
public void setup() throws Exception {
    consumerNumber = 1;
    files = new String[] { HadoopUtil.files[1], HadoopUtil.files[3], HadoopUtil.files[5] };
    databusFiles = new Path[6];
    conf = new Configuration();
    fs = FileSystem.getLocal(conf);
    streamDir = new Path(new Path(TestUtil.getConfiguredRootDir(), this.getClass().getSimpleName()), testStream)
            .makeQualified(fs);
    // initialize config
    HadoopUtil.setupHadoopCluster(conf, files, null, databusFiles, streamDir, true);
    inputFormatClass = SequenceFileInputFormat.class.getCanonicalName();
    partitionMinList = new HashSet<Integer>();
    for (int i = 0; i < 60; i++) {
        partitionMinList.add(i);
    }
    pchkPoints = new TreeMap<Integer, PartitionCheckpoint>();
    partitionCheckpointList = new PartitionCheckpointList(pchkPoints);
}

From source file: com.inmobi.databus.purge.DataPurgerServiceTest.java

License: Apache License

private void testPurgerService(String testfilename, int numofhourstoadd, boolean checkifexists,
        boolean checktrashexists) throws Exception {
    DatabusConfigParser configparser = new DatabusConfigParser(testfilename);
    DatabusConfig config = configparser.getConfig();

    for (Cluster cluster : config.getClusters().values()) {
        TestDataPurgerService service = new TestDataPurgerService(config, cluster);

        FileSystem fs = FileSystem.getLocal(new Configuration());
        fs.delete(new Path(cluster.getRootDir()), true);

        Calendar todaysdate = new GregorianCalendar(Calendar.getInstance().getTimeZone());
        todaysdate.add(Calendar.HOUR, numofhourstoadd);

        createTestPurgefiles(fs, cluster, todaysdate);

        service.runOnce();

        verifyPurgefiles(fs, cluster, todaysdate, checkifexists, checktrashexists);
        fs.delete(new Path(cluster.getRootDir()), true);
        fs.close();
    }
}

From source file: com.inmobi.databus.readers.TestHadoopStreamReader.java

License: Apache License

@BeforeTest
public void setup() throws Exception {
    consumerNumber = 1;
    files = new String[] { HadoopUtil.files[1], HadoopUtil.files[3], HadoopUtil.files[5] };
    conf = new Configuration();
    fs = FileSystem.getLocal(conf);
    streamDir = new Path(new Path(TestUtil.getConfiguredRootDir(), this.getClass().getSimpleName()), testStream)
            .makeQualified(fs);
    // initialize config
    HadoopUtil.setupHadoopCluster(conf, files, null, finalFiles, streamDir, false);
    inputFormatClass = SequenceFileInputFormat.class.getCanonicalName();
    encoded = false;
    partitionMinList = new TreeSet<Integer>();
    for (int i = 0; i < 60; i++) {
        partitionMinList.add(i);
    }
    chkPoints = new TreeMap<Integer, PartitionCheckpoint>();
    partitionCheckpointList = new PartitionCheckpointList(chkPoints);
}

From source file: com.inmobi.messaging.consumer.databus.TestConsumerPartitionMinList.java

License: Apache License

@AfterTest
public void cleanUp() throws IOException {
    testConsumer.close();
    FileSystem fs = FileSystem.getLocal(new Configuration());
    fs.delete(new Path(chkpointPath).getParent(), true);
}

From source file: com.inmobi.messaging.consumer.hadoop.TestAbstractHadoopConsumer.java

License: Apache License

public void cleanup() throws IOException {
    FileSystem lfs = FileSystem.getLocal(conf);
    for (Path rootDir : rootDirs) {
        LOG.debug("Cleaning up the dir: " + rootDir.getParent());
        lfs.delete(rootDir.getParent(), true);
    }
    lfs.delete(new Path(chkpointPathPrefix).getParent(), true);
}

From source file: com.inmobi.messaging.consumer.hadoop.TestConsumerPartitionRetention.java

License: Apache License

@AfterTest
public void cleanup() throws Exception {
    FileSystem lfs = FileSystem.getLocal(conf);
    for (Path rootDir : rootDirs) {
        LOG.debug("Cleaning Up the dir: " + rootDir.getParent());
        lfs.delete(rootDir.getParent(), true);
    }
    lfs.delete(new Path(chkpointPath).getParent(), true);
}