Example usage for org.apache.hadoop.mapred JobConf getInt

List of usage examples for org.apache.hadoop.mapred JobConf getInt

Introduction

This page collects example usages of org.apache.hadoop.mapred.JobConf#getInt.

Prototype

public int getInt(String name, int defaultValue) 

Document

Get the value of the name property as an int. If the property is not set, the supplied defaultValue is returned.
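
As a quick illustration (a minimal sketch; the property name "my.example.threshold" is hypothetical), getInt is typically paired with setInt when configuring a job:

public void exampleGetInt() {
    JobConf conf = new JobConf();
    conf.setInt("my.example.threshold", 42);

    // Property is set, so the stored value is returned.
    int threshold = conf.getInt("my.example.threshold", 10); // 42

    // Property is not set, so the default value is returned.
    int missing = conf.getInt("my.example.missing", 10); // 10
}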

Usage

From source file:DistribCountingReducer.java

License:Apache License

@Override
public void configure(JobConf conf) {
    minFreqPercent = conf.getInt("DISTRCOUNT.minFreqPercent", 20);
    datasetSize = conf.getInt("DISTRCOUNT.datasetSize", 1000);
}

From source file:BinomialSamplerMapper.java

License:Apache License

@Override
public void configure(JobConf conf) {
    reducersNum = conf.getInt("PARMM.reducersNum", 64);
    datasetSize = conf.getInt("PARMM.datasetSize", 1000);
}

From source file:PartitionMapper.java

License:Apache License

@Override
public void configure(JobConf conf) {
    reducersNum = conf.getInt("PARMM.reducersNum", 64);
}

From source file:RandIntPartSamplerMapper.java

License:Apache License

@Override
public void configure(JobConf conf) {
    id = conf.getInt("mapred.task.partition", -1);
    reducersNum = conf.getInt("PARMM.reducersNum", 1000);
    try {
        int id = conf.getInt("mapred.task.partition", -1);
        System.out.println("id: " + id);
        IntWritable[] toSampleArr = DefaultStringifier.loadArray(conf, "PARMM.toSampleArr_" + id,
                IntWritable.class);
        toSample = 0;
        for (IntWritable toSampleRed : toSampleArr) {
            toSample += toSampleRed.get();
        }
        System.out.println("toSample: " + toSample);
        sampleDestinations = new IntWritable[toSample];
        int i = 0;
        for (int k = 0; k < toSampleArr.length; k++) {
            for (int j = 0; j < toSampleArr[k].get(); j++) {
                sampleDestinations[i++] = new IntWritable(k);
            }
        }
        Collections.shuffle(Arrays.asList(sampleDestinations));
    } catch (IOException e) {
        // errors while loading the per-reducer sample counts are silently ignored
    }
}

From source file:FIMReducer.java

License:Apache License

@Override
public void configure(JobConf conf) {
    minFreqPercent = conf.getInt("PARMM.minFreqPercent", 20);
    sampleSize = conf.getInt("PARMM.sampleSize", 1000);
    epsilon = conf.getFloat("PARMM.epsilon", 0.05f);
    id = conf.getInt("mapred.task.partition", -1);
    set = false;
}

From source file:SleepJob.java

License:Apache License

public void configure(JobConf job) {
    this.mapSleepCount = job.getInt("sleep.job.map.sleep.count", mapSleepCount);
    this.reduceSleepCount = job.getInt("sleep.job.reduce.sleep.count", reduceSleepCount);
    this.mapSleepDuration = job.getLong("sleep.job.map.sleep.time", 100) / mapSleepCount;
    this.reduceSleepDuration = job.getLong("sleep.job.reduce.sleep.time", 100) / reduceSleepCount;
}

From source file:InputSamplerMapper.java

License:Apache License

@Override
public void configure(JobConf conf) {
    id = conf.getInt("mapred.task.partition", -1);
    try {
        map = new MapWritable();
        Path[] localFiles = DistributedCache.getLocalCacheFiles(conf);
        BufferedInputStream in = new BufferedInputStream(new FileInputStream(localFiles[0].toString()));
        map.readFields(new DataInputStream(in));
    } catch (IOException e) {
        System.err.println(e.getMessage());
    }
}

From source file:SleepJobWithArray.java

License:Apache License

public void configure(JobConf job) {
    this.mapSleepCount = job.getInt("sleep.job.map.sleep.count", mapSleepCount);
    this.initBigArray = job.getBoolean("initBigArray", false);
    this.bigArraySize = job.getInt("bigArraySize", bigArraySize);
    this.reduceSleepCount = job.getInt("sleep.job.reduce.sleep.count", reduceSleepCount);
    this.mapSleepDuration = job.getLong("sleep.job.map.sleep.time", 100) / mapSleepCount;
    this.reduceSleepDuration = job.getLong("sleep.job.reduce.sleep.time", 100) / reduceSleepCount;
}

From source file:alluxio.client.hadoop.AbstractIOMapper.java

License:Apache License

@Override
public void configure(JobConf conf) {
    setConf(conf);
    try {
        mFS = FileSystem.get(conf);
    } catch (Exception e) {
        throw new RuntimeException("Cannot create file system.", e);
    }
    mBufferSize = conf.getInt("test.io.file.buffer.size", 4096);
    mBuffer = new byte[mBufferSize];
    try {
        mHostname = InetAddress.getLocalHost().getHostName();
    } catch (Exception e) {
        mHostname = "localhost";
    }
}

From source file:cascading.flow.Flow.java

License:Open Source License

public static int getMaxConcurrentSteps(JobConf jobConf) {
    return jobConf.getInt("cascading.flow.maxconcurrentsteps", 0);
}