Example usage for org.apache.hadoop.conf Configuration getDouble

Introduction

This page shows example usage of org.apache.hadoop.conf.Configuration.getDouble, collected from open-source projects.

Prototype

public double getDouble(String name, double defaultValue) 

Document

Get the value of the name property as a double.
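Before the project examples, here is a minimal self-contained sketch of the call (the demo.* property names are invented for illustration): the stored string is parsed with Double.parseDouble, the supplied default is returned when the property is unset, and a set-but-malformed value raises NumberFormatException, which the TermFreqReducer example below guards against.

import org.apache.hadoop.conf.Configuration;

public class GetDoubleDemo {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.set("demo.threshold", "0.25");

        // Property is set: the stored string is parsed, yielding 0.25.
        double threshold = conf.getDouble("demo.threshold", 0.5d);

        // Property is unset: the supplied default 0.5 is returned.
        double fallback = conf.getDouble("demo.missing", 0.5d);

        // A set but malformed value is not defaulted; parsing fails:
        // conf.set("demo.bad", "not-a-number");
        // conf.getDouble("demo.bad", 0.5d); // throws NumberFormatException

        System.out.println(threshold + " / " + fallback);
    }
}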

Usage

From source file:clustering.inverted_index.InvertedIndexReducer.java

License:Apache License

@Override
protected void setup(Context context) throws IOException, InterruptedException {
    Configuration conf = context.getConfiguration();
    int deci_num = conf.getInt("deci.number", 4);
    StringBuilder stringBuilder = new StringBuilder();
    stringBuilder.append("0.");
    for (int i = 0; i < deci_num; i++) {
        stringBuilder.append('0');
    }
    this.decimalFormat = new DecimalFormat(stringBuilder.toString());
    this.pruning = conf.getBoolean("pruning", false);
    this.pruningThreshold = conf.getDouble("pruning.threshold", 0.001d);
}

From source file:clustering.mst.FinalReducer.java

License:Apache License

@Override
protected void setup(Context context) throws IOException, InterruptedException {
    Configuration conf = context.getConfiguration();
    this.threshold = conf.getDouble("final.threshold", 0.5d);
    if (context.getCacheFiles() != null && context.getCacheFiles().length > 0) {

        FileReader fileReader = new FileReader("./docCnt");
        BufferedReader bufferedReader = new BufferedReader(fileReader);
        String line = bufferedReader.readLine();
        this.unionFind = new UnionFind(Integer.parseInt(line) + 1);

        bufferedReader.close();
        fileReader.close();
    }
}

From source file:clustering.tf_idf.TermFreqReducer.java

License:Apache License

@Override
protected void setup(Context context) throws IOException, InterruptedException {
    Configuration conf = context.getConfiguration();
    try {
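        // getDouble throws NumberFormatException when a set value is not a parseable double, hence the guard.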
        this.weight = conf.getDouble("gname.weight", 1.0d);
    } catch (NumberFormatException e) {
        e.printStackTrace();
    }
}

From source file:com.splicemachine.orc.input.OrcMapreduceRecordReader.java

License:Open Source License

@Override
public void initialize(InputSplit inputSplit, TaskAttemptContext taskAttemptContext)
        throws IOException, InterruptedException {
    OrcNewSplit orcNewSplit = (OrcNewSplit) inputSplit;
    Configuration configuration = taskAttemptContext.getConfiguration();
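    // These tuning values are read as doubles and wrapped as megabyte DataSize objects further down.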
    double maxMergeDistance = configuration.getDouble(MAX_MERGE_DISTANCE, MAX_MERGE_DISTANCE_DEFAULT);
    double maxReadSize = configuration.getDouble(MAX_READ_SIZE, MAX_READ_SIZE_DEFAULT);
    double streamBufferSize = configuration.getDouble(STREAM_BUFFER_SIZE, STREAM_BUFFER_SIZE_DEFAULT);
    Path path = orcNewSplit.getPath();
    FileSystem fileSystem = FileSystem.get(path.toUri(), configuration);
    long size = fileSystem.getFileStatus(path).getLen();
    FSDataInputStream inputStream = fileSystem.open(path);
    rowStruct = getRowStruct(configuration);
    predicate = getSplicePredicate(configuration);
    List<Integer> partitions = getPartitionIds(configuration);
    List<Integer> columnIds = getColumnIds(configuration);

    List<String> values = null;
    try {
        values = Warehouse.getPartValuesFromPartName(((OrcNewSplit) inputSplit).getPath().toString());
    } catch (MetaException me) {
        throw new IOException(me);
    }
    OrcDataSource orcDataSource = new HdfsOrcDataSource(path.toString(), size,
            new DataSize(maxMergeDistance, DataSize.Unit.MEGABYTE),
            new DataSize(maxReadSize, DataSize.Unit.MEGABYTE),
            new DataSize(streamBufferSize, DataSize.Unit.MEGABYTE), inputStream);
    OrcReader orcReader = new OrcReader(orcDataSource, new OrcMetadataReader(),
            new DataSize(maxMergeDistance, DataSize.Unit.MEGABYTE),
            new DataSize(maxReadSize, DataSize.Unit.MEGABYTE));
    orcRecordReader = orcReader.createRecordReader(getColumnsAndTypes(columnIds, rowStruct), predicate,
            HIVE_STORAGE_TIME_ZONE, new AggregatedMemoryContext(), partitions, values);
}

From source file:edu.iu.ccd.CCDMPCollectiveMapper.java

License:Apache License

/**
 * Mapper configuration.
 */
@Override
protected void setup(Context context) {
    LOG.info(
            "start setup: " + new SimpleDateFormat("yyyyMMdd_HHmmss").format(Calendar.getInstance().getTime()));
    long startTime = System.currentTimeMillis();
    Configuration configuration = context.getConfiguration();
    r = configuration.getInt(Constants.R, 100);
    lambda = configuration.getDouble(Constants.LAMBDA, 0.001);
    numIterations = configuration.getInt(Constants.NUM_ITERATIONS, 100);
    numThreads = configuration.getInt(Constants.NUM_THREADS, 16);
    modelDirPath = configuration.get(Constants.MODEL_DIR, "");
    numModelSlices = configuration.getInt(Constants.NUM_MODEL_SLICES, 2);
    testFilePath = configuration.get(Constants.TEST_FILE_PATH, "");
    rmseIteInterval = 1;
    printRMSE = false;
    testRMSE = 0.0;
    computeTime = 0L;
    prepareResTime = 0L;
    totalNumV = 0L;
    waitTime = 0L;
    long endTime = System.currentTimeMillis();
    LOG.info("config (ms): " + (endTime - startTime));
    LOG.info("R " + r);
    LOG.info("Lambda " + lambda);
    LOG.info("No. Iterations " + numIterations);
    LOG.info("No. Threads " + numThreads);
    LOG.info("Model Dir Path " + modelDirPath);
    LOG.info("No. Model Slices " + numModelSlices);
    LOG.info("TEST FILE PATH " + testFilePath);
}

From source file:edu.iu.lda.LDAMPCollectiveMapper.java

License:Apache License

/**
 * Mapper configuration.
 */
@Override
protected void setup(Context context) {
    LOG.info(
            "start setup: " + new SimpleDateFormat("yyyyMMdd_HHmmss").format(Calendar.getInstance().getTime()));
    long startTime = System.currentTimeMillis();
    Configuration configuration = context.getConfiguration();
    numTopics = configuration.getInt(Constants.NUM_TOPICS, 100);
    alpha = configuration.getDouble(Constants.ALPHA, 0.1);
    beta = configuration.getDouble(Constants.BETA, 0.001);
    numIterations = configuration.getInt(Constants.NUM_ITERATIONS, 100);
    numThreads = configuration.getInt(Constants.NUM_THREADS, 16);
    scheduleRatio = configuration.getDouble(Constants.SCHEDULE_RATIO, 2.0);
    minBound = configuration.getInt(Constants.MIN_BOUND, Constants.TRAIN_MIN_THRESHOLD);
    maxBound = configuration.getInt(Constants.MAX_BOUND, Constants.TRAIN_MAX_THRESHOLD);
    if (minBound <= 0 || minBound > 100) {
        minBound = Constants.TRAIN_MIN_THRESHOLD;
    }
    if (maxBound <= 0 || maxBound > 100) {
        maxBound = Constants.TRAIN_MAX_THRESHOLD;
    }
    if (maxBound < minBound) {
        maxBound = minBound;
    }
    if (maxBound == 100) {
        minBound = 100;
        enableTuning = false;
    } else {
        enableTuning = true;
    }
    time = enableTuning ? 1000L : 1000000000L;
    hasOverTrained = false;
    lastUnderTrainIte = 0;
    breakPeriod = 0;
    modelDirPath = configuration.get(Constants.MODEL_DIR, "");
    printModel = configuration.getBoolean(Constants.PRINT_MODEL, false);
    printInterval = 10;
    freeInterval = 10;
    numModelSlices = 2;
    computeTime = 0L;
    waitTime = 0L;
    long endTime = System.currentTimeMillis();
    LOG.info("config (ms): " + (endTime - startTime));
    LOG.info("Num Topics " + numTopics);
    LOG.info("Alpha " + alpha);
    LOG.info("Beta " + beta);
    LOG.info("Num Iterations " + numIterations);
    LOG.info("numThreads\\scheduleRaito " + numThreads + "\\" + scheduleRatio);
    LOG.info("enableTuning\\Time\\Bounds " + enableTuning + "\\" + time + "\\" + minBound + "\\" + maxBound);
    LOG.info("Model Dir Path " + modelDirPath);
    LOG.info("Print Model " + printModel);
    LOG.info("Model Slices " + numModelSlices);
    LOG.info("Container Memory " + configuration.get("mapreduce.map.collective.memory.mb"));
    LOG.info("Java Memory " + configuration.get("mapreduce.map.collective.java.opts"));
}

From source file:edu.iu.sgd.SGDCollectiveMapper.java

License:Apache License

/**
 * Mapper configuration.
 */
@Override
protected void setup(Context context) {
    LOG.info(
            "start setup: " + new SimpleDateFormat("yyyyMMdd_HHmmss").format(Calendar.getInstance().getTime()));
    long startTime = System.currentTimeMillis();
    Configuration configuration = context.getConfiguration();
    r = configuration.getInt(Constants.R, 100);
    lambda = configuration.getDouble(Constants.LAMBDA, 0.001);
    epsilon = configuration.getDouble(Constants.EPSILON, 0.001);
    numIterations = configuration.getInt(Constants.NUM_ITERATIONS, 100);
    trainRatio = configuration.getInt(Constants.TRAIN_RATIO, Constants.TARGET_BOUND);
    if (trainRatio <= 0 || trainRatio > 100) {
        trainRatio = Constants.TARGET_BOUND;
    }
    if (trainRatio == 100) {
        enableTuning = false;
    } else {
        enableTuning = true;
    }
    time = enableTuning ? 1000L : 1000000000L;
    numThreads = configuration.getInt(Constants.NUM_THREADS, 16);
    scheduleRatio = configuration.getDouble(Constants.SCHEDULE_RATIO, 2.0);
    modelDirPath = configuration.get(Constants.MODEL_DIR, "");
    testFilePath = configuration.get(Constants.TEST_FILE_PATH, "");
    numModelSlices = 2;
    rmseIteInterval = 5;
    freeInterval = 20;
    rmse = 0.0;
    testRMSE = 0.0;
    computeTime = 0L;
    waitTime = 0L;

    totalNumV = 0L;
    totalNumCols = 0L;
    oneOverSqrtR = 1.0 / Math.sqrt(r);
    random = new Random(System.currentTimeMillis());
    long endTime = System.currentTimeMillis();
    LOG.info("config (ms): " + (endTime - startTime));
    LOG.info("R " + r);
    LOG.info("Lambda " + lambda);
    LOG.info("Epsilon " + epsilon);
    LOG.info("Num Iterations " + numIterations);
    LOG.info("Num Threads " + numThreads + " " + scheduleRatio);
    LOG.info("enableTuning\\Time\\Bound " + enableTuning + "\\" + time + "\\" + trainRatio);
    LOG.info("Model Slices " + numModelSlices);
    LOG.info("Model Dir Path " + modelDirPath);
    LOG.info("TEST FILE PATH " + testFilePath);
    LOG.info("Container Memory " + configuration.get("mapreduce.map.collective.memory.mb"));
    LOG.info("Java Memory " + configuration.get("mapreduce.map.collective.java.opts"));
}

From source file:edu.usc.pgroup.louvain.hadoop.MapCommunity.java

License:Apache License

@Override
protected void setup(Context context) throws IOException, InterruptedException {
    Configuration configuration = context.getConfiguration();

    verbose = configuration.getBoolean(LouvainMR.VERBOSE, false);
    nb_pass = configuration.getInt(LouvainMR.NB_PASS, 0);
    precision = configuration.getDouble(LouvainMR.PRECISION, 0.000001);
    display_level = configuration.getInt(LouvainMR.DISPLAY_LEVEL, -1);
    outpath = configuration.get(LouvainMR.OUT_PATH);

    System.out.println("verbose = " + verbose);
    System.out.println("display_level = " + display_level);
    System.out.println("outpath = " + outpath);

    super.setup(context);

}

From source file:edu.usc.pgroup.louvain.hadoop.ReduceCommunity.java

License:Apache License

@Override
protected void setup(Context context) throws IOException, InterruptedException {
    Configuration configuration = context.getConfiguration();
    verbose = configuration.getBoolean(LouvainMR.VERBOSE, false);
    precision = configuration.getDouble(LouvainMR.PRECISION, 0.000001);
    display_level = configuration.getInt(LouvainMR.DISPLAY_LEVEL, -1);
    this.outpath = configuration.get(LouvainMR.OUT_PATH);
    System.out.println("verbose = " + verbose);
    System.out.println("display_level = " + display_level);
    System.out.println("outpath = " + outpath);

    super.setup(context);
}

From source file:org.apache.tez.common.TestTezUtils.java

License:Apache License

private void checkConf(Configuration conf) {
    Assert.assertEquals(conf.get("test1"), "value1");
    Assert.assertTrue(conf.getBoolean("test2", false));
    Assert.assertEquals(conf.getDouble("test3", 0), 1.2345, 1e-15);
    Assert.assertEquals(conf.getInt("test4", 0), 34567);
    Assert.assertEquals(conf.getLong("test5", 0), 1234567890L);
    String[] tmp = conf.getStrings("test6");
    Assert.assertEquals(tmp.length, 3);
    Assert.assertEquals(tmp[0], "S1");
    Assert.assertEquals(tmp[1], "S2");
    Assert.assertEquals(tmp[2], "S3");

}