Example usage for org.apache.hadoop.io IntWritable get

Introduction

This page collects examples of how org.apache.hadoop.io.IntWritable.get() is used in open-source code.

Prototype

public int get() 

Document

Return the value of this IntWritable.
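
For orientation, here is a minimal, self-contained sketch (illustrative class and variable names, not taken from any of the projects listed below): get() simply returns the int previously stored through the constructor or set().

import org.apache.hadoop.io.IntWritable;

public class IntWritableGetDemo {
    public static void main(String[] args) {
        IntWritable count = new IntWritable(41); // wrap a primitive int
        System.out.println(count.get());         // prints 41

        count.set(count.get() + 1);              // read, increment, store back
        System.out.println(count.get());         // prints 42
    }
}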

Usage

From source file: org.apache.impala.hive.executor.TestUdf.java

License: Apache License

public double evaluate(IntWritable a, double b) {
    if (a == null)
        return -1;
    return ((double) a.get()) + b;
}

From source file: org.apache.impala.hive.executor.TestUdf.java

License: Apache License

public int evaluate(IntWritable a, int b, int c, IntWritable d) {
    if (a == null || d == null)
        return -1;
    return a.get() + b + c + d.get();
}

From source file: org.apache.jena.grande.giraph.FoafShortestPathsVertex.java

License: Apache License

@Override
public void compute(Iterable<IntWritable> msgIterator) throws IOException {
    log.debug("compute(...)::{}#{} ...", getId(), getSuperstep());
    if ((getSuperstep() == 0) || (getSuperstep() == 1)) {
        setValue(new IntWritable(Integer.MAX_VALUE));
    }
    int minDist = isSource() ? 0 : Integer.MAX_VALUE;
    log.debug("compute(...)::{}#{}: min = {}, value = {}",
            new Object[] { getId(), getSuperstep(), minDist, getValue() });
    for (IntWritable msg : msgIterator) {
        log.debug("compute(...)::{}#{}: <--[{}]-- from ?", new Object[] { getId(), getSuperstep(), msg });
        minDist = Math.min(minDist, msg.get());
        log.debug("compute(...)::{}#{}: min = {}", new Object[] { getId(), getSuperstep(), minDist });
    }
    if (minDist < getValue().get()) {
        setValue(new IntWritable(minDist));
        log.debug("compute(...)::{}#{}: value = {}", new Object[] { getId(), getSuperstep(), getValue() });
        for (Edge<NodeWritable, NodeWritable> edge : getEdges()) {
            log.debug("compute(...)::{}#{}: {} --[{}]--> {}",
                    new Object[] { getId(), getSuperstep(), getId(), minDist + 1, edge.getTargetVertexId() });
            sendMessage(edge.getTargetVertexId(), new IntWritable(minDist + 1));
        }
    }
    voteToHalt();
}

From source file: org.apache.jena.grande.giraph.sssps.SingleSourceShortestPaths.java

License: Apache License

@Override
public void compute(Iterable<IntWritable> msgIterator) throws IOException {
    log.debug("compute(...)::{}#{} ...", getId(), getSuperstep());
    if ((getSuperstep() == 0) || (getSuperstep() == 1)) {
        setValue(new IntWritable(Integer.MAX_VALUE));
    }
    int minDist = isSource() ? 0 : Integer.MAX_VALUE;
    log.debug("compute(...)::{}#{}: min = {}, value = {}",
            new Object[] { getId(), getSuperstep(), minDist, getValue() });
    for (IntWritable msg : msgIterator) {
        log.debug("compute(...)::{}#{}: <--[{}]-- from ?", new Object[] { getId(), getSuperstep(), msg });
        minDist = Math.min(minDist, msg.get());
        log.debug("compute(...)::{}#{}: min = {}", new Object[] { getId(), getSuperstep(), minDist });
    }
    if (minDist < getValue().get()) {
        setValue(new IntWritable(minDist));
        log.debug("compute(...)::{}#{}: value = {}", new Object[] { getId(), getSuperstep(), getValue() });
        for (Edge<IntWritable, NullWritable> edge : getEdges()) {
            log.debug("compute(...)::{}#{}: {} --[{}]--> {}",
                    new Object[] { getId(), getSuperstep(), getId(), minDist + 1, edge.getTargetVertexId() });
            sendMessage(edge.getTargetVertexId(), new IntWritable(minDist + 1));
        }
    }
    voteToHalt();
}

From source file: org.apache.jena.tdbloader4.StatsReducer.java

License: Apache License

@Override
public void reduce(Text key, Iterable<IntWritable> values, Context context)
        throws IOException, InterruptedException {
    Iterator<IntWritable> iter = values.iterator();
    int sum = 0;
    while (iter.hasNext()) {
        IntWritable v = iter.next();
        log.debug("< ({}, {})", key, v);
        sum += v.get();
    }
    value.set(sum); // 'value' is an IntWritable field of this reducer, reused across calls
    context.write(key, value);
    log.debug("> ({}, {})", key, value);
}

From source file: org.apache.kylin.engine.mr.steps.MergeDictionaryMapper.java

License: Apache License

@Override
protected void doMap(IntWritable key, NullWritable value, Context context)
        throws IOException, InterruptedException {

    int index = key.get();

    if (index < tblColRefs.length) {
        // merge dictionary
        TblColRef col = tblColRefs[index];
        List<DictionaryInfo> dictInfos = Lists.newArrayList();
        for (CubeSegment segment : mergingSegments) {
            if (segment.getDictResPath(col) != null) {
                DictionaryInfo dictInfo = dictMgr.getDictionaryInfo(segment.getDictResPath(col));
                if (dictInfo != null && !dictInfos.contains(dictInfo)) {
                    dictInfos.add(dictInfo);
                }
            }
        }

        DictionaryInfo mergedDictInfo = dictMgr.mergeDictionary(dictInfos);
        String tblCol = col.getTableAlias() + ":" + col.getName();
        String dictInfoPath = mergedDictInfo == null ? "" : mergedDictInfo.getResourcePath();

        context.write(new IntWritable(-1), new Text(tblCol + "=" + dictInfoPath));

    } else {
        // merge statistics
        KylinConfig kylinConfig = AbstractHadoopJob.loadKylinConfigFromHdfs(
                new SerializableConfiguration(context.getConfiguration()),
                context.getConfiguration().get(BatchConstants.ARG_META_URL));

        final String cubeName = context.getConfiguration().get(BatchConstants.ARG_CUBE_NAME);
        final String segmentId = context.getConfiguration().get(BatchConstants.ARG_SEGMENT_ID);
        final String statOutputPath = context.getConfiguration()
                .get(MergeDictionaryJob.OPTION_OUTPUT_PATH_STAT.getOpt());
        CubeInstance cubeInstance = CubeManager.getInstance(kylinConfig).getCube(cubeName);

        logger.info("Statistics output path: {}", statOutputPath);

        CubeSegment newSegment = cubeInstance.getSegmentById(segmentId);
        ResourceStore rs = ResourceStore.getStore(kylinConfig);

        Map<Long, HLLCounter> cuboidHLLMap = Maps.newHashMap();
        Configuration conf = null;
        int averageSamplingPercentage = 0;

        for (CubeSegment cubeSegment : mergingSegments) {
            String filePath = cubeSegment.getStatisticsResourcePath();
            InputStream is = rs.getResource(filePath).inputStream;
            File tempFile;
            FileOutputStream tempFileStream = null;

            try {
                tempFile = File.createTempFile(segmentId, ".seq");
                tempFileStream = new FileOutputStream(tempFile);
                org.apache.commons.io.IOUtils.copy(is, tempFileStream);
            } finally {
                IOUtils.closeStream(is);
                IOUtils.closeStream(tempFileStream);
            }

            FileSystem fs = HadoopUtil.getFileSystem("file:///" + tempFile.getAbsolutePath());
            SequenceFile.Reader reader = null;
            try {
                conf = HadoopUtil.getCurrentConfiguration();
                //noinspection deprecation
                reader = new SequenceFile.Reader(fs, new Path(tempFile.getAbsolutePath()), conf);
                LongWritable keyW = (LongWritable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
                BytesWritable valueW = (BytesWritable) ReflectionUtils.newInstance(reader.getValueClass(),
                        conf);

                while (reader.next(keyW, valueW)) {
                    if (keyW.get() == 0L) {
                        // sampling percentage;
                        averageSamplingPercentage += Bytes.toInt(valueW.getBytes());
                    } else if (keyW.get() > 0) {
                        HLLCounter hll = new HLLCounter(kylinConfig.getCubeStatsHLLPrecision());
                        ByteArray byteArray = new ByteArray(valueW.getBytes());
                        hll.readRegisters(byteArray.asBuffer());

                        if (cuboidHLLMap.get(keyW.get()) != null) {
                            cuboidHLLMap.get(keyW.get()).merge(hll);
                        } else {
                            cuboidHLLMap.put(keyW.get(), hll);
                        }
                    }
                }
            } catch (Exception e) {
                e.printStackTrace();
                throw e;
            } finally {
                IOUtils.closeStream(reader);
            }
        }

        averageSamplingPercentage = averageSamplingPercentage / mergingSegments.size();
        CubeStatsWriter.writeCuboidStatistics(conf, new Path(statOutputPath), cuboidHLLMap,
                averageSamplingPercentage);
        Path statisticsFilePath = new Path(statOutputPath,
                BatchConstants.CFG_STATISTICS_CUBOID_ESTIMATION_FILENAME);

        FileSystem fs = HadoopUtil.getFileSystem(statisticsFilePath, conf);
        FSDataInputStream fis = fs.open(statisticsFilePath);

        try {
            // put the statistics to metadata store
            String statisticsFileName = newSegment.getStatisticsResourcePath();
            rs.putResource(statisticsFileName, fis, System.currentTimeMillis());
        } finally {
            IOUtils.closeStream(fis);
        }

        context.write(new IntWritable(-1), new Text(""));
    }
}

From source file: org.apache.kylin.job.hadoop.cardinality.ColumnCardinalityReducer.java

License: Apache License

@Override
public void reduce(IntWritable key, Iterable<BytesWritable> values, Context context)
        throws IOException, InterruptedException {
    int skey = key.get();
    for (BytesWritable v : values) {
        ByteBuffer buffer = ByteBuffer.wrap(v.getBytes());
        HyperLogLogPlusCounter hll = new HyperLogLogPlusCounter();
        hll.readRegisters(buffer);
        getHllc(skey).merge(hll);
        hll.clear();
    }
}

From source file: org.apache.kylin.source.hive.cardinality.ColumnCardinalityReducer.java

License: Apache License

@Override
public void doReduce(IntWritable key, Iterable<BytesWritable> values, Context context)
        throws IOException, InterruptedException {
    int skey = key.get();
    for (BytesWritable v : values) {
        ByteBuffer buffer = ByteBuffer.wrap(v.getBytes());
        HLLCounter hll = new HLLCounter();
        hll.readRegisters(buffer);
        getHllc(skey).merge(hll);
        hll.clear();
    }
}

From source file: org.apache.mahout.cf.taste.hadoop.als.ALS.java

License: Apache License

public static OpenIntObjectHashMap<Vector> readMatrixByRowsFromDistributedCache(int numEntities,
        Configuration conf) throws IOException {

    IntWritable rowIndex = new IntWritable();
    VectorWritable row = new VectorWritable();

    OpenIntObjectHashMap<Vector> featureMatrix = numEntities > 0 ? new OpenIntObjectHashMap<Vector>(numEntities)
            : new OpenIntObjectHashMap<Vector>();

    Path[] cachedFiles = HadoopUtil.getCachedFiles(conf);
    LocalFileSystem localFs = FileSystem.getLocal(conf);

    for (Path cachedFile : cachedFiles) {

        SequenceFile.Reader reader = null;
        try {
            reader = new SequenceFile.Reader(localFs, cachedFile, conf);
            while (reader.next(rowIndex, row)) {
                featureMatrix.put(rowIndex.get(), row.get());
            }
        } finally {
            Closeables.close(reader, true);
        }
    }

    Preconditions.checkState(!featureMatrix.isEmpty(), "Feature matrix is empty");
    return featureMatrix;
}

From source file: org.apache.mahout.cf.taste.hadoop.als.eval.InMemoryFactorizationEvaluator.java

License: Apache License

private Matrix readMatrix(Path dir) throws IOException {

    Matrix matrix = new SparseMatrix(new int[] { Integer.MAX_VALUE, Integer.MAX_VALUE });

    FileSystem fs = dir.getFileSystem(getConf());
    for (FileStatus seqFile : fs.globStatus(new Path(dir, "part-*"))) {
        Path path = seqFile.getPath();
        SequenceFile.Reader reader = null;
        try {
            reader = new SequenceFile.Reader(fs, path, getConf());
            IntWritable key = new IntWritable();
            VectorWritable value = new VectorWritable();
            while (reader.next(key, value)) {
                int row = key.get();
                Iterator<Vector.Element> elementsIterator = value.get().iterateNonZero();
                while (elementsIterator.hasNext()) {
                    Vector.Element element = elementsIterator.next();
                    matrix.set(row, element.index(), element.get());
                }
            }
        } finally {
            Closeables.closeQuietly(reader);
        }
    }
    return matrix;
}