Example usage for org.apache.hadoop.io.NullWritable.get()

Introduction

This page lists example usages of org.apache.hadoop.io.NullWritable.get().

Prototype

public static NullWritable get() 

Document

Returns the single instance of this class.
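
Because NullWritable is a singleton that serializes to zero bytes, get() is used wherever a Writable is required but carries no information, for example as a placeholder key or value in MapReduce output, or as the edge value of an unweighted Giraph graph. The reducer below is a minimal, hypothetical sketch (not taken from any of the projects listed under Usage) showing the most common pattern, writing each value with NullWritable.get() as the key:

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

// Hypothetical reducer: emits each input value without a key by using the
// NullWritable singleton, which contributes no bytes to the output record.
public class ValueOnlyReducer extends Reducer<LongWritable, Text, NullWritable, Text> {
    @Override
    protected void reduce(LongWritable key, Iterable<Text> values, Context context)
            throws IOException, InterruptedException {
        for (Text value : values) {
            // NullWritable.get() always returns the same immutable instance
            context.write(NullWritable.get(), value);
        }
    }
}

The same singleton is what the examples below pass as the default edge value to GiraphTestGraphLoader.createGraph and as the output key in the MapReduce reducers.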

Usage

From source file: nl.tudelft.graphalytics.giraph.algorithms.bfs.BreadthFirstSearchComputationTest.java

License: Apache License

@Override
public BreadthFirstSearchOutput executeDirectedBreadthFirstSearch(GraphStructure graph,
        BreadthFirstSearchParameters parameters) throws Exception {
    GiraphConfiguration configuration = new GiraphConfiguration();
    configuration.setComputationClass(BreadthFirstSearchComputation.class);
    BreadthFirstSearchConfiguration.SOURCE_VERTEX.set(configuration, parameters.getSourceVertex());

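    // NullWritable.get() supplies the shared (empty) default edge value for this unweighted test graph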
    TestGraph<LongWritable, LongWritable, NullWritable> inputGraph = GiraphTestGraphLoader
            .createGraph(configuration, graph, new LongWritable(Long.MAX_VALUE), NullWritable.get());

    TestGraph<LongWritable, LongWritable, NullWritable> result = InternalVertexRunner
            .runWithInMemoryOutput(configuration, inputGraph);

    Map<Long, Long> pathLengths = new HashMap<>();
    for (Map.Entry<LongWritable, Vertex<LongWritable, LongWritable, NullWritable>> vertexEntry : result
            .getVertices().entrySet()) {
        pathLengths.put(vertexEntry.getKey().get(), vertexEntry.getValue().getValue().get());
    }

    return new BreadthFirstSearchOutput(pathLengths);
}

From source file: nl.tudelft.graphalytics.giraph.algorithms.cdlp.CommunityDetectionLPComputationTest.java

License: Apache License

@Override
public CommunityDetectionLPOutput executeUndirectedCommunityDetection(GraphStructure graph,
        CommunityDetectionLPParameters parameters) throws Exception {
    GiraphConfiguration configuration = configurationFromParameters(
            UndirectedCommunityDetectionLPComputation.class, parameters);

    TestGraph<LongWritable, LongWritable, NullWritable> inputGraph = GiraphTestGraphLoader
            .createGraph(configuration, graph, new LongWritable(), NullWritable.get());

    TestGraph<LongWritable, LongWritable, NullWritable> result = InternalVertexRunner
            .runWithInMemoryOutput(configuration, inputGraph);

    return outputFromResultGraph(result);
}

From source file: nl.tudelft.graphalytics.giraph.algorithms.ffm.ForestFireModelComputationTest.java

License: Apache License

private GraphStructure performTest(Class<? extends Computation> computationClass, GraphStructure graph,
        ForestFireModelParameters parameters) throws Exception {
    GiraphConfiguration configuration = new GiraphConfiguration();
    configuration.setComputationClass(computationClass);
    configuration.setWorkerContextClass(ForestFireModelWorkerContext.class);
    ForestFireModelConfiguration.AVAILABLE_VERTEX_ID.set(configuration, parameters.getMaxId() + 1);
    ForestFireModelConfiguration.BACKWARD_PROBABILITY.set(configuration, parameters.getRRatio());
    ForestFireModelConfiguration.FORWARD_PROBABILITY.set(configuration, parameters.getPRatio());
    ForestFireModelConfiguration.MAX_ITERATIONS.set(configuration, parameters.getMaxIterations());
    ForestFireModelConfiguration.NEW_VERTICES.set(configuration, parameters.getNumNewVertices());

    TestGraph<LongWritable, ForestFireModelData, NullWritable> inputGraph = GiraphTestGraphLoader
            .createGraph(configuration, graph, new ForestFireModelData(), NullWritable.get());

    TestGraph<LongWritable, ForestFireModelData, NullWritable> result = InternalVertexRunner
            .runWithInMemoryOutput(configuration, inputGraph);

    Map<Long, Set<Long>> resultEdges = new HashMap<>();
    for (Map.Entry<LongWritable, Vertex<LongWritable, ForestFireModelData, NullWritable>> vertexEntry : result
            .getVertices().entrySet()) {
        resultEdges.put(vertexEntry.getKey().get(), new HashSet<Long>());
        for (Edge<LongWritable, NullWritable> edge : vertexEntry.getValue().getEdges()) {
            resultEdges.get(vertexEntry.getKey().get()).add(edge.getTargetVertexId().get());
        }
    }
    return new GraphStructure(resultEdges);
}

From source file: nl.tudelft.graphalytics.giraph.algorithms.lcc.LocalClusteringCoefficientComputationTest.java

License: Apache License

private static LocalClusteringCoefficientOutput executeLocalClusteringCoefficient(
        Class<? extends Computation> computationClass, GraphStructure graph) throws Exception {
    GiraphConfiguration configuration = new GiraphConfiguration();
    configuration.setComputationClass(computationClass);

    TestGraph<LongWritable, DoubleWritable, NullWritable> inputGraph = GiraphTestGraphLoader
            .createGraph(configuration, graph, new DoubleWritable(-1), NullWritable.get());

    TestGraph<LongWritable, DoubleWritable, NullWritable> result = InternalVertexRunner
            .runWithInMemoryOutput(configuration, inputGraph);

    Map<Long, Double> localClusteringCoefficients = new HashMap<>();
    for (Map.Entry<LongWritable, Vertex<LongWritable, DoubleWritable, NullWritable>> vertexEntry : result
            .getVertices().entrySet()) {
        localClusteringCoefficients.put(vertexEntry.getKey().get(), vertexEntry.getValue().getValue().get());
    }
    return new LocalClusteringCoefficientOutput(localClusteringCoefficients);
}

From source file: nl.tudelft.graphalytics.giraph.algorithms.pr.PageRankComputationTest.java

License: Apache License

@Override
public PageRankOutput executeDirectedPageRank(GraphStructure graph, PageRankParameters parameters)
        throws Exception {
    GiraphConfiguration configuration = new GiraphConfiguration();
    configuration.setComputationClass(PageRankComputation.class);
    configuration.setMasterComputeClass(PageRankMasterComputation.class);
    configuration.setWorkerContextClass(PageRankWorkerContext.class);
    PageRankConfiguration.DAMPING_FACTOR.set(configuration, parameters.getDampingFactor());
    PageRankConfiguration.NUMBER_OF_ITERATIONS.set(configuration, parameters.getNumberOfIterations());

    TestGraph<LongWritable, DoubleWritable, NullWritable> inputGraph = GiraphTestGraphLoader
            .createGraph(configuration, graph, new DoubleWritable(), NullWritable.get());

    TestGraph<LongWritable, DoubleWritable, NullWritable> result = InternalVertexRunner
            .runWithInMemoryOutput(configuration, inputGraph);

    Map<Long, Double> pageRanks = new HashMap<>();
    for (Map.Entry<LongWritable, Vertex<LongWritable, DoubleWritable, NullWritable>> vertexEntry : result
            .getVertices().entrySet()) {
        pageRanks.put(vertexEntry.getKey().get(), vertexEntry.getValue().getValue().get());
    }

    return new PageRankOutput(pageRanks);
}

From source file: nl.tudelft.graphalytics.giraph.algorithms.wcc.WeaklyConnectedComponentsComputationTest.java

License: Apache License

private static WeaklyConnectedComponentsOutput executeConnectedComponents(
        Class<? extends Computation> computationClass, GraphStructure graph) throws Exception {
    GiraphConfiguration configuration = new GiraphConfiguration();
    configuration.setComputationClass(computationClass);

    TestGraph<LongWritable, LongWritable, NullWritable> inputGraph = GiraphTestGraphLoader
            .createGraph(configuration, graph, new LongWritable(-1), NullWritable.get());

    TestGraph<LongWritable, LongWritable, NullWritable> result = InternalVertexRunner
            .runWithInMemoryOutput(configuration, inputGraph);

    Map<Long, Long> pathLengths = new HashMap<>();
    for (Map.Entry<LongWritable, Vertex<LongWritable, LongWritable, NullWritable>> vertexEntry : result
            .getVertices().entrySet()) {
        pathLengths.put(vertexEntry.getKey().get(), vertexEntry.getValue().getValue().get());
    }

    return new WeaklyConnectedComponentsOutput(pathLengths);
}

From source file: nl.tudelft.graphalytics.mapreducev2.conversion.DirectedVertexReducer.java

License: Apache License

@Override
protected void reduce(LongWritable key, Iterable<EdgeData> values, Context context)
        throws IOException, InterruptedException {
    // Fill separate buffers for incoming and outgoing edges
    StringBuffer sbIn = new StringBuffer();
    StringBuffer sbOut = new StringBuffer();

    // Loop through the messages and add them to the buffers
    boolean foundIn = false, foundOut = false;
    for (EdgeData edge : values) {
        if (edge.getTargetId() == key.get()) {
            // Ignore, this self-edge was added to force this vertex's existence
        } else if (edge.isOutgoing()) {
            if (foundOut)
                sbOut.append(',');
            sbOut.append(edge.getTargetId());
            foundOut = true;
        } else {
            if (foundIn)
                sbIn.append(',');
            sbIn.append(edge.getTargetId());
            foundIn = true;
        }
    }

    // Combine the vertex ID and neighbour lists using Marcin's format
    StringBuffer out = new StringBuffer(key.toString());
    out.append("\t#").append(sbIn.toString()).append("\t@").append(sbOut.toString());
    if (!foundOut)
        out.append('\t');

    // Output the constructed line
    outValue.set(out.toString());
    context.write(NullWritable.get(), outValue);
}

From source file: nl.tudelft.graphalytics.mapreducev2.conversion.UndirectedVertexReducer.java

License: Apache License

@Override
protected void reduce(LongWritable key, Iterable<LongWritable> values, Context context)
        throws IOException, InterruptedException {
    // Combine the vertex ID and neighbour IDs using a StringBuilder
    StringBuilder sb = new StringBuilder();
    sb.append(key.get());
    for (LongWritable neighbour : values) {
        sb.append(' ').append(neighbour.get());
    }

    // Output the constructed line
    outValue.set(sb.toString());
    context.write(NullWritable.get(), outValue);
}

From source file: oracle.kv.hadoop.hive.table.TableSerDe.java

License: Open Source License

@Override
public Writable serialize(Object obj, ObjectInspector objectInspector) throws SerDeException {

    LOG.debug("obj = " + obj + ", objectInspector = " + objectInspector.getClass().getSimpleName());

    final StructObjectInspector structInspector = (StructObjectInspector) objectInspector;

    final List<? extends StructField> structFields = structInspector.getAllStructFieldRefs();

    final List<String> hiveColumnNames = getSerdeParams().getColumnNames();

    if (structFields.size() != hiveColumnNames.size()) {
        final String msg = "Number of Hive columns to serialize [" + structFields.size()
                + "] does not equal number of columns [" + hiveColumnNames.size()
                + "] specified in the created Hive table [name=" + getHiveTableName() + "]";
        LOG.error(msg);
        throw new SerDeException(new IllegalArgumentException(msg));
    }

    kvMapWritable.clear();

    for (int i = 0; i < structFields.size(); i++) {

        final StructField structField = structFields.get(i);
        final String hiveColumnName = hiveColumnNames.get(i);

        if (structField != null) {

            /* Currently assume field is Hive primitive type. */

            final AbstractPrimitiveObjectInspector fieldObjInspector = (AbstractPrimitiveObjectInspector) structField
                    .getFieldObjectInspector();

            final Object fieldData = structInspector.getStructFieldData(obj, structField);

            Writable fieldValue = (Writable) fieldObjInspector.getPrimitiveWritableObject(fieldData);
            if (fieldValue == null) {
                if (PrimitiveCategory.STRING.equals(fieldObjInspector.getPrimitiveCategory())) {

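                    // A null string column is represented by the NullWritable singleton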
                    fieldValue = NullWritable.get();
                } else {
                    fieldValue = new IntWritable(0);
                }
            }

            kvMapWritable.put(new Text(hiveColumnName), fieldValue);
        }
    }
    return kvMapWritable;
}

From source file: org.ankus.mapreduce.algorithms.clustering.kmeans.KMeansClusterAssignFinalMapper.java

License: Apache License

@Override
protected void map(Object key, Text value, Context context) throws IOException, InterruptedException {
    String[] columns = value.toString().split(mDelimiter);
    int clusterIndex = -1;

    String writeValueStr = "";
    /*
     * Determine the index of the closest cluster
     */
    double distMin = 99999999;
    for (int k = 0; k < mClusterCnt; k++) {
        double attrDistanceSum = 0;
        double attrCnt = 0;

        for (int i = 0; i < columns.length; i++) {
            double distAttr = 0;

            if (CommonMethods.isContainIndex(mIndexArr, i, true)
                    && !CommonMethods.isContainIndex(mExceptionIndexArr, i, false)) {
                attrCnt++;
                if (CommonMethods.isContainIndex(mNominalIndexArr, i, false)) {
                    distAttr = mClusters[k].getAttributeDistance(i, columns[i],
                            ConfigurationVariable.NOMINAL_ATTRIBUTE);
                } else
                    distAttr = mClusters[k].getAttributeDistance(i, columns[i],
                            ConfigurationVariable.NUMERIC_ATTRIBUTE);

                attrDistanceSum += Math.pow(distAttr, 2);

                if (k == 0)
                    writeValueStr += columns[i] + mDelimiter;
            }

        }

        double dist = Math.sqrt(attrDistanceSum);
        if (dist < distMin) {
            distMin = dist;
            clusterIndex = k;
        }
    }

    //      context.write(NullWritable.get(), new Text(value + mDelimiter + clusterIndex + mDelimiter + distMin));
    context.write(NullWritable.get(), new Text(writeValueStr + clusterIndex + mDelimiter + distMin));
}