Example usage for the org.apache.hadoop.io.BooleanWritable constructor BooleanWritable(boolean)

Introduction

On this page you can find example usages of the org.apache.hadoop.io.BooleanWritable constructor BooleanWritable(boolean), collected from open-source projects.

Prototype

public BooleanWritable(boolean value) 
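
Before the project examples, here is a minimal, self-contained sketch of the constructor in isolation. The get/set/write/readFields calls are the standard Hadoop Writable API; the demo class itself is illustrative only:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.BooleanWritable;

public class BooleanWritableDemo {
    public static void main(String[] args) throws IOException {
        // construct with an initial value, then mutate in place
        BooleanWritable flag = new BooleanWritable(true);
        flag.set(false);

        // round-trip through the Writable contract
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        flag.write(new DataOutputStream(bos));
        BooleanWritable copy = new BooleanWritable(true);
        copy.readFields(new DataInputStream(new ByteArrayInputStream(bos.toByteArray())));

        System.out.println(copy.get()); // prints: false
    }
}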

Usage

From source file: edu.ub.ahstfg.io.DocumentDistance.java

License: Open Source License

/**
 * Constructor specifying the document descriptor and the distance to centroid.
 * @param doc Document descriptor.
 * @param distance Distance to centroid.
 */
public DocumentDistance(DocumentDescriptor doc, double distance) {
    this.doc = doc;
    this.distance = new DoubleWritable(distance);
    stub = new BooleanWritable(false);
}
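
Note the stub field: judging from its name, the BooleanWritable(false) is a placeholder kept so the record always serializes the same set of fields; only the descriptor and distance carry information.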

From source file: eu.stratosphere.hadoopcompatibility.datatypes.DefaultStratosphereTypeConverter.java

License: Apache License

@SuppressWarnings("unchecked")
private <T> T convert(Record stratosphereType, int pos, Class<T> hadoopType) {
    if (hadoopType == LongWritable.class) {
        return (T) new LongWritable((stratosphereType.getField(pos, LongValue.class)).getValue());
    }
    if (hadoopType == org.apache.hadoop.io.Text.class) {
        return (T) new Text((stratosphereType.getField(pos, StringValue.class)).getValue());
    }
    if (hadoopType == org.apache.hadoop.io.IntWritable.class) {
        return (T) new IntWritable((stratosphereType.getField(pos, IntValue.class)).getValue());
    }
    if (hadoopType == org.apache.hadoop.io.FloatWritable.class) {
        return (T) new FloatWritable((stratosphereType.getField(pos, FloatValue.class)).getValue());
    }
    if (hadoopType == org.apache.hadoop.io.DoubleWritable.class) {
        return (T) new DoubleWritable((stratosphereType.getField(pos, DoubleValue.class)).getValue());
    }
    if (hadoopType == org.apache.hadoop.io.BooleanWritable.class) {
        return (T) new BooleanWritable((stratosphereType.getField(pos, BooleanValue.class)).getValue());
    }
    if (hadoopType == org.apache.hadoop.io.ByteWritable.class) {
        return (T) new ByteWritable((stratosphereType.getField(pos, ByteValue.class)).getValue());
    }

    throw new RuntimeException("Unable to convert Stratosphere type ("
            + stratosphereType.getClass().getCanonicalName() + ") to Hadoop.");
}
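
A design note on the dispatch above: the chain compares Class objects directly (hadoopType == BooleanWritable.class and so on), so lookups are exact; a subclass of one of these Writable types would fall through to the RuntimeException rather than match its parent's branch.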

From source file: hivemall.anomaly.ChangeFinderUDF.java

License: Apache License

@Override
public ObjectInspector initialize(@Nonnull ObjectInspector[] argOIs) throws UDFArgumentException {
    if (argOIs.length < 1 || argOIs.length > 2) {
        throw new UDFArgumentException(
                "_FUNC_(double|array<double> x [, const string options]) takes 1 or 2 arguments: "
                        + Arrays.toString(argOIs));
    }

    this._params = new Parameters();
    if (argOIs.length == 2) {
        String options = HiveUtils.getConstString(argOIs[1]);
        processOptions(options);
    }

    ObjectInspector argOI0 = argOIs[0];
    if (HiveUtils.isListOI(argOI0)) {
        ListObjectInspector listOI = HiveUtils.asListOI(argOI0);
        this._changeFinder = new ChangeFinder2D(_params, listOI);
    } else if (HiveUtils.isNumberOI(argOI0)) {
        PrimitiveObjectInspector xOI = HiveUtils.asDoubleCompatibleOI(argOI0);
        this._changeFinder = new ChangeFinder1D(_params, xOI);
    }

    this._scores = new double[2];

    final Object[] result;
    final ArrayList<String> fieldNames = new ArrayList<String>();
    final ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
    fieldNames.add("outlier_score");
    fieldOIs.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
    fieldNames.add("changepoint_score");
    fieldOIs.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
    if (_params.outlierThreshold != -1d) {
        fieldNames.add("is_outlier");
        fieldOIs.add(PrimitiveObjectInspectorFactory.writableBooleanObjectInspector);
        this._isOutlier = new BooleanWritable(false);
        if (_params.changepointThreshold != -1d) {
            fieldNames.add("is_changepoint");
            fieldOIs.add(PrimitiveObjectInspectorFactory.writableBooleanObjectInspector);
            result = new Object[4];
            this._isChangepoint = new BooleanWritable(false);
            result[3] = _isChangepoint;
        } else {
            result = new Object[3];
        }
        result[2] = _isOutlier;
    } else {
        result = new Object[2];
    }
    this._outlierScore = new DoubleWritable(0d);
    result[0] = _outlierScore;
    this._changepointScore = new DoubleWritable(0d);
    result[1] = _changepointScore;
    this._result = result;

    return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
}

From source file: hivemall.anomaly.SingularSpectrumTransformUDF.java

License: Apache License

@Override
public ObjectInspector initialize(@Nonnull ObjectInspector[] argOIs) throws UDFArgumentException {
    if (argOIs.length < 1 || argOIs.length > 2) {
        throw new UDFArgumentException(
                "_FUNC_(double|array<double> x [, const string options]) takes 1 or 2 arguments: "
                        + Arrays.toString(argOIs));
    }

    this._params = new Parameters();
    if (argOIs.length == 2) {
        String options = HiveUtils.getConstString(argOIs[1]);
        processOptions(options);
    }

    ObjectInspector argOI0 = argOIs[0];
    PrimitiveObjectInspector xOI = HiveUtils.asDoubleCompatibleOI(argOI0);
    this._sst = new SingularSpectrumTransform(_params, xOI);

    this._scores = new double[1];

    final Object[] result;
    final ArrayList<String> fieldNames = new ArrayList<String>();
    final ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
    fieldNames.add("changepoint_score");
    fieldOIs.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
    if (_params.changepointThreshold != -1d) {
        fieldNames.add("is_changepoint");
        fieldOIs.add(PrimitiveObjectInspectorFactory.writableBooleanObjectInspector);
        result = new Object[2];
        this._isChangepoint = new BooleanWritable(false);
        result[1] = _isChangepoint;
    } else {
        result = new Object[1];
    }
    this._changepointScore = new DoubleWritable(0.d);
    result[0] = _changepointScore;
    this._result = result;

    return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
}

From source file: hivemall.utils.hadoop.WritableUtils.java

License: Open Source License

public static BooleanWritable val(final boolean v) {
    return new BooleanWritable(v);
}
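
The factory is a one-line wrapper over the constructor, so a call site reads, for example:

BooleanWritable flag = WritableUtils.val(true);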

From source file: hivemall.utils.hadoop.WritableUtils.java

License: Open Source License

public static Writable toWritable(Object object) {
    if (object == null) {
        return null; //return NullWritable.get();
    }
    if (object instanceof Writable) {
        return (Writable) object;
    }
    if (object instanceof String) {
        return new Text((String) object);
    }
    if (object instanceof Long) {
        return new VLongWritable((Long) object);
    }
    if (object instanceof Integer) {
        return new VIntWritable((Integer) object);
    }
    if (object instanceof Byte) {
        return new ByteWritable((Byte) object);
    }
    if (object instanceof Double) {
        return new DoubleWritable((Double) object);
    }
    if (object instanceof Float) {
        return new FloatWritable((Float) object);
    }
    if (object instanceof Boolean) {
        return new BooleanWritable((Boolean) object);
    }
    if (object instanceof byte[]) {
        return new BytesWritable((byte[]) object);
    }
    return new BytesWritable(object.toString().getBytes());
}
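
An illustration of the boolean-related paths through toWritable (the return values follow directly from the branches above):

Writable w = WritableUtils.toWritable(Boolean.TRUE); // instanceof Boolean -> new BooleanWritable(true)
Writable n = WritableUtils.toWritable(null);         // null stays null rather than NullWritable.get()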

From source file: hydrograph.engine.cascading.scheme.hive.parquet.ParquetWritableUtils.java

License: Apache License

private static Writable createPrimitive(final Object obj, final PrimitiveObjectInspector inspector)
        throws SerDeException {
    if (obj == null) {
        return null;
    }

    switch (inspector.getPrimitiveCategory()) {
    case VOID:
        return null;
    case BOOLEAN:
        return new BooleanWritable(
                ((BooleanObjectInspector) inspector).get(new BooleanWritable((boolean) obj)));
    case BYTE:
        return new ByteWritable(((ByteObjectInspector) inspector).get(new ByteWritable((byte) obj)));
    case DOUBLE:
        return new DoubleWritable(((DoubleObjectInspector) inspector).get(new DoubleWritable((double) obj)));
    case FLOAT:
        return new FloatWritable(((FloatObjectInspector) inspector).get(new FloatWritable((float) obj)));
    case INT:
        return new IntWritable(((IntObjectInspector) inspector).get(new IntWritable((int) obj)));
    case LONG:
        return new LongWritable(((LongObjectInspector) inspector).get(new LongWritable((long) obj)));
    case SHORT:
        return new ShortWritable(((ShortObjectInspector) inspector).get(new ShortWritable((short) obj)));
    case STRING:
        String v;
        if (obj instanceof Long) {
            SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd");
            Date date = new Date((long) obj);
            v = df.format(date);
        } else if (obj instanceof BigDecimal) {
            BigDecimal bigDecimalObj = (BigDecimal) obj;
            v = bigDecimalObj.toString();
        } else {
            v = ((StringObjectInspector) inspector).getPrimitiveJavaObject(obj);
        }
        try {
            return new BytesWritable(v.getBytes("UTF-8"));
        } catch (UnsupportedEncodingException e) {
            throw new SerDeException("Failed to encode string in UTF-8", e);
        }
    case DECIMAL:
        HiveDecimal hd;
        if (obj instanceof Double) {
            hd = HiveDecimal.create(new BigDecimal((Double) obj));
        } else if (obj instanceof BigDecimal) {
            hd = HiveDecimal.create((BigDecimal) obj);
        } else {
            // if "obj" is other than Double or BigDecimal and a vaild
            // number, .toString, will get its correct number representation
            // and a BigDecimal object will be created
            hd = HiveDecimal.create(new BigDecimal(obj.toString()));
        }
        return new HiveDecimalWritable(hd);
    case TIMESTAMP:
        return new TimestampWritable(((TimestampObjectInspector) inspector)
                .getPrimitiveJavaObject(new TimestampWritable(new Timestamp((long) obj))));
    case DATE:
        return new DateWritable(((DateObjectInspector) inspector)
                .getPrimitiveJavaObject(new DateWritable(new Date((long) obj))));
    case CHAR:
        String strippedValue = ((HiveCharObjectInspector) inspector).getPrimitiveJavaObject(obj)
                .getStrippedValue();
        return new BytesWritable(Binary.fromString(strippedValue).getBytes());
    case VARCHAR:
        String value = ((HiveVarcharObjectInspector) inspector).getPrimitiveJavaObject(obj).getValue();
        return new BytesWritable(Binary.fromString(value).getBytes());
    default:
        throw new SerDeException("Unknown primitive : " + inspector.getPrimitiveCategory());
    }
}
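
Worth pointing out for the BOOLEAN branch (and its primitive siblings): the raw value is first boxed into a Writable so the ObjectInspector can read it back, and the extracted primitive is then wrapped into the Writable that is actually returned. Also note that STRING, CHAR, and VARCHAR values come back as BytesWritable holding UTF-8 bytes rather than Text, presumably because the Parquet writer downstream consumes raw bytes.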

From source file: inflater.computation.SimpleComputation.java

License: MIT License

@Override
public void compute(Vertex<LongWritable, VertexValuesWritable, EdgeValuesWritable> vertex,
        Iterable<MessageWritable> messages) throws IOException {

    if (getSuperstep() == 0) {
        long scale = getConf().getLong(SCALE, 30);
        kValue = getConf().getDouble(KVALUE, -1);
        /*
         * calculate canvas size, if it's not set
         */
        canvasSize = Math.round(getTotalNumVertices() * 0.618 * 0.7 * scale);
        /*
         * calculate kValue - the optimized distance between vertices
         */
        if (kValue < 0) {
            kValue = Math.sqrt(canvasSize * canvasSize / getTotalNumVertices());
            getConf().setDouble(KVALUE, kValue);
        }
        gravity = new Coordinate(canvasSize / 2.0d, canvasSize / 2.0d);

        /*
         * We randomly position vertices in a square sized half of the
         * canvas
         */
        Random rd = new Random();
        double x = rd.nextDouble() * canvasSize;
        double y = rd.nextDouble() * canvasSize;
        vertex.getValue().getCoordinate().get().setXY(x, y);
    } else {
        Coordinate self = vertex.getValue().getCoordinate().get();
        Coordinate disp = new Coordinate();

        for (MessageWritable message : messages) {
            /*
             * Ignore messages from self
             */
            if (message.getSenderId().equals(vertex.getId()))
                continue;

            Coordinate ref = message.getSenderCoordinate().get();
            boolean linked = message.getLinked().get();
            disp = ComputationUtil.addCoor(disp, resolveForce(self, ref, linked));
        }

        disp = ComputationUtil.addCoor(disp, resolveGravity(self));
        applyChanges(vertex, disp);
    }

    /*
     * We send messages with "unlinked" to all vertices. Then send messages
     * with "linked" to linked vertices.
     */
    MessageWritable toAll = new MessageWritable(vertex.getId(), vertex.getValue().getCoordinate(),
            new BooleanWritable(false));
    for (long i = 1; i <= getTotalNumVertices(); i++) {
        sendMessage(new LongWritable(i), toAll);
    }

    MessageWritable toLinked = new MessageWritable(vertex.getId(), vertex.getValue().getCoordinate(),
            new BooleanWritable(true));
    sendMessageToAllEdges(vertex, toLinked);

}
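
Here the BooleanWritable carried in each MessageWritable is the "linked" flag: every vertex receives a broadcast message built with new BooleanWritable(false), while neighbors along edges additionally receive one built with new BooleanWritable(true), and resolveForce() uses the flag to decide which force to apply. The kValue = sqrt(area / n) formula suggests a Fruchterman-Reingold-style force-directed layout, though that is an inference from the code rather than something the snippet states.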

From source file: inflater.datatypes.writable.MessageWritable.java

License: MIT License

public MessageWritable(long senderId, Coordinate senderCoordinate, boolean linked) {
    this(new LongWritable(senderId), new CoordinateWritable(senderCoordinate), new BooleanWritable(linked));
}

From source file: it.crs4.pydoop.mapreduce.pipes.TestPipeApplication.java

License: Apache License

/**
 * test org.apache.hadoop.mapreduce.pipes.PipesReducer
 * test the transfer of data: key and value
 *
 * @throws Exception
 */
@Test
public void testPipesReducer() throws Exception {
    System.err.println("testPipesReducer");

    File[] psw = cleanTokenPasswordFile();
    try {
        JobID jobId = new JobID("201408272347", 0);
        TaskID taskId = new TaskID(jobId, TaskType.MAP, 0);
        TaskAttemptID taskAttemptid = new TaskAttemptID(taskId, 0);

        Job job = new Job(new Configuration());
        job.setJobID(jobId);
        Configuration conf = job.getConfiguration();
        conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskAttemptid.toString());
        FileSystem fs = new RawLocalFileSystem();
        fs.setConf(conf);

        File fCommand = getFileCommand("it.crs4.pydoop.mapreduce.pipes.PipeReducerStub");
        conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath());
        System.err.println("fCommand" + fCommand.getAbsolutePath());

        Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>("user".getBytes(),
                "password".getBytes(), new Text("kind"), new Text("service"));
        TokenCache.setJobToken(token, job.getCredentials());
        conf.setBoolean(MRJobConfig.SKIP_RECORDS, true);

        TestReporter reporter = new TestReporter();
        DummyInputFormat input_format = new DummyInputFormat();
        List<InputSplit> isplits = input_format.getSplits(job);
        InputSplit isplit = isplits.get(0);
        TaskAttemptContextImpl tcontext = new TaskAttemptContextImpl(conf, taskAttemptid);

        RecordWriter<IntWritable, Text> writer = new TestRecordWriter(
                new FileOutputStream(workSpace.getAbsolutePath() + File.separator + "outfile"));

        BooleanWritable bw = new BooleanWritable(true);
        List<Text> texts = new ArrayList<Text>();
        texts.add(new Text("first"));
        texts.add(new Text("second"));
        texts.add(new Text("third"));

        DummyRawKeyValueIterator kvit = new DummyRawKeyValueIterator();

        ReduceContextImpl<BooleanWritable, Text, IntWritable, Text> context = new ReduceContextImpl<BooleanWritable, Text, IntWritable, Text>(
                conf, taskAttemptid, kvit, null, null, writer, null, null, null, BooleanWritable.class,
                Text.class);

        PipesReducer<BooleanWritable, Text, IntWritable, Text> reducer = new PipesReducer<BooleanWritable, Text, IntWritable, Text>();
        reducer.setup(context);

        initStdOut(conf);
        reducer.reduce(bw, texts, context);
        reducer.cleanup(context);
        String stdOut = readStdOut(conf);

        // test data: key
        assertTrue(stdOut.contains("reducer key :true"));
        // and values
        assertTrue(stdOut.contains("reduce value  :first"));
        assertTrue(stdOut.contains("reduce value  :second"));
        assertTrue(stdOut.contains("reduce value  :third"));

    } finally {
        if (psw != null) {
            // remove password files
            for (File file : psw) {
                file.deleteOnExit();
            }
        }
    }

}