Example usage for org.apache.hadoop.io Text getLength

Introduction

This page lists example usages of org.apache.hadoop.io.Text.getLength(), collected from open-source projects.

Prototype

@Override
public int getLength() 

Document

Returns the number of bytes in the byte array.
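
A minimal, self-contained sketch (not from the original page) illustrating what getLength() reports: the number of valid UTF-8 bytes, which can differ both from String.length() and from the size of the backing array returned by getBytes().

import org.apache.hadoop.io.Text;

public class TextLengthDemo {
    public static void main(String[] args) {
        Text t = new Text("na\u00efve"); // 5 characters, but 6 UTF-8 bytes
        System.out.println(t.getLength());          // 6 -- valid UTF-8 bytes
        System.out.println(t.toString().length());  // 5 -- Java chars

        // getBytes() exposes the backing array, which may be longer than the
        // valid data; always pair it with getLength(), as the examples below do.
        t.set("hi");
        System.out.println(t.getLength());                        // 2
        System.out.println(t.getBytes().length >= t.getLength()); // true
    }
}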

Usage

From source file: org.apache.fluo.integration.client.FluoAdminImplIT.java

License: Apache License

@Test
public void testInitializeConfig() throws Exception {

    // stop oracle to avoid spurious exceptions when initializing
    oserver.stop();

    FluoConfiguration localConfig = new FluoConfiguration(config);
    localConfig.setProperty("fluo.test123", "${fluo.connection.application.name}");
    Assert.assertEquals(localConfig.getApplicationName(), localConfig.getString("fluo.test123"));

    try (FluoAdmin admin = new FluoAdminImpl(localConfig)) {

        InitializationOptions opts = new InitializationOptions().setClearZookeeper(true).setClearTable(true);
        admin.initialize(opts);

        // verify locality groups were set on the table
        Instance inst = new ZooKeeperInstance(config.getAccumuloInstance(), config.getAccumuloZookeepers());
        Connector conn = inst.getConnector(config.getAccumuloUser(),
                new PasswordToken(config.getAccumuloPassword()));
        Map<String, Set<Text>> localityGroups = conn.tableOperations()
                .getLocalityGroups(config.getAccumuloTable());
        Assert.assertEquals("Unexpected locality group count.", 1, localityGroups.size());
        Entry<String, Set<Text>> localityGroup = localityGroups.entrySet().iterator().next();
        Assert.assertEquals("'notify' locality group not found.", ColumnConstants.NOTIFY_LOCALITY_GROUP_NAME,
                localityGroup.getKey());
        Assert.assertEquals("'notify' locality group does not contain exactly 1 column family.", 1,
                localityGroup.getValue().size());
        Text colFam = localityGroup.getValue().iterator().next();
        Assert.assertTrue("'notify' locality group does not contain the correct column family.",
                ColumnConstants.NOTIFY_CF.contentEquals(colFam.getBytes(), 0, colFam.getLength()));
    }

    try (FluoClientImpl client = new FluoClientImpl(localConfig)) {
        FluoConfiguration sharedConfig = client.getSharedConfiguration();
        Assert.assertEquals(localConfig.getApplicationName(), sharedConfig.getString("fluo.test123"));
        Assert.assertEquals(localConfig.getApplicationName(), sharedConfig.getApplicationName());
    }
}

From source file: org.apache.fluo.recipes.spark.AccumuloRangePartitioner.java

License: Apache License

public AccumuloRangePartitioner(Collection<Text> listSplits) {
    this.splits = new ArrayList<>(listSplits.size());
    for (Text text : listSplits) {
        splits.add(Bytes.of(text.getBytes(), 0, text.getLength()));
    }
}
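
Note that Text.getBytes() returns the internal buffer, whose length can exceed the valid data, so the (0, text.getLength()) slice above is what keeps each copied split boundary correct.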

From source file: org.apache.gora.accumulo.store.AccumuloStore.java

License: Apache License

Text pad(Text key, int bytes) {
    if (key.getLength() < bytes)
        key = new Text(key);

    while (key.getLength() < bytes) {
        key.append(new byte[] { 0 }, 0, 1);
    }

    return key;
}
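
The initial new Text(key) copy guards against mutating the caller's instance before zero bytes are appended. For illustration, pad(new Text("ab"), 4) would return a Text whose four bytes are { 'a', 'b', 0, 0 }.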

From source file: org.apache.hama.pipes.BinaryProtocol.java

License: Apache License

/**
 * Write the given object to the stream. If it is a Text or BytesWritable,
 * write it directly. Otherwise, write it to a buffer and then write the
 * length and data to the stream.
 * 
 * @param obj the object to write
 * @throws IOException
 */
protected void writeObject(Writable obj) throws IOException {
    // For Text and BytesWritable, encode them directly, so that they end up
    // in C++ as the natural translations.
    if (obj instanceof Text) {
        Text t = (Text) obj;
        int len = t.getLength();
        WritableUtils.writeVInt(stream, len);
        stream.write(t.getBytes(), 0, len);
    } else if (obj instanceof BytesWritable) {
        BytesWritable b = (BytesWritable) obj;
        int len = b.getLength();
        WritableUtils.writeVInt(stream, len);
        stream.write(b.getBytes(), 0, len);
    } else {
        buffer.reset();
        obj.write(buffer);
        int length = buffer.getLength();
        WritableUtils.writeVInt(stream, length);
        stream.write(buffer.getData(), 0, length);
    }
}
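
For context, a minimal counterpart sketch (an assumption, not part of the Hama source) of how a reader would consume the frame written above: a vint length prefix followed by exactly that many raw bytes.

protected Text readText(DataInput in) throws IOException {
    int len = WritableUtils.readVInt(in); // length prefix written by writeObject
    byte[] buf = new byte[len];
    in.readFully(buf);                    // exactly 'len' raw UTF-8 bytes
    Text t = new Text();
    t.set(buf, 0, len);
    return t;
}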

From source file: org.apache.hama.pipes.protocol.BinaryProtocol.java

License: Apache License

/**
 * Write the given object to the stream. If it is a IntWritable, LongWritable,
 * FloatWritable, DoubleWritable, Text or BytesWritable, write it directly.
 * Otherwise, write it to a buffer and then write the length and data to the
 * stream.
 * 
 * @param obj the object to write
 * @throws IOException
 */
protected void writeObject(Writable obj) throws IOException {
    // For basic types IntWritable, LongWritable, Text and BytesWritable,
    // encode them directly, so that they end up
    // in C++ as the natural translations.
    if (obj instanceof Text) {
        Text t = (Text) obj;
        int len = t.getLength();
        WritableUtils.writeVInt(this.outStream, len);
        this.outStream.write(t.getBytes(), 0, len);

    } else if (obj instanceof BytesWritable) {
        BytesWritable b = (BytesWritable) obj;
        int len = b.getLength();
        WritableUtils.writeVInt(this.outStream, len);
        this.outStream.write(b.getBytes(), 0, len);

    } else if (obj instanceof IntWritable) {
        WritableUtils.writeVInt(this.outStream, ((IntWritable) obj).get());

    } else if (obj instanceof LongWritable) {
        WritableUtils.writeVLong(this.outStream, ((LongWritable) obj).get());

    } else {
        // Note: FloatWritable and DoubleWritable are written here
        obj.write(this.outStream);
    }
}
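
Unlike the fixed-width encodings of IntWritable and LongWritable, the vint forms used here occupy between one and five (respectively nine) bytes, so small values stay compact on the wire.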

From source file: org.apache.hive.common.util.HiveStringUtils.java

License: Apache License

public static int getTextUtfLength(Text t) {
    byte[] data = t.getBytes();
    int len = 0;
    for (int i = 0; i < t.getLength(); i++) {
        if (isUtfStartByte(data[i])) {
            len++;
        }
    }
    return len;
}
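
An illustrative comparison (not from the Hive source): getLength() counts UTF-8 bytes, while getTextUtfLength() counts sequence start bytes, i.e. Unicode code points.

Text t = new Text("h\u00e9llo");                          // the accented 'e' encodes to two UTF-8 bytes
System.out.println(t.getLength());                        // 6 -- UTF-8 bytes
System.out.println(HiveStringUtils.getTextUtfLength(t));  // 5 -- code points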

From source file: org.apache.hive.pdktest.Rot13.java

License: Apache License

public Text evaluate(Text s) {
    StringBuilder out = new StringBuilder(s.getLength());
    char[] ca = s.toString().toCharArray();
    for (char c : ca) {
        if (c >= 'a' && c <= 'm') {
            c += 13;
        } else if (c >= 'n' && c <= 'z') {
            c -= 13;
        } else if (c >= 'A' && c <= 'M') {
            c += 13;
        } else if (c >= 'N' && c <= 'Z') {
            c -= 13;
        }
        out.append(c);
    }
    t.set(out.toString()); // 't' is the UDF's reusable Text field (not shown in this excerpt)
    return t;
}
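
As a quick check, evaluating the Text "Uryyb" yields "Hello"; because ROT13 is its own inverse, evaluating the result again restores the input.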

From source file: org.apache.hyracks.hdfs.lib.TextKeyValueParserFactory.java

License: Apache License

@Override
public IKeyValueParser<LongWritable, Text> createKeyValueParser(final IHyracksTaskContext ctx)
        throws HyracksDataException {

    final ArrayTupleBuilder tb = new ArrayTupleBuilder(1);
    final FrameTupleAppender appender = new FrameTupleAppender(new VSizeFrame(ctx));

    return new IKeyValueParser<LongWritable, Text>() {

        @Override
        public void open(IFrameWriter writer) {

        }

        @Override
        public void parse(LongWritable key, Text value, IFrameWriter writer, String fileString)
                throws HyracksDataException {
            tb.reset();
            tb.addField(value.getBytes(), 0, value.getLength());
            FrameUtils.appendToWriter(writer, appender, tb.getFieldEndOffsets(), tb.getByteArray(), 0,
                    tb.getSize());
        }

        @Override
        public void close(IFrameWriter writer) throws HyracksDataException {
            appender.write(writer, false);
        }

    };
}

From source file: org.apache.hyracks.imru.dataflow.Hdtest.java

License: Apache License

public static JobSpecification createJob() throws Exception {
    JobSpecification spec = new JobSpecification();
    spec.setFrameSize(4096);

    String PATH_TO_HADOOP_CONF = "/home/wangrui/a/imru/hadoop-0.20.2/conf";
    String HDFS_INPUT_PATH = "/customer/customer.tbl,/customer_result/part-0";
    JobConf conf = new JobConf();
    conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/core-site.xml"));
    conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/mapred-site.xml"));
    conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/hdfs-site.xml"));
    FileInputFormat.setInputPaths(conf, HDFS_INPUT_PATH);
    conf.setInputFormat(TextInputFormat.class);
    RecordDescriptor recordDesc = new RecordDescriptor(
            new ISerializerDeserializer[] { UTF8StringSerializerDeserializer.INSTANCE });
    InputSplit[] splits = conf.getInputFormat().getSplits(conf, 1);
    HDFSReadOperatorDescriptor readOperator = new HDFSReadOperatorDescriptor(spec, recordDesc, conf, splits,
            new String[] { "NC0", "NC1" }, new IKeyValueParserFactory<LongWritable, Text>() {
                @Override
                public IKeyValueParser<LongWritable, Text> createKeyValueParser(final IHyracksTaskContext ctx) {
                    return new IKeyValueParser<LongWritable, Text>() {
                        TupleWriter tupleWriter;

                        @Override
                        public void open(IFrameWriter writer) throws HyracksDataException {
                            tupleWriter = new TupleWriter(ctx, writer, 1);
                        }

                        @Override
                        public void parse(LongWritable key, Text value, IFrameWriter writer, String fileString)
                                throws HyracksDataException {
                            try {
                                tupleWriter.write(value.getBytes(), 0, value.getLength());
                                tupleWriter.finishField();
                                tupleWriter.finishTuple();
                            } catch (IOException e) {
                                throw new HyracksDataException(e);
                            }
                        }

                        @Override
                        public void close(IFrameWriter writer) throws HyracksDataException {
                            tupleWriter.close();
                        }
                    };
                }

            });

    // createPartitionConstraint(spec, readOperator, new String[] {"NC0"});
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, readOperator, new String[] { "NC0", "NC1" });

    IOperatorDescriptor writer = new HDFSOD(spec, null, null, null);
    // createPartitionConstraint(spec, writer, outSplits);

    spec.connect(new OneToOneConnectorDescriptor(spec), readOperator, 0, writer, 0);

    spec.addRoot(writer);
    return spec;
}

From source file: org.apache.ignite.hadoop.io.TextPartiallyRawComparator.java

License: Apache License

/** {@inheritDoc} */
@Override
public int compare(Text val1, long val2Ptr, int val2Len) {
    int len2 = WritableUtils.decodeVIntSize(GridUnsafe.getByte(val2Ptr));

    return HadoopUtils.compareBytes(val1.getBytes(), val1.getLength(), val2Ptr + len2, val2Len - len2);
}