Example usage for org.apache.hadoop.io Text getBytes

Introduction

On this page you can find example usages of org.apache.hadoop.io.Text#getBytes().

Prototype

@Override
public byte[] getBytes() 

Document

Returns the raw bytes; however, only data up to #getLength() is valid.
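
Because Text reuses its backing byte array across set(...) calls, getBytes() can return an array longer than the logical value, so reads must always be bounded by getLength(). A minimal sketch of the pitfall (class name and sample values are illustrative; exact buffer-reuse behavior can vary by Hadoop version):

import java.nio.charset.StandardCharsets;

import org.apache.hadoop.io.Text;

public class TextGetBytesDemo {
    public static void main(String[] args) {
        Text text = new Text();
        text.set("hello world".getBytes(StandardCharsets.UTF_8), 0, 11);
        // reusing the instance leaves stale bytes in the backing array
        text.set("hi".getBytes(StandardCharsets.UTF_8), 0, 2);

        // wrong: decodes stale bytes past getLength() -> "hillo world"
        String wrong = new String(text.getBytes(), StandardCharsets.UTF_8);

        // right: only the first getLength() bytes are valid -> "hi"
        String right = new String(text.getBytes(), 0, text.getLength(), StandardCharsets.UTF_8);

        System.out.println(wrong + " / " + right);
    }
}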

Usage

From source file:org.teiid.translator.accumulo.AccumuloQueryExecution.java

License:Open Source License

private byte[] buildValue(String pattern, Text cq, Value value) {
    if (pattern == null) {
        return value.get();
    }
    pattern = pattern.substring(1, pattern.length() - 1); // strip the enclosing curly braces
    if (pattern.equals(AccumuloMetadataProcessor.ValueIn.VALUE.name())) {
        return value.get();
    } else if (pattern.equals(AccumuloMetadataProcessor.ValueIn.CQ.name())) {
        return cq.getBytes();
    }
    return null;
}
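
Here the pattern argument is expected in the form {VALUE} or {CQ}: the enclosing braces are stripped and the remainder is matched against the AccumuloMetadataProcessor.ValueIn names to decide whether the cell's value or its column qualifier supplies the bytes.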

From source file:org.teiid.translator.accumulo.AccumuloUpdateExecution.java

License:Open Source License

private void performUpdate(Update update)
        throws TranslatorException, TableNotFoundException, MutationsRejectedException {
    Table table = update.getTable().getMetadataObject();

    if (update.getParameterValues() != null) {
        throw new TranslatorException(AccumuloPlugin.Event.TEIID19005,
                AccumuloPlugin.Util.gs(AccumuloPlugin.Event.TEIID19005));
    }

    AccumuloQueryVisitor visitor = new AccumuloQueryVisitor(this.aef);
    visitor.visitNode(update.getWhere());
    if (!visitor.exceptions.isEmpty()) {
        throw visitor.exceptions.get(0);
    }

    Connector connector = this.connection.getInstance();
    BatchWriter writer = createBatchWriter(table, connector);

    Text prevRow = null;
    Iterator<Entry<Key, Value>> results = AccumuloQueryExecution.runQuery(this.aef,
            this.connection.getInstance(), this.connection.getAuthorizations(), visitor.getRanges(), table,
            visitor.scanIterators());
    while (results.hasNext()) {
        Key key = results.next().getKey();
        Text rowId = key.getRow();

        if (prevRow == null || !prevRow.equals(rowId)) {
            prevRow = rowId;
            List<SetClause> changes = update.getChanges();
            for (SetClause clause : changes) {
                Column column = clause.getSymbol().getMetadataObject();
                if (SQLStringVisitor.getRecordName(column).equalsIgnoreCase(AccumuloMetadataProcessor.ROWID)) {
                    throw new TranslatorException(AccumuloPlugin.Event.TEIID19002,
                            AccumuloPlugin.Util.gs(AccumuloPlugin.Event.TEIID19002, table.getName()));
                }
                Expression value = clause.getValue();
                if (value instanceof Literal) {
                    Mutation mutation = buildMutation(rowId.getBytes(), column, ((Literal) value).getValue());
                    writer.addMutation(mutation);
                } else {
                    throw new TranslatorException(AccumuloPlugin.Event.TEIID19001,
                            AccumuloPlugin.Util.gs(AccumuloPlugin.Event.TEIID19001));
                }
            }
            this.updateCount++;
        }
    }
    writer.close();
}
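
Because the scan returns one Key/Value entry per column in sorted order, entries for the same row arrive together, and the prevRow check ensures each matching row is mutated only once. Note also that key.getRow() returns a freshly allocated Text sized to the row, so rowId.getBytes() happens to carry no stale trailing bytes here.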

From source file:org.trend.hgraph.mapreduce.lib.input.CalculateInputSplitReducer.java

License:Apache License

@Override
protected void reduce(Text regionKey, Iterable<Text> values, Context context)
        throws IOException, InterruptedException {
    String regionName = Bytes.toString(regionKey.getBytes(), 0, regionKey.getLength()).trim();
    List<String> rowKeys = new ArrayList<String>();
    String row = null;
    HRegionLocation location = null;
    int count = 0;

    System.out.println("start to processing region:" + regionName);
    for (Text rowKey : values) {
        row = getKeyString(rowKey.getBytes());
        LOGGER.debug("row=" + row);
        rowKeys.add(row);
        if (count == 0) {
            location = vertexTable.getRegionLocation(rowKey.getBytes(), false);
        }
        count++;
    }

    if (mappersForOneRegion > count) {
        throw new IllegalArgumentException(MAPPERS_FOR_ONE_REGION + " shall not be bigger than total count:"
                + count + " for region:" + regionName);
    }

    int baseBuckets = count / mappersForOneRegion;
    int extraBuckets = count % mappersForOneRegion;
    if (baseBuckets < 2) {
        throw new IllegalStateException("baseBuckets:" + baseBuckets
                + " shall be at least 2, otherwise one or more pairs will have duplicate start/end rowKeys");
    }

    int[] bucketsForEachMapper = new int[mappersForOneRegion];
    for (int a = bucketsForEachMapper.length - 1; a >= 0; a--) {
        bucketsForEachMapper[a] = baseBuckets;
        if (extraBuckets > 0) {
            bucketsForEachMapper[a] = bucketsForEachMapper[a] + 1;
            extraBuckets--;
        }
    }

    System.out.println("bucketsForEachMapper=" + Arrays.toString(bucketsForEachMapper));
    int buckets = 0;
    int idx = 0;
    String startRowKey = null;
    String endRowKey = null;
    byte[] endKey = location.getRegionInfo().getEndKey();
    for (int a = 0; a < bucketsForEachMapper.length; a++) {
        buckets = bucketsForEachMapper[a];
        startRowKey = rowKeys.get(idx);
        if ((a + 1) == bucketsForEachMapper.length) {
            if (!Arrays.equals(HConstants.EMPTY_END_ROW, endKey)) {
                endRowKey = Bytes.toString(endKey);
            } else {
                idx = idx + buckets - 1;
                endRowKey = rowKeys.get(idx);
            }
        } else {
            idx = idx + buckets;
            endRowKey = rowKeys.get(idx);
        }

        // write one regionName, startRowKey and endRowKey pair
        context.write(new Text(regionName + DELIMITER + startRowKey + DELIMITER + endRowKey),
                NullWritable.get());
        context.getCounter(Counters.ROW_COUNT).increment(1L);
    }
    System.out.println("processing region:" + regionName + " compeleted");
    // do housekeeping
    rowKeys.clear();
    rowKeys = null;
}
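
For illustration (hypothetical numbers): with count = 10 row keys and mappersForOneRegion = 3, baseBuckets is 3 and extraBuckets is 1, so bucketsForEachMapper becomes [3, 3, 4] and the region is emitted as three splits bounded by rowKeys 0-3, 3-6, and 6-9, with the region's own end key replacing the last boundary when it is non-empty.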

From source file:org.trend.hgraph.mapreduce.pagerank.CalculateIntermediatePageRankMapper.java

License:Apache License

@Override
protected void map(final Text key, final DoubleWritable value, final Context context)
        throws IOException, InterruptedException {
    String rowKey = Bytes.toString(key.getBytes(), 0, key.getLength()).trim();
    double pageRank = value.get();
    // write current pageRank to tmp
    Utils.writePageRank(vertexTable, rowKey, tmpPageRankCq, pageRank);

    Configuration conf = context.getConfiguration();
    List<String> outgoingRowKeys = null;

    context.getCounter(Counters.VERTEX_COUNT).increment(1);
    outgoingRowKeys = getOutgoingRowKeys(conf, vertexTable, edgeTable, rowKey,
            context.getCounter(Counters.GET_OUTGOING_VERTICES_TIME_CONSUMED));
    dispatchPageRank(outgoingRowKeys, pageRank, conf, edgeTable,
            context.getCounter(Counters.DISPATCH_PR_TIME_CONSUMED),
            context.getCounter(Counters.OUTGOING_EDGE_COUNT), new ContextWriterStrategy() {
                @Override
                public void write(String key, double value) throws IOException, InterruptedException {
                    context.write(new Text(key), new DoubleWritable(value));
                }
            });
}

From source file:org.trend.hgraph.mapreduce.pagerank.CalculatePageRankReducer.java

License:Apache License

@Override
protected void reduce(Text key, Iterable<DoubleWritable> incomingPageRanks, Context context)
        throws IOException, InterruptedException {

    String rowkey = Bytes.toString(key.getBytes(), 0, key.getLength()).trim();
    double incomingPageRankSum = 0.0D;
    StopWatch sw = new StopWatch();
    sw.start();
    for (DoubleWritable incomingPageRank : incomingPageRanks) {
        incomingPageRankSum = incomingPageRankSum + incomingPageRank.get();
    }
    // calculate new pageRank here
    double newPageRank = (dampingFactor * incomingPageRankSum) + ((1.0D - dampingFactor) / verticesTotalCnt);
    sw.stop();
    context.getCounter(Counters.CAL_NEW_PR_TIME_CONSUMED).increment(sw.getTime());

    sw.reset();
    sw.start();
    double oldPageRank = Utils.getPageRank(vertexTable, rowkey, Constants.PAGE_RANK_CQ_TMP_NAME);
    if (!pageRankEquals(oldPageRank, newPageRank, pageRankCompareScale)) {
        // collect pageRank changing count with counter
        context.getCounter(Counters.CHANGED_PAGE_RANK_COUNT).increment(1);
    }
    sw.stop();
    context.getCounter(Counters.CMP_OLD_NEW_PR_TIME_CONSUMED).increment(sw.getTime());

    context.write(key, new DoubleWritable(newPageRank));
}
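
For reference, this computes the standard damped PageRank update PR(v) = (1 - d)/N + d * (sum of incoming ranks), with d = dampingFactor and N = verticesTotalCnt; each incoming value has presumably already been divided by the sender's out-degree in the mapper's dispatch step.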

From source file:org.ujmp.hadoop.HadoopMap.java

License:Open Source License

public V get(Object key) {
    try {
        prepareReader();
        Text k = new Text(SerializationUtil.serialize((Serializable) key));
        Text v = new Text();
        Text t = (Text) reader.get(k, v);
        if (t == null || t.getBytes() == null || t.getBytes().length == 0) {
            return null;
        }
        // assuming SerializationUtil uses standard Java serialization, reading stops at
        // the end of the object stream, so stale bytes past getLength() are harmless here
        return (V) SerializationUtil.deserialize(t.getBytes());
    } catch (Exception e) {
        throw new RuntimeException("could not get value for key: " + key, e);
    }
}

From source file:parquet.hive.serde.primitive.ParquetStringInspector.java

License:Apache License

@Override
public Object set(final Object o, final Text text) {
    // bound by getLength() so stale bytes in the reused Text buffer are excluded
    return new BinaryWritable(
            text == null ? null : Binary.fromByteArray(text.getBytes(), 0, text.getLength()));
}

From source file:test.KeyValueTextOutputFormat.java

License:Apache License

/**
 * Create the final output file and write rows one by one. After each row is
 * appended, a configured row separator is appended.
 *
 * @param jc
 *          the job configuration file
 * @param outPath
 *          the final output file to be created
 * @param valueClass
 *          the value class used for create
 * @param isCompressed
 *          whether the content is compressed or not
 * @param tableProperties
 *          the tableProperties of this file's corresponding table
 * @param progress
 *          progress used for status report
 * @return the RecordWriter
 */
@Override
public RecordWriter getHiveRecordWriter(JobConf jc, Path outPath, Class<? extends Writable> valueClass,
        boolean isCompressed, Properties tableProperties, Progressable progress) throws IOException {
    int rowSeparator = 0;
    String rowSeparatorString = tableProperties.getProperty(Constants.LINE_DELIM, "\n");
    try {
        rowSeparator = Byte.parseByte(rowSeparatorString);
    } catch (NumberFormatException e) {
        rowSeparator = rowSeparatorString.charAt(0);
    }

    final int finalRowSeparator = rowSeparator;
    FileSystem fs = outPath.getFileSystem(jc);
    final OutputStream outStream = Utilities.createCompressedStream(jc, fs.create(outPath), isCompressed);
    final byte[] key = "key".getBytes();
    final byte[] split = "\t".getBytes();
    return new RecordWriter() {
        public void write(Writable r) throws IOException {
            if (r instanceof Text) {
                Text tr = (Text) r;
                outStream.write(key);
                outStream.write(split);
                outStream.write(tr.getBytes(), 0, tr.getLength());
                outStream.write(finalRowSeparator);
            } else {
                // DynamicSerDe always writes out BytesWritable
                BytesWritable bw = (BytesWritable) r;
                outStream.write(bw.get(), 0, bw.getSize());
                outStream.write(finalRowSeparator);
            }
        }

        public void close(boolean abort) throws IOException {
            outStream.close();
        }
    };
}
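
Note how the row separator is resolved: the LINE_DELIM table property is first parsed as a numeric byte value (for example "10" for a newline), and only if that parse fails is its first character used literally.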

From source file:tv.icntv.grade.film.dbcollect.db.TableDBReducer.java

License:Apache License

@Override
protected void reduce(Text key, Iterable<Text> values, Context context)
        throws IOException, InterruptedException {
    if (null == key) {
        return;
    }
    List<FilmMsg> list = loadThread.execute(new DBLoad(new String[] { key.toString() }, key.toString()));
    if (null == list || list.isEmpty()) {
        return;
    }

    // the reducer reuses key, so only the first getLength() bytes are valid row-key data
    byte[] row = Arrays.copyOf(key.getBytes(), key.getLength());
    Put put = new Put(row);
    put.add(Bytes.toBytes("base"), Bytes.toBytes("msg"), Bytes.toBytes(JSON.toJSONString(list)));
    context.write(new ImmutableBytesWritable(row), put);
}

From source file:tv.icntv.log.stb.cdnserver.CdnServerParserMaper.java

License:CDDL License

public static Text transformTextToUTF8(Text text, String encoding) {
    String value = null;
    try {
        // decode only the first getLength() bytes of the backing array
        value = new String(text.getBytes(), 0, text.getLength(), encoding);
    } catch (UnsupportedEncodingException e) {
        // rethrow instead of printStackTrace(); returning new Text(null) below would throw a NullPointerException
        throw new RuntimeException("unsupported encoding: " + encoding, e);
    }
    return new Text(value);
}