Example usage for org.apache.hadoop.util StringUtils hexStringToByte

Introduction

On this page you can find example usages of org.apache.hadoop.util.StringUtils.hexStringToByte.

Prototype

public static byte[] hexStringToByte(String hex) 

Document

Given a hex string, this returns the byte array corresponding to the string.
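
For orientation, here is a minimal round-trip sketch. It uses only the two public helpers hexStringToByte and its inverse byteToHexString; the input value is illustrative.

import org.apache.hadoop.util.StringUtils;

public class HexRoundTrip {
    public static void main(String[] args) {
        // "deadbeef" decodes to the four bytes DE AD BE EF.
        byte[] bytes = StringUtils.hexStringToByte("deadbeef");
        // byteToHexString is the inverse helper and restores the original string.
        System.out.println(StringUtils.byteToHexString(bytes)); // prints "deadbeef"
    }
}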

Usage

From source file: cascalog.TupleMemoryInputFormat.java

License: Open Source License

public static Object getObject(JobConf conf, String key) {
    String s = conf.get(key);
    if (s == null)
        return null;
    byte[] val = StringUtils.hexStringToByte(s);
    return deserialize(val);
}
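
Only the read path is shown above. A plausible write-side counterpart (hypothetical; serialize is assumed to be the project's mirror of the deserialize helper used above) would hex-encode the serialized object with the inverse helper byteToHexString:

public static void setObject(JobConf conf, String key, Object o) {
    // Hypothetical mirror of getObject: serialize(...) is assumed to be the
    // project's counterpart of deserialize(...).
    conf.set(key, StringUtils.byteToHexString(serialize(o)));
}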

From source file: ch.cern.db.hdfs.DistributedFileSystemMetadata.java

License: GNU General Public License

/**
 * Returns a disk id (0-based) index from the Hdfs VolumeId object. There is
 * currently no public API to get at the volume id. We'll have to get it by
 * accessing the internals.
 */
public static int getDiskId(VolumeId hdfsVolumeId) {
    // Initialize the diskId as -1 to indicate it is unknown
    int diskId = -1;

    if (hdfsVolumeId != null) {
        String volumeIdString = hdfsVolumeId.toString();

        byte[] volumeIdBytes = StringUtils.hexStringToByte(volumeIdString);
        if (volumeIdBytes != null && volumeIdBytes.length == 4) {
            diskId = Utils.toInt(volumeIdBytes);
        } else if (volumeIdBytes != null && volumeIdBytes.length == 1) { // null-check mirrors the branch above
            diskId = (int) volumeIdBytes[0]; // support hadoop-2.0.2
        }
    }

    return diskId;
}
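
Utils.toInt here is a project-local helper, not part of Hadoop. A reasonable reading of it (an assumption, not the project's verified code) is a big-endian conversion of the four decoded bytes:

// Assumed behavior of the project-local Utils.toInt: big-endian byte[4] -> int.
public static int toInt(byte[] bytes) {
    return ((bytes[0] & 0xff) << 24)
            | ((bytes[1] & 0xff) << 16)
            | ((bytes[2] & 0xff) << 8)
            | (bytes[3] & 0xff);
}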

From source file: com.bigstep.datalake.JsonUtil.java

License: Apache License

/** Convert a Json map to a MD5MD5CRC32FileChecksum. */
public static MD5MD5CRC32FileChecksum toMD5MD5CRC32FileChecksum(final Map<?, ?> json) throws IOException {
    if (json == null) {
        return null;
    }

    final Map<?, ?> m = (Map<?, ?>) json.get(FileChecksum.class.getSimpleName());
    final String algorithm = (String) m.get("algorithm");
    final int length = ((Number) m.get("length")).intValue();
    final byte[] bytes = StringUtils.hexStringToByte((String) m.get("bytes"));

    final DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes));
    final DataChecksum.Type crcType = MD5MD5CRC32FileChecksum.getCrcTypeFromAlgorithmName(algorithm);
    final MD5MD5CRC32FileChecksum checksum;

    // Recreate what DFSClient would have returned.
    switch (crcType) {
    case CRC32:
        checksum = new MD5MD5CRC32GzipFileChecksum();
        break;
    case CRC32C:
        checksum = new MD5MD5CRC32CastagnoliFileChecksum();
        break;
    default:
        throw new IOException("Unknown algorithm: " + algorithm);
    }
    checksum.readFields(in);

    //check algorithm name
    if (!checksum.getAlgorithmName().equals(algorithm)) {
        throw new IOException(
                "Algorithm not matched. Expected " + algorithm + ", Received " + checksum.getAlgorithmName());
    }
    //check length
    if (length != checksum.getLength()) {
        throw new IOException(
                "Length not matched: length=" + length + ", checksum.getLength()=" + checksum.getLength());
    }

    return checksum;
}
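
A minimal invocation sketch follows. The field values are fabricated for illustration (bytesPerCRC = 512, crcPerBlock = 0, an all-zero MD5), chosen only so the algorithm-name and length checks above pass; a real map would come from a parsed WebHDFS GETFILECHECKSUM response.

// Illustrative input: a 4-byte bytesPerCRC (512), an 8-byte crcPerBlock (0),
// then 16 MD5 bytes (all zero here) -- 28 bytes, i.e. 56 hex characters.
Map<String, Object> fileChecksum = new HashMap<>();
fileChecksum.put("algorithm", "MD5-of-0MD5-of-512CRC32");
fileChecksum.put("length", 28);
fileChecksum.put("bytes", "00000200" + "0000000000000000"
        + "00000000000000000000000000000000");
Map<String, Object> json = new HashMap<>();
json.put("FileChecksum", fileChecksum);
MD5MD5CRC32FileChecksum checksum = toMD5MD5CRC32FileChecksum(json);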

From source file: com.pigai.hadoop.HttpFSFileSystem.java

License: Apache License

@Override
public FileChecksum getFileChecksum(Path f) throws IOException {
    Map<String, String> params = new HashMap<String, String>();
    params.put(OP_PARAM, Operation.GETFILECHECKSUM.toString());
    HttpURLConnection conn = getConnection(Operation.GETFILECHECKSUM.getMethod(), params, f, true);
    validateResponse(conn, HttpURLConnection.HTTP_OK);
    final JSONObject json = (JSONObject) ((JSONObject) jsonParse(conn)).get(FILE_CHECKSUM_JSON);
    return new FileChecksum() {
        @Override
        public String getAlgorithmName() {
            return (String) json.get(CHECKSUM_ALGORITHM_JSON);
        }

        @Override
        public int getLength() {
            return ((Long) json.get(CHECKSUM_LENGTH_JSON)).intValue();
        }

        @Override
        public byte[] getBytes() {
            return StringUtils.hexStringToByte((String) json.get(CHECKSUM_BYTES_JSON));
        }

        public void write(DataOutput out) throws IOException {
            throw new UnsupportedOperationException();
        }

        public void readFields(DataInput in) throws IOException {
            throw new UnsupportedOperationException();
        }
    };
}
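
A caller-side sketch; the filesystem URI, port, and path are placeholders, not values taken from this source:

// Hypothetical caller: the URI and path are illustrative only.
Configuration conf = new Configuration();
FileSystem fs = FileSystem.get(URI.create("webhdfs://host:14000"), conf);
FileChecksum checksum = fs.getFileChecksum(new Path("/tmp/example.txt"));
// Round-trip the checksum bytes back to hex for display.
System.out.println(checksum.getAlgorithmName() + " "
        + StringUtils.byteToHexString(checksum.getBytes()));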

From source file: com.vertica.hadoop.VerticaConfiguration.java

License: Apache License

/**
 * Return static input parameters if set
 *
 * @return Collection of list of objects representing input parameters
 * @throws IOException
 */
public Collection<List<Object>> getInputParameters() throws IOException {
    Collection<List<Object>> values = null;
    String[] query_params = conf.getStrings(QUERY_PARAMS_PROP);
    if (query_params != null) {
        values = new ArrayList<List<Object>>();
        for (String str_params : query_params) {
            DataInputBuffer in = new DataInputBuffer();
            byte[] data = StringUtils.hexStringToByte(str_params);
            // Use the decoded byte length; the hex string is twice as long as the data.
            in.reset(data, data.length);
            int sz = in.readInt();
            ArrayList<Object> params = new ArrayList<Object>();
            for (int count = 0; count < sz; count++) {
                int type = in.readInt();
                params.add(VerticaRecord.readField(type, in));
            }
            values.add(params);
        }
    }
    return values;
}
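
Only the decode side appears in this snippet. A write-side sketch (hypothetical; a plain int stands in for the typed VerticaRecord fields read above) shows how such a hex parameter string could be produced:

// Hypothetical encoder mirroring the read path: an int count, then the fields.
// Real parameters would be written in VerticaRecord's typed field format.
DataOutputBuffer out = new DataOutputBuffer();
out.writeInt(1);  // one field follows
out.writeInt(42); // stand-in payload
String hex = StringUtils.byteToHexString(out.getData(), 0, out.getLength());
conf.setStrings(QUERY_PARAMS_PROP, hex);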

From source file: org.kiji.mapreduce.RMWBulkImporter.java

License: Apache License

/** {@inheritDoc} */
@Override
public void produce(LongWritable inputKey, Text value, KijiTableContext context) throws IOException {
    // Input line must be formatted as: "start-key:end-key".
    // The start and end keys are encoded in hexadecimal.
    final String line = value.toString();
    final String[] split = line.split(":", 2);
    Preconditions.checkState(split.length == 2,
            String.format("Unable to process input line specifying the row-key range: '%s'.", line));
    final String hexStartRowKey = split[0];
    final String hexEndRowKey = split[1];

    LOG.info("Scanning table '{}' from start-key: '{}' to end-key: '{}'.", mTable.getURI(), hexStartRowKey,
            hexEndRowKey);

    final EntityId startRowKey = HBaseEntityId.fromHBaseRowKey(StringUtils.hexStringToByte(hexStartRowKey));
    final EntityId endRowKey = HBaseEntityId.fromHBaseRowKey(StringUtils.hexStringToByte(hexEndRowKey));

    // This data request must be adapted to your table:
    final KijiDataRequest dataRequest = KijiDataRequest.builder()
            .addColumns(ColumnsDef.create().addFamily("info")).build();

    final KijiRowScanner scanner = mReader.getScanner(dataRequest,
            new KijiScannerOptions().setStartRow(startRowKey).setStopRow(endRowKey));
    try {
        for (KijiRowData inputRow : scanner) {
            LOG.debug("Processing input row with key '{}'.", inputRow.getEntityId());
            processInputRow(inputRow, context);

            // Report progress to the task tracker:
            // this is necessary to prevent the task tracker from killing the task by timeout.
            context.progress();
        }

    } finally {
        scanner.close();
    }
}
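
The "start-key:end-key" input lines this importer consumes can be produced with the inverse helper; a minimal sketch with illustrative row keys:

// Build one input line from raw HBase row keys.
byte[] startKey = new byte[] { 0x00, 0x01 };
byte[] endKey = new byte[] { 0x7f, (byte) 0xff };
String line = StringUtils.byteToHexString(startKey) + ":"
        + StringUtils.byteToHexString(endKey);
// line is "0001:7fff", matching the format parsed in produce() above.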