Example usage for org.apache.commons.codec.binary Base64 encodeBase64URLSafeString

Introduction

This page collects example usages of org.apache.commons.codec.binary.Base64#encodeBase64URLSafeString from open-source projects.

Prototype

public static String encodeBase64URLSafeString(final byte[] binaryData) 

Document

Encodes binary data using a URL-safe variation of the Base64 algorithm: '-' and '_' are emitted instead of '+' and '/', no '=' padding is added, and the output is not chunked.
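
As a quick orientation (not taken from any of the source files below), a minimal sketch comparing the standard and URL-safe encoders; the input bytes are chosen so the standard output contains '+', '/' and padding:

import org.apache.commons.codec.binary.Base64;

public class UrlSafeBase64Demo {
    public static void main(String[] args) {
        byte[] data = new byte[] { (byte) 0xFB, (byte) 0xEF, (byte) 0xFE, 0x01 };

        // Standard alphabet with padding: "++/+AQ=="
        System.out.println(Base64.encodeBase64String(data));
        // URL-safe alphabet, no padding: "--_-AQ"
        System.out.println(Base64.encodeBase64URLSafeString(data));
    }
}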

Usage

From source file:org.apache.abdera2.common.security.HashHelper.java

public static String sig(PrivateKey key, String alg, byte[] mat) {
    try {
        Signature sig = Signature.getInstance(alg);
        sig.initSign((PrivateKey) key);
        sig.update(mat);
        byte[] dat = sig.sign();
        return Base64.encodeBase64URLSafeString(dat);
    } catch (Throwable t) {
        throw ExceptionHelper.propogate(t);
    }
}
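
A hedged sketch of how a caller might drive a signing helper like the one above, using an illustrative RSA key pair and the "SHA256withRSA" algorithm (neither is taken from the Abdera source), and verifying the token on the other side:

import java.nio.charset.StandardCharsets;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.Signature;
import org.apache.commons.codec.binary.Base64;

public class SigCallerSketch {
    public static void main(String[] args) throws Exception {
        // Illustrative key pair; a real caller would load its own key material.
        KeyPairGenerator gen = KeyPairGenerator.getInstance("RSA");
        gen.initialize(2048);
        KeyPair pair = gen.generateKeyPair();

        byte[] material = "payload".getBytes(StandardCharsets.UTF_8);

        // Same steps as the helper: sign, then URL-safe Base64-encode.
        Signature signer = Signature.getInstance("SHA256withRSA");
        signer.initSign(pair.getPrivate());
        signer.update(material);
        String token = Base64.encodeBase64URLSafeString(signer.sign());

        // decodeBase64 accepts the URL-safe alphabet, so the token round-trips.
        Signature verifier = Signature.getInstance("SHA256withRSA");
        verifier.initVerify(pair.getPublic());
        verifier.update(material);
        System.out.println(verifier.verify(Base64.decodeBase64(token))); // true
    }
}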

From source file:org.apache.abdera2.common.security.HashHelper.java

public static String hmac(Key key, String alg, byte[] mat) {
    try {
        Mac mac = Mac.getInstance(alg);
        mac.init(key);
        mac.update(mat, 0, mat.length);
        byte[] sig = mac.doFinal();
        return Base64.encodeBase64URLSafeString(sig);
    } catch (Throwable t) {
        throw ExceptionHelper.propogate(t);
    }
}
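
A comparable hedged sketch for the HMAC variant, with an illustrative hard-coded key and the "HmacSHA256" algorithm (both assumptions, not part of the Abdera source):

import java.nio.charset.StandardCharsets;
import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;
import org.apache.commons.codec.binary.Base64;

public class HmacCallerSketch {
    public static void main(String[] args) throws Exception {
        // Illustrative key; real callers would use proper key management.
        SecretKeySpec key = new SecretKeySpec(
                "0123456789abcdef0123456789abcdef".getBytes(StandardCharsets.UTF_8), "HmacSHA256");

        Mac mac = Mac.getInstance("HmacSHA256");
        mac.init(key);
        byte[] tag = mac.doFinal("message".getBytes(StandardCharsets.UTF_8));

        // The URL-safe form is convenient when the tag travels in a URL or file name.
        System.out.println(Base64.encodeBase64URLSafeString(tag));
    }
}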

From source file:org.apache.ambari.view.hive.resources.jobs.Aggregator.java

protected JobImpl atsOnlyJob(HiveQueryId atsHiveQuery, TezDagId atsTezDag) {
    JobImpl atsJob = new JobImpl();
    atsJob.setId(atsHiveQuery.entity);
    fillAtsJobFields(atsJob, atsHiveQuery, atsTezDag);

    String query = atsHiveQuery.query;
    atsJob.setTitle(query.substring(0, (query.length() > 42) ? 42 : query.length()));

    atsJob.setQueryFile("fakefile://" + Base64.encodeBase64URLSafeString(query.getBytes())); // fake queryFile
    return atsJob;
}
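
The encoded query can be recovered from such a fake URI because decodeBase64 accepts the URL-safe alphabet; a hedged sketch of the reverse step (the explicit UTF-8 charset is an assumption; the snippet above uses the platform default):

import java.nio.charset.StandardCharsets;
import org.apache.commons.codec.binary.Base64;

public class QueryFileSketch {
    public static void main(String[] args) {
        String query = "SELECT * FROM t";
        String fakeUri = "fakefile://"
                + Base64.encodeBase64URLSafeString(query.getBytes(StandardCharsets.UTF_8));

        // Strip the scheme and decode the URL-safe payload back to the query text.
        String encoded = fakeUri.substring("fakefile://".length());
        String recovered = new String(Base64.decodeBase64(encoded), StandardCharsets.UTF_8);
        System.out.println(recovered.equals(query)); // true
    }
}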

From source file:org.apache.ambari.view.hive.resources.jobs.Aggregator.java

protected static String hexStringToUrlSafeBase64(String hexString) {
    byte[] decoded = new byte[hexString.length() / 2];

    for (int i = 0; i < hexString.length(); i += 2) {
        decoded[i / 2] = (byte) Integer
                .parseInt(String.format("%c%c", hexString.charAt(i), hexString.charAt(i + 1)), 16);
    }
    return Base64.encodeBase64URLSafeString(decoded);
}
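
commons-codec also ships a Hex decoder that could replace the manual byte-pair parsing above; a hedged sketch assuming org.apache.commons.codec.binary.Hex is available on the classpath:

import org.apache.commons.codec.DecoderException;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.codec.binary.Hex;

public class HexToUrlSafeBase64Sketch {
    static String hexStringToUrlSafeBase64(String hexString) throws DecoderException {
        // Hex.decodeHex performs the pairwise parsing done by hand in the loop above.
        byte[] decoded = Hex.decodeHex(hexString.toCharArray());
        return Base64.encodeBase64URLSafeString(decoded);
    }

    public static void main(String[] args) throws DecoderException {
        System.out.println(hexStringToUrlSafeBase64("deadbeef")); // URL-safe form of 0xDEADBEEF
    }
}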

From source file:org.apache.cloudstack.framework.jobs.impl.JobSerializerHelper.java

public static String toObjectSerializedString(Serializable object) {
    assert (object != null);

    ByteArrayOutputStream bs = new ByteArrayOutputStream();
    try {
        ObjectOutputStream os = new ObjectOutputStream(bs);
        os.writeObject(object);
        os.close();
        bs.close();

        return Base64.encodeBase64URLSafeString(bs.toByteArray());
    } catch (IOException e) {
        throw new CloudRuntimeException("Unable to serialize: " + object, e);
    }
}
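
For the reverse direction, a hedged sketch of a hypothetical decode helper (the method name is illustrative, not CloudStack's API); decodeBase64 handles the URL-safe alphabet that the encoder above emits:

import java.io.ByteArrayInputStream;
import java.io.ObjectInputStream;
import java.io.Serializable;
import org.apache.commons.codec.binary.Base64;

public class ObjectCodecSketch {
    // Hypothetical helper: decodes the URL-safe string and deserializes the object.
    static Serializable deserialize(String encoded) throws Exception {
        byte[] bytes = Base64.decodeBase64(encoded);
        try (ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(bytes))) {
            return (Serializable) in.readObject();
        }
    }
}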

From source file:org.apache.cloudstack.framework.security.keys.KeysManagerImpl.java

private static String getBase64EncodedRandomKey(int nBits) {
    SecureRandom random;
    try {
        random = SecureRandom.getInstance("SHA1PRNG");
        byte[] keyBytes = new byte[nBits / 8];
        random.nextBytes(keyBytes);
        return Base64.encodeBase64URLSafeString(keyBytes);
    } catch (NoSuchAlgorithmException e) {
        s_logger.error("Unhandled exception: ", e);
    }
    return null;
}
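
The same idea works as a standalone token generator; a hedged sketch that uses the JDK's default SecureRandom constructor instead of requesting "SHA1PRNG" by name (that substitution is an assumption, not part of the CloudStack source):

import java.security.SecureRandom;
import org.apache.commons.codec.binary.Base64;

public class RandomKeySketch {
    public static void main(String[] args) {
        SecureRandom random = new SecureRandom();
        byte[] keyBytes = new byte[128 / 8]; // 128-bit key
        random.nextBytes(keyBytes);
        // URL-safe output can be dropped straight into URLs or config files.
        System.out.println(Base64.encodeBase64URLSafeString(keyBytes));
    }
}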

From source file:org.apache.crunch.types.avro.Avros.java

public static PType<Union> unionOf(PType<?>... ptypes) {
    List<Schema> schemas = Lists.newArrayList();
    MessageDigest md;
    try {
        md = MessageDigest.getInstance("MD5");
    } catch (NoSuchAlgorithmException e) {
        throw new RuntimeException(e);
    }
    for (int i = 0; i < ptypes.length; i++) {
        AvroType atype = (AvroType) ptypes[i];
        Schema schema = atype.getSchema();
        if (!schemas.contains(schema)) {
            schemas.add(schema);
            md.update(schema.toString().getBytes(Charsets.UTF_8));
        }
    }
    List<Schema.Field> fields = Lists.newArrayList(new Schema.Field("index", Schema.create(Type.INT), "", null),
            new Schema.Field("value", Schema.createUnion(schemas), "", null));

    String schemaName = "union" + Base64.encodeBase64URLSafeString(md.digest()).replace('-', 'x');
    Schema schema = Schema.createRecord(schemaName, "", "crunch", false);
    schema.setFields(fields);
    return new AvroType<Union>(Union.class, schema, new UnionRecordToTuple(ptypes),
            new TupleToUnionRecord(schema, ptypes), new UnionDeepCopier(ptypes), null, ptypes);
}
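
Note the replace('-', 'x') applied to the encoded digest: Avro record names may only contain letters, digits and underscores, so the '_' produced by the URL-safe alphabet is legal in a schema name while '-' is not; substituting 'x' keeps the digest-derived name valid. The same trick appears in createTupleSchema below.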

From source file:org.apache.crunch.types.avro.Avros.java

private static Schema createTupleSchema(String tupleName, String[] fieldNames, PType<?>[] ptypes)
        throws RuntimeException {
    // Guarantee each tuple schema has a globally unique name
    List<Schema.Field> fields = Lists.newArrayList();
    MessageDigest md;
    try {
        md = MessageDigest.getInstance("MD5");
    } catch (NoSuchAlgorithmException e) {
        throw new RuntimeException(e);
    }
    for (int i = 0; i < ptypes.length; i++) {
        AvroType atype = (AvroType) ptypes[i];
        Schema fieldSchema = allowNulls(atype.getSchema());
        fields.add(new Schema.Field(fieldNames[i], fieldSchema, "", null));
        md.update(fieldNames[i].getBytes(Charsets.UTF_8));
        md.update(fieldSchema.toString().getBytes(Charsets.UTF_8));
    }
    String schemaName, schemaNamespace;
    if (tupleName.isEmpty()) {
        schemaName = "tuple" + Base64.encodeBase64URLSafeString(md.digest()).replace('-', 'x');
        schemaNamespace = "crunch";
    } else {
        int splitIndex = tupleName.lastIndexOf('.');
        if (splitIndex == -1) {
            schemaName = tupleName;
            schemaNamespace = "crunch";
        } else {
            schemaName = tupleName.substring(splitIndex + 1);
            schemaNamespace = tupleName.substring(0, splitIndex);
        }
    }
    Schema schema = Schema.createRecord(schemaName, "", schemaNamespace, false);
    schema.setFields(fields);
    return schema;
}

From source file:org.apache.falcon.hive.util.EventSourcerUtils.java

public static void updateEventMetadata(ReplicationEventMetadata data, final String dbName,
        final String tableName, final String srcFilename, final String tgtFilename) {
    if (data == null || data.getEventFileMetadata() == null) {
        return;
    }
    StringBuilder key = new StringBuilder();

    if (StringUtils.isNotEmpty(dbName)) {
        key.append(Base64.encodeBase64URLSafeString(dbName.toLowerCase().getBytes()));
    }
    key.append(DelimiterUtils.FIELD_DELIM);
    if (StringUtils.isNotEmpty(tableName)) {
        key.append(Base64.encodeBase64URLSafeString(tableName.toLowerCase().getBytes()));
    }

    StringBuilder value = new StringBuilder();
    if (StringUtils.isNotEmpty(srcFilename)) {
        value.append(srcFilename);
    }
    value.append(DelimiterUtils.FIELD_DELIM);

    if (StringUtils.isNotEmpty(tgtFilename)) {
        value.append(tgtFilename);
    }

    data.getEventFileMetadata().put(key.toString(), value.toString());
}
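
A hedged sketch of the read side, splitting such a key and decoding each field; the concrete delimiter value lives in Falcon's DelimiterUtils.FIELD_DELIM and is passed in here rather than guessed:

import java.nio.charset.StandardCharsets;
import java.util.regex.Pattern;
import org.apache.commons.codec.binary.Base64;

public class EventKeySketch {
    // Hypothetical helper: splits "<base64(db)><delim><base64(table)>" and decodes
    // each non-empty field back to its lower-cased name.
    static String[] decodeKey(String key, String fieldDelim) {
        String[] parts = key.split(Pattern.quote(fieldDelim), -1);
        String[] decoded = new String[parts.length];
        for (int i = 0; i < parts.length; i++) {
            decoded[i] = parts[i].isEmpty()
                    ? ""
                    : new String(Base64.decodeBase64(parts[i]), StandardCharsets.UTF_8);
        }
        return decoded;
    }
}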

From source file:org.apache.giraph.rexster.utils.RexsterUtils.java

/**
 * Provide the Authentication string used for the HTTP connection with
 * Rexster.
 *
 * @param  username   username to connect to HTTP
 * @param  password   password to connect to HTTP
 * @return String     the authentication string
 */
private static String getHTTPAuthString(String username, String password) {

    if (username.isEmpty()) {
        return null;
    } else {
        return "Basic " + Base64
                .encodeBase64URLSafeString((username + ":" + password).getBytes(Charset.forName("UTF-8")));
    }
}
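
Worth noting: HTTP Basic authentication (RFC 7617) is defined in terms of the standard Base64 alphabet with padding, so the URL-safe, unpadded string produced above may differ from the canonical header value and may not be accepted by every server. A hedged sketch of the conventional form, using the standard encoder:

import java.nio.charset.StandardCharsets;
import org.apache.commons.codec.binary.Base64;

public class BasicAuthSketch {
    static String basicAuthHeader(String username, String password) {
        // RFC 7617 Basic credentials use standard Base64 (with '=' padding kept).
        byte[] credentials = (username + ":" + password).getBytes(StandardCharsets.UTF_8);
        return "Basic " + Base64.encodeBase64String(credentials);
    }

    public static void main(String[] args) {
        System.out.println(basicAuthHeader("user", "s3cr3t"));
    }
}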