Example usage for org.apache.commons.codec.binary Base64 encodeBase64URLSafeString

Introduction

On this page you can find example usages of org.apache.commons.codec.binary.Base64.encodeBase64URLSafeString, drawn from real-world projects.

Prototype

public static String encodeBase64URLSafeString(final byte[] binaryData) 

Document

Encodes binary data using a URL-safe variation of the base64 algorithm but does not chunk the output. The URL-safe variation emits - and _ instead of + and /, and no padding is added.
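
Because - and _ replace + and /, and no = padding is appended, the encoded string can be embedded in URLs, file names, and dot-separated tokens without further escaping. A minimal, self-contained sketch of the round trip (not taken from any of the projects below; the input string is only an illustration):

import java.nio.charset.StandardCharsets;

import org.apache.commons.codec.binary.Base64;

public class UrlSafeBase64Example {
    public static void main(String[] args) {
        byte[] data = "any binary payload".getBytes(StandardCharsets.UTF_8);

        // Encode without '+', '/' or '=' so the result is URL-safe as-is.
        String encoded = Base64.encodeBase64URLSafeString(data);
        System.out.println(encoded);

        // decodeBase64 accepts both the standard and the URL-safe alphabet,
        // so the same call recovers the original bytes.
        byte[] decoded = Base64.decodeBase64(encoded);
        System.out.println(new String(decoded, StandardCharsets.UTF_8));
    }
}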

Usage

From source file:org.apache.hadoop.crypto.key.kms.server.KMSServerJSONUtils.java

@SuppressWarnings("unchecked")
public static Map toJSON(KeyProvider.KeyVersion keyVersion) {
    Map json = new LinkedHashMap();
    if (keyVersion != null) {
        json.put(KMSRESTConstants.NAME_FIELD, keyVersion.getName());
        json.put(KMSRESTConstants.VERSION_NAME_FIELD, keyVersion.getVersionName());
        json.put(KMSRESTConstants.MATERIAL_FIELD, Base64.encodeBase64URLSafeString(keyVersion.getMaterial()));
    }
    return json;
}

From source file:org.apache.hadoop.crypto.key.kms.server.KMSServerJSONUtils.java

@SuppressWarnings("unchecked")
public static Map toJSON(EncryptedKeyVersion encryptedKeyVersion) {
    Map json = new LinkedHashMap();
    if (encryptedKeyVersion != null) {
        json.put(KMSRESTConstants.VERSION_NAME_FIELD, encryptedKeyVersion.getEncryptionKeyVersionName());
        json.put(KMSRESTConstants.IV_FIELD,
                Base64.encodeBase64URLSafeString(encryptedKeyVersion.getEncryptedKeyIv()));
        json.put(KMSRESTConstants.ENCRYPTED_KEY_VERSION_FIELD,
                toJSON(encryptedKeyVersion.getEncryptedKeyVersion()));
    }
    return json;
}
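
Both KMS helpers emit raw key bytes (material and IV) as URL-safe base64 so the values travel cleanly in JSON over a REST API. A client would reverse the encoding with decodeBase64, which understands the URL-safe alphabet; a hypothetical client-side sketch, reusing the field constant from the helper above:

// Hypothetical client-side helper: recover the raw key bytes from the JSON
// map produced by toJSON(KeyProvider.KeyVersion) above.
static byte[] decodeMaterial(java.util.Map json) {
    String encoded = (String) json.get(KMSRESTConstants.MATERIAL_FIELD);
    // decodeBase64 handles the URL-safe alphabet, so no translation is needed.
    return Base64.decodeBase64(encoded);
}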

From source file:org.apache.hadoop.gateway.encrypturi.impl.EncryptUriProcessor.java

private String encode(String string) throws UnsupportedEncodingException {
    EncryptionResult result = cryptoService.encryptForCluster(clusterName, EncryptUriDescriptor.PASSWORD_ALIAS,
            string.getBytes("UTF-8"));
    string = Base64.encodeBase64URLSafeString(result.toByteAray());
    return string;
}

From source file:org.apache.hadoop.gateway.securequery.SecureQueryEncryptProcessor.java

private String encode(String string) throws UnsupportedEncodingException {
    EncryptionResult result = cryptoService.encryptForCluster(clusterName, "encryptQueryString",
            string.getBytes("UTF-8"));
    string = Base64.encodeBase64URLSafeString(result.toByteAray());
    return string;
}

From source file:org.apache.hadoop.gateway.services.registry.impl.DefaultServiceRegistryService.java

public String getRegistrationCode(String clusterName) {
    String code = generateRegCode(16);
    byte[] signature = crypto.sign("SHA256withRSA", "gateway-identity", code);
    String encodedSig = Base64.encodeBase64URLSafeString(signature);

    return code + "::" + encodedSig;
}

From source file:org.apache.hadoop.gateway.services.security.token.impl.JWTToken.java

public String getPayloadToSign() {
    StringBuffer sb = new StringBuffer();
    try {
        sb.append(Base64.encodeBase64URLSafeString(header.getBytes("UTF-8")));
        sb.append(".");
        sb.append(Base64.encodeBase64URLSafeString(claims.getBytes("UTF-8")));
    } catch (UnsupportedEncodingException e) {
        log.unsupportedEncoding(e);
    }

    return sb.toString();
}

From source file:org.apache.hadoop.gateway.services.security.token.impl.JWTToken.java

public String toString() {
    StringBuffer sb = new StringBuffer();
    try {
        sb.append(Base64.encodeBase64URLSafeString(header.getBytes("UTF-8")));
        sb.append(".");
        sb.append(Base64.encodeBase64URLSafeString(claims.getBytes("UTF-8")));
        sb.append(".");
        sb.append(Base64.encodeBase64URLSafeString(payload));
    } catch (UnsupportedEncodingException e) {
        log.unsupportedEncoding(e);
    }

    log.renderingJWTTokenForTheWire(sb.toString());

    return sb.toString();
}

From source file:org.apache.hadoop.hive.metastore.hbase.HBaseReadWrite.java

private String printOnePartition(Result result) throws IOException, TException {
    byte[] key = result.getRow();
    HBaseUtils.StorageDescriptorParts sdParts = HBaseUtils.deserializePartition(key,
            result.getValue(CATALOG_CF, CATALOG_COL), this);
    StringBuilder builder = new StringBuilder();
    builder.append(dumpThriftObject(sdParts.containingPartition)).append(" sdHash: ")
            .append(Base64.encodeBase64URLSafeString(sdParts.sdHash)).append(" stats:");
    NavigableMap<byte[], byte[]> statsCols = result.getFamilyMap(STATS_CF);
    for (Map.Entry<byte[], byte[]> statsCol : statsCols.entrySet()) {
        builder.append(" column ").append(new String(statsCol.getKey(), HBaseUtils.ENCODING)).append(": ");
        ColumnStatistics pcs = buildColStats(key, false);
        ColumnStatisticsObj cso = HBaseUtils.deserializeStatsForOneColumn(pcs, statsCol.getValue());
        builder.append(dumpThriftObject(cso));
    }
    return builder.toString();
}

From source file:org.apache.hadoop.hive.metastore.hbase.HBaseReadWrite.java

private String printOneTable(Result result) throws IOException, TException {
    byte[] key = result.getRow();
    HBaseUtils.StorageDescriptorParts sdParts = HBaseUtils.deserializeTable(key,
            result.getValue(CATALOG_CF, CATALOG_COL));
    StringBuilder builder = new StringBuilder();
    builder.append(dumpThriftObject(sdParts.containingTable)).append(" sdHash: ")
            .append(Base64.encodeBase64URLSafeString(sdParts.sdHash)).append(" stats:");
    NavigableMap<byte[], byte[]> statsCols = result.getFamilyMap(STATS_CF);
    for (Map.Entry<byte[], byte[]> statsCol : statsCols.entrySet()) {
        builder.append(" column ").append(new String(statsCol.getKey(), HBaseUtils.ENCODING)).append(": ");
        ColumnStatistics pcs = buildColStats(key, true);
        ColumnStatisticsObj cso = HBaseUtils.deserializeStatsForOneColumn(pcs, statsCol.getValue());
        builder.append(dumpThriftObject(cso));
    }
    // Add the primary key
    List<SQLPrimaryKey> pk = getPrimaryKey(sdParts.containingTable.getDbName(),
            sdParts.containingTable.getTableName());
    if (pk != null && pk.size() > 0) {
        builder.append(" primary key: ");
        for (SQLPrimaryKey pkcol : pk)
            builder.append(dumpThriftObject(pkcol));
    }

    // Add any foreign keys
    List<SQLForeignKey> fks = getForeignKeys(sdParts.containingTable.getDbName(),
            sdParts.containingTable.getTableName());
    if (fks != null && fks.size() > 0) {
        builder.append(" foreign keys: ");
        for (SQLForeignKey fkcol : fks)
            builder.append(dumpThriftObject(fkcol));

    }
    return builder.toString();
}

From source file:org.apache.hadoop.hive.metastore.hbase.HBaseReadWrite.java

/**
 * Print out a storage descriptor.
 * @param hash hash that is the key of the storage descriptor
 * @return string version of the storage descriptor
 */
String printStorageDescriptor(byte[] hash) throws IOException, TException {
    byte[] serialized = read(SD_TABLE, hash, CATALOG_CF, CATALOG_COL);
    if (serialized == null)
        return noSuch(Base64.encodeBase64URLSafeString(hash), "storage descriptor");
    return dumpThriftObject(HBaseUtils.deserializeStorageDescriptor(serialized));
}