Example usage for org.apache.commons.codec.binary Hex encodeHex

Introduction

On this page you can find example usages of Hex.encodeHex from org.apache.commons.codec.binary.

Prototype

public static char[] encodeHex(byte[] data) 

Document

Converts an array of bytes into an array of characters representing the hexadecimal values of each byte in order.
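
Before looking at the project examples below, here is a minimal, self-contained sketch of a typical call. The class name HexEncodeExample and the sample input are illustrative only and are not taken from any of the projects listed under Usage.

import java.nio.charset.StandardCharsets;

import org.apache.commons.codec.binary.Hex;

public class HexEncodeExample {
    public static void main(String[] args) {
        byte[] data = "Hello".getBytes(StandardCharsets.UTF_8);

        // encodeHex(byte[]) returns a char[] twice as long as the input,
        // because each byte is rendered as two lowercase hexadecimal digits
        char[] hexChars = Hex.encodeHex(data);
        String hex = new String(hexChars);

        System.out.println(hex); // prints "48656c6c6f"
    }
}

The returned char[] is usually wrapped in new String(...) before logging or storage, which is the pattern most of the examples below follow.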

Usage

From source file:jenkins.bouncycastle.api.PEMEncodable.java

/**
 * Encodes a {@code byte[]} as a hex-formatted string "ab:cd:ef:...:12"
 *
 * @param data the bytes to be encoded
 * @return the hex-formatted string "ab:cd:ef:...:12"
 */
@Nonnull
private static String hexEncode(@Nonnull byte[] data) {
    char[] hex = Hex.encodeHex(data);
    // capacity: two hex chars per byte plus one ':' separator between each pair
    StringBuilder buf = new StringBuilder(hex.length + Math.max(0, hex.length / 2 - 1));
    for (int i = 0; i < hex.length; i += 2) {
        if (i > 0) {
            buf.append(':');
        }
        buf.append(hex, i, 2);
    }
    return buf.toString();
}

From source file:com.zimbra.common.util.ByteUtil.java

/**
 * Returns the digest of the supplied data.
 * @param algorithm e.g. "SHA1"
 * @param data data to digest
 * @param base64 if <tt>true</tt>, returns a base64 String; otherwise returns
 *  a hex string.
 * @return hex or base64 string
 */
public static String getDigest(String algorithm, byte[] data, boolean base64) {
    try {
        MessageDigest md = MessageDigest.getInstance(algorithm);
        byte[] digest = md.digest(data);
        if (base64)
            return encodeFSSafeBase64(digest);
        else
            return new String(Hex.encodeHex(digest));
    } catch (NoSuchAlgorithmException e) {
        // this should never happen unless the JDK is foobar
        //   e.printStackTrace();
        throw new RuntimeException(e);
    }
}

From source file:de.innovationgate.webgate.api.jdbc.filehandling.CS41FileAttachmentHandler.java

public void saveFileData(AttachFileOperation<CS41FileAttachmentEntity> op) throws WGAPIException {

    try {
        if (!op.isUpdateData()) {
            return;
        }

        // create digest for checksum computation
        MessageDigest digest = null;
        try {
            digest = MessageDigest.getInstance("MD5");
        } catch (NoSuchAlgorithmException e) {
            // ignore digest creation
        }

        Session session = _handling.getParent().getSession();

        CS41FileAttachmentEntity fileMeta = op.getEntity();
        InputStream in = new BufferedInputStream(new FileInputStream(fileMeta.getSourceFile()));
        // store file data
        // split the file into 64K parts and store each part
        int partnr = 0;
        byte[] buffer = new byte[CS41FileAttachmentHandler.ATTACHMENT_FILEPART_SIZE];
        int len = in.read(buffer);

        while (len > 0) {
            // create new file part
            AttachmentFilePart part = _entityDescriptor.createFilePart(fileMeta);
            part.setPartnr(partnr);

            Blob data = Hibernate.getLobCreator(session).createBlob(new ByteArrayInputStream(buffer, 0, len),
                    len);
            part.setData(data);
            // store file part
            session.save(part);
            session.flush();
            session.evict(part);
            // update md5 digest
            if (digest != null) {
                digest.update(buffer, 0, len);
            }
            // read next part from inputstream
            partnr++;
            len = in.read(buffer);
        }

        // store md5 sum as meta
        if (digest != null) {
            fileMeta.setChecksum(new String(Hex.encodeHex(digest.digest())));
            if (_handling.getParent().isSaveIsolationActive()) {
                session.update(fileMeta);
            }
        }
    } catch (Exception e) {
        throw new WGBackendException("Exception storing file data", e);
    }

}

From source file:com.zimbra.cs.account.ZimbraAuthToken.java

@Override
public String getEncoded() throws AuthTokenException {
    if (encoded == null) {
        StringBuilder encodedBuff = new StringBuilder(64);
        BlobMetaData.encodeMetaData(C_ID, accountId, encodedBuff);
        BlobMetaData.encodeMetaData(C_EXP, Long.toString(expires), encodedBuff);
        if (adminAccountId != null) {
            BlobMetaData.encodeMetaData(C_AID, adminAccountId, encodedBuff);
        }
        if (isAdmin) {
            BlobMetaData.encodeMetaData(C_ADMIN, "1", encodedBuff);
        }
        if (isDomainAdmin) {
            BlobMetaData.encodeMetaData(C_DOMAIN, "1", encodedBuff);
        }
        if (isDelegatedAdmin) {
            BlobMetaData.encodeMetaData(C_DLGADMIN, "1", encodedBuff);
        }
        if (validityValue != -1) {
            BlobMetaData.encodeMetaData(C_VALIDITY_VALUE, validityValue, encodedBuff);
        }
        BlobMetaData.encodeMetaData(C_TYPE, type, encodedBuff);

        if (authMech != null) {
            BlobMetaData.encodeMetaData(C_AUTH_MECH, authMech.name(), encodedBuff);
        }

        if (usage != null) {
            BlobMetaData.encodeMetaData(C_USAGE, usage.getCode(), encodedBuff);
        }
        BlobMetaData.encodeMetaData(C_TOKEN_ID, tokenID, encodedBuff);
        BlobMetaData.encodeMetaData(C_EXTERNAL_USER_EMAIL, externalUserEmail, encodedBuff);
        BlobMetaData.encodeMetaData(C_DIGEST, digest, encodedBuff);
        BlobMetaData.encodeMetaData(C_SERVER_VERSION, server_version, encodedBuff);
        if (this.csrfTokenEnabled) {
            BlobMetaData.encodeMetaData(C_CSRF, "1", encodedBuff);
        }

        // hex-encode the assembled metadata, sign it with an HMAC using the
        // current token key, and compose the final token as version_hmac_data
        String data = new String(Hex.encodeHex(encodedBuff.toString().getBytes()));
        AuthTokenKey key = getCurrentKey();
        String hmac = TokenUtil.getHmac(data, key.getKey());
        encoded = key.getVersion() + "_" + hmac + "_" + data;
    }
    return encoded;
}

From source file:edu.harvard.iq.dvn.ingest.statdataio.impl.plugins.por.PORFileReader.java

private File decodeHeader(BufferedInputStream stream) throws IOException {
    File tempPORfile = null;

    if (stream == null) {
        throw new IllegalArgumentException("stream == null!");
    }

    byte[] headerByes = new byte[POR_HEADER_SIZE];

    if (stream.markSupported()) {
        stream.mark(1000);
    }
    int nbytes = stream.read(headerByes, 0, POR_HEADER_SIZE);

    //printHexDump(headerByes, "hex dump of the byte-array");

    if (nbytes == 0) {
        throw new IOException("decodeHeader: reading failure");
    } else if (nbytes < 491) {
        // Size test: by definition, a POR file must have at least a
        // 491-byte header, i.e., a file smaller than this threshold
        // is not a POR file
        dbgLog.fine("this file is NOT spss-por type");
        throw new IllegalArgumentException("file is not spss-por type");
    }
    // rewind the current reading position back to the beginning
    if (stream.markSupported()) {
        stream.reset();
    }

    // line-terminating characters are usually one or two by definition;
    // however, a POR file saved by a genuine SPSS for Windows
    // had a three-character line terminator, i.e., it failed to remove the
    // original file's one-character terminator when it was opened, and
    // saved it with the default two-character terminator without
    // removing the original terminator, so we have to expect such a rare
    // case
    //
    // terminator
    // windows [0D0A]=>   [1310] = [CR/LF]
    // unix    [0A]  =>   [10]
    // mac     [0D]  =>   [13]
    // 3char  [0D0D0A]=> [131310] spss for windows rel 15
    //
    // terminating characters should be found at the following
    //                             column positions[counting from 0]:
    // unix    case: [0A]   : [80], [161], [242], [323], [404], [485]
    // windows case: [0D0A] : [81], [163], [245], [327], [409], [491]
    //           : [0D0D0A] : [82], [165], [248], [331], [414], [495]

    // convert headerByes into a ByteBuffer

    ByteBuffer buff = ByteBuffer.wrap(headerByes);
    byte[] nlch = new byte[36];
    int pos1;
    int pos2;
    int pos3;
    int ucase = 0;
    int wcase = 0;
    int mcase = 0;
    int three = 0;
    int nolines = 6;
    int nocols = 80;
    for (int i = 0; i < nolines; ++i) {
        int baseBias = nocols * (i + 1);
        // 1-char case
        pos1 = baseBias + i;
        buff.position(pos1);
        dbgLog.finer("\tposition(1)=" + buff.position());
        int j = 6 * i;
        nlch[j] = buff.get();

        if (nlch[j] == 10) {
            ucase++;
        } else if (nlch[j] == 13) {
            mcase++;
        }

        // 2-char case
        pos2 = baseBias + 2 * i;
        buff.position(pos2);
        dbgLog.finer("\tposition(2)=" + buff.position());

        nlch[j + 1] = buff.get();
        nlch[j + 2] = buff.get();

        // 3-char case
        pos3 = baseBias + 3 * i;
        buff.position(pos3);
        dbgLog.finer("\tposition(3)=" + buff.position());

        nlch[j + 3] = buff.get();
        nlch[j + 4] = buff.get();
        nlch[j + 5] = buff.get();

        dbgLog.finer(i + "-th iteration position =" + nlch[j] + "\t" + nlch[j + 1] + "\t" + nlch[j + 2]);
        dbgLog.finer(i + "-th iteration position =" + nlch[j + 3] + "\t" + nlch[j + 4] + "\t" + nlch[j + 5]);

        if ((nlch[j + 3] == 13) && (nlch[j + 4] == 13) && (nlch[j + 5] == 10)) {
            three++;
        } else if ((nlch[j + 1] == 13) && (nlch[j + 2] == 10)) {
            wcase++;
        }

        buff.rewind();
    }

    boolean windowsNewLine = true;
    if (three == nolines) {
        windowsNewLine = false; // lineTerminator = "0D0D0A"
    } else if ((ucase == nolines) && (wcase < nolines)) {
        windowsNewLine = false; // lineTerminator = "0A"
    } else if ((ucase < nolines) && (wcase == nolines)) {
        windowsNewLine = true; //lineTerminator = "0D0A"
    } else if ((mcase == nolines) && (wcase < nolines)) {
        windowsNewLine = false; //lineTerminator = "0D"
    }

    buff.rewind();
    int PORmarkPosition = POR_MARK_POSITION_DEFAULT;
    if (windowsNewLine) {
        PORmarkPosition = PORmarkPosition + 5;
    } else if (three == nolines) {
        PORmarkPosition = PORmarkPosition + 10;
    }

    byte[] pormark = new byte[8];
    buff.position(PORmarkPosition);
    buff.get(pormark, 0, 8);
    String pormarks = new String(pormark);

    //dbgLog.fine("pormark =>" + pormarks + "<-");
    dbgLog.fine(
            "pormark[hex: 53 50 53 53 50 4F 52 54 == SPSSPORT] =>" + new String(Hex.encodeHex(pormark)) + "<-");

    if (pormarks.equals(POR_MARK)) {
        dbgLog.fine("POR ID token test: Passed");
        init();

        smd.getFileInformation().put("mimeType", MIME_TYPE);
        smd.getFileInformation().put("fileFormat", MIME_TYPE);

    } else {
        dbgLog.fine("this file is NOT spss-por type");
        throw new IllegalArgumentException("decodeHeader: POR ID token was not found");
    }

    // save the POR file without new line characters

    FileOutputStream fileOutPOR = null;
    Writer fileWriter = null;

    // Scanner class can handle three-character line-terminator
    Scanner porScanner = null;

    try {
        tempPORfile = File.createTempFile("tempPORfile.", ".por");
        fileOutPOR = new FileOutputStream(tempPORfile);
        fileWriter = new BufferedWriter(new OutputStreamWriter(fileOutPOR, "utf8"));
        porScanner = new Scanner(stream);

        // Because 64-bit and 32-bit machines decode POR's first 40-byte
        // sequence differently, the first 5 leader lines are skipped from
        // the new-line-stripped file

        int lineCounter = 0;
        while (porScanner.hasNextLine()) {
            lineCounter++;
            if (lineCounter <= 5) {
                String line = porScanner.nextLine().toString();
                dbgLog.fine("line=" + lineCounter + ":" + line.length() + ":" + line);
            } else {
                fileWriter.write(porScanner.nextLine().toString());
            }
        }
    } finally {
        try {
            if (fileWriter != null) {
                fileWriter.close();
            }
        } catch (IOException ex) {
            ex.printStackTrace();
        }

        if (porScanner != null) {
            porScanner.close();
        }
    }

    return tempPORfile;
}

From source file:com.zimbra.cs.account.ZimbraAuthToken.java

@Override
public String getCrumb() throws AuthTokenException {
    String authToken = getEncoded();
    try {
        ByteKey bk = new ByteKey(getCurrentKey().getKey());
        Mac mac = Mac.getInstance("HmacMD5");
        mac.init(bk);
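        // the crumb is the hex-encoded HmacMD5 of the full encoded auth token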
        return new String(Hex.encodeHex(mac.doFinal(authToken.getBytes())));
    } catch (NoSuchAlgorithmException e) {
        throw new RuntimeException("fatal error", e);
    } catch (InvalidKeyException e) {
        throw new RuntimeException("fatal error", e);
    }
}

From source file:com.zimbra.common.util.ByteUtil.java

/**
 * Reads the given <tt>InputStream</tt> in its entirety, closes
 * the stream, and returns the digest of the read data.
 * @param algorithm e.g. "SHA1"
 * @param in data to digest
 * @param base64 if <tt>true</tt>, returns a base64 String; otherwise returns
 *  a hex string.
 * @return hex or base64 string
 */
public static String getDigest(String algorithm, InputStream in, boolean base64) throws IOException {
    try {
        MessageDigest md = MessageDigest.getInstance(algorithm);
        byte[] buffer = new byte[1024];
        int numBytes;
        while ((numBytes = in.read(buffer)) >= 0) {
            md.update(buffer, 0, numBytes);
        }
        byte[] digest = md.digest();
        if (base64)
            return encodeFSSafeBase64(digest);
        else
            return new String(Hex.encodeHex(digest));
    } catch (NoSuchAlgorithmException e) {
        // this should never happen unless the JDK is foobar
        //  e.printStackTrace();
        throw new RuntimeException(e);
    } finally {
        ByteUtil.closeStream(in);
    }
}

From source file:net.solarnetwork.node.power.impl.sma.sunnynet.SMASunnyNetPowerDatumDataSource.java

/**
 * Create a new SmaPacket instance.
 * 
 * @param cmd
 *        the command to create
 * @param destAddr
 *        the device destination address
 * @param count
 *        the packet counter (requests usually use 0)
 * @param control
 *        the request control type
 * @param data
 *        the user data to add to the packet
 * @return the new packet
 */
private SmaPacket createRequestPacket(SmaCommand cmd, int destAddr, int count, SmaControl control,
        byte[] data) {
    SmaPacket packet = new SmaPacket(0, destAddr, count, control, cmd, data);
    if (log.isTraceEnabled()) {
        log.trace("CRC: " + packet.getCrc());
    }
    if (log.isDebugEnabled()) {
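        // hex-encode the raw packet bytes so the outgoing request is readable in the log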
        log.debug("Sending SMA request " + cmd + ": " + String.valueOf(Hex.encodeHex(packet.getPacket())));
    }
    return packet;
}

From source file:edu.harvard.iq.dvn.ingest.statdataio.impl.plugins.dta.DTAFileReader.java

private void decodeHeader(BufferedInputStream stream) throws IOException {
    dbgLog.fine("***** decodeHeader(): start *****");

    if (stream == null) {
        throw new IllegalArgumentException("stream == null!");
    }

    dbgLog.fine("reading the header segment 1: 4 bytes\n");
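    // first 4 bytes of the header: [0]=release number, [1]=byte order, [2]=filetype flag (must be 1); see the checks below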
    byte[] magic_number = new byte[DTA_MAGIC_NUMBER_LENGTH];

    int nbytes = stream.read(magic_number, 0, DTA_MAGIC_NUMBER_LENGTH);

    if (nbytes == 0) {
        throw new IOException();
    }

    if (dbgLog.isLoggable(Level.FINE))
        dbgLog.fine("hex dump: 1st 4bytes =>" + new String(Hex.encodeHex(magic_number)) + "<-");

    if (magic_number[2] != 1) {
        dbgLog.fine("3rd byte is not 1: given file is not stata-dta type");
        throw new IllegalArgumentException("given file is not stata-dta type");
    } else if ((magic_number[1] != 1) && (magic_number[1] != 2)) {
        dbgLog.fine("2nd byte is neither 1 nor 2: this file is not stata-dta type");
        throw new IllegalArgumentException("given file is not stata-dta type");
    } else if (!STATA_RELEASE_NUMBER.containsKey((int) magic_number[0])) {
        dbgLog.fine("1st byte (" + magic_number[0] + ") is not within the ingestable range [rel. 3-10]:"
                + "we cannot ingest this Stata file.");
        throw new IllegalArgumentException("given file is not stata-dta type");
    } else {
        releaseNumber = (int) magic_number[0];
        smd.getFileInformation().put("releaseNumber", releaseNumber);
        smd.getFileInformation().put("byteOrder", (int) magic_number[1]);
        smd.getFileInformation().put("OSByteOrder", ByteOrder.nativeOrder().toString());

        smd.getFileInformation().put("mimeType", MIME_TYPE[0]);
        smd.getFileInformation().put("fileFormat", MIME_TYPE[0]);
        init();

        if (dbgLog.isLoggable(Level.FINE))
            dbgLog.fine("this file is stata-dta type: " + STATA_RELEASE_NUMBER.get((int) magic_number[0])
                    + "(Number=" + magic_number[0] + ")");
        if (dbgLog.isLoggable(Level.FINE))
            dbgLog.fine("Endian(file)(Big: 1; Little:2)=" + magic_number[1]);

        if ((int) magic_number[1] == 2) {
            isLittleEndian = true;
            dbgLog.fine("Reversal of the bytes is necessary to decode " + "multi-byte fields");
        }
        if (dbgLog.isLoggable(Level.FINE))
            dbgLog.fine("Endian of this platform:" + ByteOrder.nativeOrder().toString());
    }

    dbgLog.fine("reading the remaining header segment 2: 60 or 109 bytes");

    byte[] header = new byte[headerLength];
    nbytes = stream.read(header, 0, headerLength);
    //printHexDump(header, "header:\n");

    // 1. number of variables: short (2 bytes)
    ByteBuffer bbnvar = ByteBuffer.wrap(header, 0, NVAR_FIELD_LENGTH);
    ByteBuffer dupnvar = bbnvar.duplicate();
    short short_nvar = dupnvar.getShort();

    if (dbgLog.isLoggable(Level.FINE))
        dbgLog.fine("get original short view(nvar)=" + short_nvar);
    if (isLittleEndian) {
        bbnvar.order(ByteOrder.LITTLE_ENDIAN);

    }

    short shrt_nvar = bbnvar.getShort();
    smd.getFileInformation().put("varQnty", new Integer(shrt_nvar));

    // setup variableTypeList
    int nvar = shrt_nvar;
    variableTypelList = new String[nvar];

    // 2. number of observations: int (4 bytes)
    ByteBuffer nobs = ByteBuffer.wrap(header, NVAR_FIELD_LENGTH, NOBS_FIELD_LENGTH);
    ByteBuffer dupnobs = nobs.duplicate();
    int int_dupnobs = dupnobs.getInt();
    if (dbgLog.isLoggable(Level.FINE))
        dbgLog.fine("raw nobs=" + int_dupnobs);
    if (isLittleEndian) {
        nobs.order(ByteOrder.LITTLE_ENDIAN);
    }
    int int_nobs = nobs.getInt();
    if (dbgLog.isLoggable(Level.FINE))
        dbgLog.fine("reversed nobs=" + int_nobs);

    smd.getFileInformation().put("caseQnty", new Integer(int_nobs));

    // 3. data_label: 32 or 81 bytes
    int dl_offset = NVAR_FIELD_LENGTH + NOBS_FIELD_LENGTH;
    if (dbgLog.isLoggable(Level.FINE))
        dbgLog.fine("dl_offset=" + dl_offset);
    if (dbgLog.isLoggable(Level.FINE))
        dbgLog.fine("data_label_length=" + dataLabelLength);

    String data_label = new String(Arrays.copyOfRange(header, dl_offset, (dl_offset + dataLabelLength)),
            "ISO-8859-1");

    if (dbgLog.isLoggable(Level.FINE))
        dbgLog.fine("data_label_length=" + data_label.length());
    if (dbgLog.isLoggable(Level.FINE))
        dbgLog.fine("location of the null character=" + data_label.indexOf(0));

    String dataLabel = getNullStrippedString(data_label);
    if (dbgLog.isLoggable(Level.FINE))
        dbgLog.fine("data_label_length=" + dataLabel.length());
    if (dbgLog.isLoggable(Level.FINE))
        dbgLog.fine("data_label=[" + dataLabel + "]");

    smd.getFileInformation().put("dataLabel", dataLabel);

    // 4. time_stamp: ASCII String (18 bytes)
    // added after release 4
    if (releaseNumber > 104) {
        int ts_offset = dl_offset + dataLabelLength;
        String time_stamp = new String(Arrays.copyOfRange(header, ts_offset, ts_offset + TIME_STAMP_LENGTH),
                "ISO-8859-1");
        if (dbgLog.isLoggable(Level.FINE))
            dbgLog.fine("time_stamp_length=" + time_stamp.length());
        if (dbgLog.isLoggable(Level.FINE))
            dbgLog.fine("location of the null character=" + time_stamp.indexOf(0));

        String timeStamp = getNullStrippedString(time_stamp);
        if (dbgLog.isLoggable(Level.FINE))
            dbgLog.fine("timeStamp_length=" + timeStamp.length());
        if (dbgLog.isLoggable(Level.FINE))
            dbgLog.fine("timeStamp=[" + timeStamp + "]");

        smd.getFileInformation().put("timeStamp", timeStamp);
        smd.getFileInformation().put("fileDate", timeStamp);
        smd.getFileInformation().put("fileTime", timeStamp);
        smd.getFileInformation().put("varFormat_schema", "STATA");

    }

    if (dbgLog.isLoggable(Level.FINE)) {
        dbgLog.fine("smd dump:" + smd.toString());
        dbgLog.fine("***** decodeHeader(): end *****");
    }
}

From source file:net.solarnetwork.node.io.serial.rxtx.SerialPortConnection.java

private String asciiDebugValue(byte[] data) {
    if (data == null || data.length < 1) {
        return "";
    }
    StringBuilder buf = new StringBuilder();
    buf.append(Hex.encodeHex(data)).append(" (");
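    // follow the hex dump with an ASCII rendering, replacing non-printable bytes with '~'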
    for (byte b : data) {
        if (b >= 32 && b < 126) {
            buf.append(Character.valueOf((char) b));
        } else {
            buf.append('~');
        }
    }
    buf.append(")");
    return buf.toString();
}