Example usage for org.apache.commons.codec.digest DigestUtils shaHex

List of usage examples for org.apache.commons.codec.digest DigestUtils shaHex

Introduction

This page collects example usages of the org.apache.commons.codec.digest.DigestUtils method shaHex from open source projects.

Prototype

@Deprecated
    public static String shaHex(byte[] data)

@Deprecated
    public static String shaHex(InputStream data) throws IOException

@Deprecated
    public static String shaHex(String data)

The examples below exercise all three overloads: byte[], InputStream, and String.
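Each shaHex overload is deprecated in favor of the matching sha1Hex method; both compute the same SHA-1 digest and return it as a lowercase hex string. A minimal sketch comparing the two calls:

import org.apache.commons.codec.digest.DigestUtils;

public class ShaHexDemo {
    public static void main(String[] args) {
        // Deprecated overload shown in the prototype above.
        String legacy = DigestUtils.shaHex("hello");
        // Preferred replacement; same algorithm, same output.
        String current = DigestUtils.sha1Hex("hello");
        System.out.println(legacy.equals(current)); // prints "true"
    }
}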

Usage

From source file:org.nuxeo.utils.GetStringDigest.java

@OperationMethod
public void run() throws ClientException, UnsupportedEncodingException {

    // Check parameter
    if (stringToHash == null) {
        stringToHash = "";
    }

    // Cleanup
    digestKind = digestKind.toLowerCase().trim();
    if (digestKind == "") {
        digestKind = "md5";
    }

    charset = charset.trim();
    if (charset == "") {
        charset = "UTF-8";
    }

    byte[] bytes = stringToHash.getBytes(charset);

    switch (digestKind) {
    case "md5":
        ctx.put(contextVarName, DigestUtils.md5Hex(bytes));
        break;

    case "sha":
        ctx.put(contextVarName, DigestUtils.shaHex(bytes));
        break;

    case "sha256":
        ctx.put(contextVarName, DigestUtils.sha256Hex(bytes));
        break;

    case "sha384":
        ctx.put(contextVarName, DigestUtils.sha384Hex(bytes));
        break;

    case "sha512":
        ctx.put(contextVarName, DigestUtils.sha512Hex(bytes));
        break;

    default:
        throw new ClientException("Unhandled digest kind: " + digestKind);
    }
}
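The switch above gives each supported digest name its own DigestUtils call. As an alternative sketch (not part of the original operation), Commons Codec can resolve the algorithm at runtime with DigestUtils.getDigest, which takes a standard JCA name such as "MD5", "SHA-1", or "SHA-256" and throws IllegalArgumentException for names it cannot resolve:

import java.security.MessageDigest;

import org.apache.commons.codec.binary.Hex;
import org.apache.commons.codec.digest.DigestUtils;

public final class DigestDispatch {
    // Hypothetical helper: one entry point instead of one case per algorithm.
    public static String hexDigest(String jcaName, byte[] data) {
        MessageDigest md = DigestUtils.getDigest(jcaName); // e.g. "SHA-256"
        return Hex.encodeHexString(md.digest(data));
    }
}

The trade-off is that JCA names differ from the short names the operation accepts ("sha" vs. "SHA-1"), so a small name mapping would still be needed.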

From source file:org.openengsb.core.edbi.jdbc.names.SQLIndexNameTranslator.java

@Override
public String translate(Index<?> index) {
    if (index == null) {
        throw new IllegalArgumentException("Class to translate is null");
    }

    return DigestUtils.shaHex(index.getName()).substring(0, DEFAULT_MAXLEN).toUpperCase();
}
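shaHex always returns 40 hexadecimal characters, so any prefix length up to 40 is safe here; shorter prefixes simply raise the collision risk. A minimal sketch of the same truncation (the snippet does not show DEFAULT_MAXLEN, so the prefix length 8 below is illustrative):

import org.apache.commons.codec.digest.DigestUtils;

public class TruncatedHashDemo {
    public static void main(String[] args) {
        String full = DigestUtils.shaHex("some.index.Name"); // always 40 hex chars
        String shortName = full.substring(0, 8).toUpperCase(); // fits tight identifier limits
        System.out.println(shortName);
    }
}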

From source file:org.openeos.wf.internal.DAOWorkflowService.java

@Override
@Transactional
protected org.openeos.wf.WorkflowDefinition saveWorkflowDefinition(Deployment deployment, URL url) {
    WorkflowDefinition def = new WorkflowDefinition();
    def.setDeployment(deploymentDAO.read(deployment.getId()));
    byte[] content = null;
    try (InputStream stream = url.openStream()) {
        content = IOUtils.toByteArray(stream);
    } catch (IOException e) {
        LOG.error("An error has occurred while trying to read content from url workflow definition resource",
                e);
        throw new WorkflowServiceException(e);
    }
    WorkflowDefinitionContent defContent = new WorkflowDefinitionContent();
    defContent.setContent(content);
    defContent.setWorkflowDefinition(def);
    def.setWorkflowDefinitionContent(defContent);
    extractValues(def, content);
    def.setChecksum(DigestUtils.shaHex(content));
    def.setVersion(numerationService.getAndIncrement(NUMERATION_WORKFLOW_VERSION_SEQ_ID, def));
    workflowDefinitionDAO.create(def);
    return new WorkflowDefinitionModelBasedImpl(def);
}

From source file:org.openpnp.machine.reference.camera.TableScannerCamera.java

private synchronized void initialize() throws Exception {
    stop();
    sourceUrl = new URL(sourceUri);
    cacheDirectory = new File(Configuration.get().getResourceDirectory(getClass()),
            DigestUtils.shaHex(sourceUri));
    if (!cacheDirectory.exists()) {
        cacheDirectory.mkdirs();
    }
    File[] files = null;
    // Attempt to get the list of files from the source.
    try {
        files = loadSourceFiles();
    } catch (Exception e) {
        logger.warn("Unable to load file list from {}", sourceUri);
        logger.warn("Reason", e);
    }

    if (files == null) {
        files = loadCachedFiles();
    }

    if (files.length == 0) {
        throw new Exception("No source or cached files found.");
    }
    // Load the first image we found and use its properties as a template
    // for the rest of the images.
    BufferedImage templateImage = new Tile(0, 0, files[0]).getImage();

    width = templateImage.getWidth();
    height = templateImage.getHeight();

    tileList = new ArrayList<Tile>();
    lastX = Double.MIN_VALUE;
    lastY = Double.MIN_VALUE;
    lastCenterTile = null;

    // We build a set of the unique X and Y positions we see so we can
    // later build a two-dimensional array of the tiles.
    TreeSet<Double> uniqueX = new TreeSet<Double>();
    TreeSet<Double> uniqueY = new TreeSet<Double>();
    // Create a map of the tiles so that we can quickly find them when we
    // build the array.
    Map<Tile, Tile> tileMap = new HashMap<Tile, Tile>();
    // Parse the filenames of all the files and add their coordinates
    // to the sets and map.
    for (File file : files) {
        String filename = file.getName();
        filename = filename.substring(0, filename.indexOf(".png"));
        String[] xy = filename.split(",");
        double x = Double.parseDouble(xy[0]);
        double y = Double.parseDouble(xy[1]);
        Tile tile = new Tile(x, y, file);
        uniqueX.add(x);
        uniqueY.add(y);
        tileMap.put(tile, tile);
        tileList.add(tile);
    }
    // Create a two-dimensional array to store all of the tiles
    tiles = new Tile[uniqueX.size()][uniqueY.size()];

    // Iterate through all the unique X and Y positions that were found
    // and add each tile to the two-dimensional array in the position
    // where it belongs.
    int x = 0, y = 0;
    for (Double xPos : uniqueX) {
        y = 0;
        for (Double yPos : uniqueY) {
            Tile tile = tileMap.get(new Tile(xPos, yPos, null));
            tiles[x][y] = tile;
            tile.setTileX(x);
            tile.setTileY(y);
            y++;
        }
        x++;
    }

    /*
     * Create a buffer that we will render the center tile and its
     * surrounding tiles to.
     */
    buffer = new BufferedImage(templateImage.getWidth() * tilesWide, templateImage.getHeight() * tilesHigh,
            BufferedImage.TYPE_INT_ARGB);

    if (listeners.size() > 0) {
        start();
    }
}
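Note how initialize() derives cacheDirectory: hashing the source URI yields a fixed-length, filesystem-safe name that is stable across runs, so the same source always maps to the same cache. A minimal sketch of that scheme in isolation (the base directory is hypothetical):

import java.io.File;

import org.apache.commons.codec.digest.DigestUtils;

public class CacheDirDemo {
    public static File cacheDirFor(String sourceUri) {
        // The same URI always hashes to the same name, so the cache survives restarts.
        File dir = new File("/tmp/tile-cache", DigestUtils.shaHex(sourceUri));
        dir.mkdirs();
        return dir;
    }
}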

From source file:org.overlord.commons.osgi.vfs.VfsBundle.java

/**
 * Indexes the JAR file by getting a SHA1 hash of its MANIFEST.MF file.
 * @param entryFile
 */
private void indexJar(File entryFile) {
    ZipFile zipFile = null;
    try {
        zipFile = new ZipFile(entryFile);
        ZipEntry zipEntry = zipFile.getEntry("META-INF/MANIFEST.MF"); //$NON-NLS-1$
        if (zipEntry != null) {
            InputStream inputStream = zipFile.getInputStream(zipEntry);
            String hash = DigestUtils.shaHex(inputStream);
            index.put(hash, entryFile);
        }
    } catch (Exception e) {
        // Do nothing - invalid JAR file?
    } finally {
        try {
            if (zipFile != null)
                zipFile.close();
        } catch (IOException e) {
        }
    }
}

From source file:org.overlord.commons.osgi.vfs.VfsBundle.java

/**
 * Indexes the bundle (adds the bundle itself to the index).
 * @param entryFile
 */
private void indexBundle(File entryFile) {
    InputStream is = null;
    try {
        is = new FileInputStream(entryFile);
        String hash = DigestUtils.shaHex(is);
        index.put(hash, new File(vfsBundleDir, "content")); //$NON-NLS-1$
    } catch (Exception e) {
        throw new RuntimeException(e);
    } finally {
        IOUtils.closeQuietly(is);
    }
}

From source file:org.overlord.commons.osgi.vfs.VfsBundle.java

/**
 * Converts a URL to a File.  If the URL is the root URL for the bundle, then
 * this method will return a {@link File} that points to a directory on the file
 * system.  However, if the URL points to a JAR within the bundle, then this 
 * method is responsible for figuring out which JAR is being referenced and 
 * then returning a {@link File} pointing to that JAR.
 *
 * The approach to figuring out what the URL points to is as follows:
 * 1) Get the SHA1 hash of the MANIFEST.MF file returned via the URL
 * 2) Look up the File previously registered to that hash value
 * 
 * @param url
 */
public File asFile(URL url) {
    InputStream manifestStream = null;
    try {
        String manifestUrl = "bundle://" + url.getHost() + ":" + url.getPort() + "/META-INF/MANIFEST.MF"; //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        URL manifestURL = new URL(manifestUrl);
        manifestStream = manifestURL.openStream();
        String manifestHash = DigestUtils.shaHex(manifestStream);

        File jarFile = index.get(manifestHash);
        if (jarFile != null) {
            return jarFile;
        }
    } catch (Exception e) {
        // TODO log the error
    } finally {
        IOUtils.closeQuietly(manifestStream);
    }

    throwNotFoundError(url);
    return null;
}
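This lookup only succeeds because indexJar and indexBundle hashed exactly the same MANIFEST.MF bytes when the index was built: the lookup key must equal the indexing key. A minimal sketch of the round trip, with hypothetical class and member names:

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;

import org.apache.commons.codec.digest.DigestUtils;

public class ManifestIndexDemo {
    private final Map<String, File> index = new HashMap<>();

    public void add(File jar, InputStream manifest) throws IOException {
        index.put(DigestUtils.shaHex(manifest), jar); // indexing side
    }

    public File find(InputStream manifest) throws IOException {
        return index.get(DigestUtils.shaHex(manifest)); // lookup side; null if unknown
    }
}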

From source file:org.overlord.sramp.common.maven.MavenGavInfo.java

public static MavenGavInfo fromCommandLine(String gavArg, File file) throws Exception {
    String[] split = gavArg.split(":"); //$NON-NLS-1$
    String groupId = split[0];
    String artifactId = split[1];
    String version = split[2];
    String filename = file.getName();
    if (file.getName().endsWith(".tmp")) { //$NON-NLS-1$
        filename = filename.substring(0, filename.indexOf(".jar") + 4); //$NON-NLS-1$
    }
    String type = filename.substring(filename.lastIndexOf('.') + 1);
    if (filename.endsWith(".sha1")) {
        type = filename.substring(0, filename.length() - 5);
        type = type.substring(type.lastIndexOf('.') + 1) + ".sha1";
    } else if (filename.endsWith(".md5")) {
        type = filename.substring(0, filename.length() - 4);
        type = type.substring(type.lastIndexOf('.') + 1) + ".md5";
    }
    String classifier = null;
    // split[5] exists only when the GAV string has at least six segments.
    if (split.length >= 6) {
        classifier = split[5];
    }
    boolean snapshot = version != null && version.endsWith("-SNAPSHOT");
    String snapshotId = null;
    if (snapshot && !filename.contains(version)) {
        snapshotId = extractSnapshotId(filename, version, type, classifier);
    }
    // MD5 hash
    InputStream is = new FileInputStream(file);
    String md5 = DigestUtils.md5Hex(is);
    IOUtils.closeQuietly(is);
    // SHA-1 hash
    is = new FileInputStream(file);
    String sha1 = DigestUtils.shaHex(is);
    IOUtils.closeQuietly(is);

    MavenGavInfo gav = new MavenGavInfo();
    gav.setName(filename);
    gav.setGroupId(groupId);
    gav.setArtifactId(artifactId);
    gav.setVersion(version);
    gav.setClassifier(classifier);
    gav.setType(type);
    gav.setSnapshot(snapshot);
    gav.setSnapshotId(snapshotId);
    gav.setMd5(md5);
    gav.setSha1(sha1);
    return gav;
}
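One caveat in the hashing steps above: IOUtils.closeQuietly runs only after md5Hex or shaHex returns, so a stream leaks if hashing throws. A minimal try-with-resources sketch of the same step (the method name is hypothetical):

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

import org.apache.commons.codec.digest.DigestUtils;

public class FileHashes {
    public static String sha1Of(File file) throws IOException {
        try (InputStream in = new FileInputStream(file)) {
            return DigestUtils.shaHex(in); // stream closes even if hashing fails
        }
    }
}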

From source file:org.overlord.sramp.governance.QueryExecutor.java

/**
 * Creates a unique GUID for the workflow artifact by using SHA1 to create a hash
 * of the target artifact's UUID and the ID of the workflow being created.
 * @param uuid
 * @param workflowId
 */
private static String createWorkflowArtifactGuid(String uuid, String workflowId) {
    String value = uuid + "||" + workflowId; //$NON-NLS-1$
    return DigestUtils.shaHex(value);
}
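Because SHA-1 is deterministic, the same uuid and workflowId always produce the same GUID, so re-running this for the same pair cannot mint a second identifier; the "||" separator also keeps distinct pairs such as ("ab", "c") and ("a", "bc") from concatenating to the same input. A minimal sketch demonstrating the determinism:

import org.apache.commons.codec.digest.DigestUtils;

public class GuidDemo {
    public static void main(String[] args) {
        String first = DigestUtils.shaHex("1234||approval");
        String second = DigestUtils.shaHex("1234||approval");
        System.out.println(first.equals(second)); // prints "true": same inputs, same GUID
    }
}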

From source file:org.overlord.sramp.repository.jcr.JCRArtifactPersister.java

/**
 * Phase one of persisting an artifact consists of creating the JCR node for the artifact and
 * persisting all of its meta-data to it.
 * @param session
 * @param metaData
 * @param content
 * @param classificationHelper
 * @throws Exception
 */
public static Phase1Result persistArtifactPhase1(Session session, BaseArtifactType metaData,
        InputStream content, ClassificationHelper classificationHelper) throws Exception {
    JCRUtils tools = new JCRUtils();
    if (metaData.getUuid() == null) {
        metaData.setUuid(UUID.randomUUID().toString());
    }
    String uuid = metaData.getUuid();
    ArtifactType artifactType = ArtifactType.valueOf(metaData);
    String name = metaData.getName();
    String artifactPath = MapToJCRPath.getArtifactPath(uuid);
    if (session.nodeExists(artifactPath)) {
        throw new ArtifactAlreadyExistsException(uuid);
    }
    log.debug(Messages.i18n.format("UPLOADING_TO_JCR", name)); //$NON-NLS-1$

    Node artifactNode = null;
    boolean isDocumentArtifact = SrampModelUtils.isDocumentArtifact(metaData);
    if (content == null && !isDocumentArtifact) {
        artifactNode = tools.findOrCreateNode(session, artifactPath, "nt:folder", //$NON-NLS-1$
                JCRConstants.SRAMP_NON_DOCUMENT_TYPE);
    } else {
        artifactNode = tools.uploadFile(session, artifactPath, content);
        JCRUtils.setArtifactContentMimeType(artifactNode, artifactType.getMimeType());
    }

    String jcrMixinName = artifactType.getArtifactType().getApiType().value();
    jcrMixinName = JCRConstants.SRAMP_ + StringUtils.uncapitalize(jcrMixinName);
    artifactNode.addMixin(jcrMixinName);
    // BaseArtifactType
    artifactNode.setProperty(JCRConstants.SRAMP_UUID, uuid);
    artifactNode.setProperty(JCRConstants.SRAMP_ARTIFACT_MODEL, artifactType.getArtifactType().getModel());
    artifactNode.setProperty(JCRConstants.SRAMP_ARTIFACT_TYPE, artifactType.getArtifactType().getType());
    // Extended
    if (ExtendedArtifactType.class.isAssignableFrom(artifactType.getArtifactType().getTypeClass())) {
        artifactNode.setProperty(JCRConstants.SRAMP_EXTENDED_TYPE, artifactType.getExtendedType());
    }
    // Extended Document
    if (ExtendedDocument.class.isAssignableFrom(artifactType.getArtifactType().getTypeClass())) {
        artifactNode.setProperty(JCRConstants.SRAMP_EXTENDED_TYPE, artifactType.getExtendedType());
    }
    // Document
    if (DocumentArtifactType.class.isAssignableFrom(artifactType.getArtifactType().getTypeClass())) {
        artifactNode.setProperty(JCRConstants.SRAMP_CONTENT_TYPE, artifactType.getMimeType());
        artifactNode.setProperty(JCRConstants.SRAMP_CONTENT_SIZE,
                artifactNode.getProperty("jcr:content/jcr:data").getLength()); //$NON-NLS-1$
        String shaHex = DigestUtils
                .shaHex(artifactNode.getProperty("jcr:content/jcr:data").getBinary().getStream()); //$NON-NLS-1$
        artifactNode.setProperty(JCRConstants.SRAMP_CONTENT_HASH, shaHex);
    }
    // XMLDocument
    if (XmlDocument.class.isAssignableFrom(artifactType.getArtifactType().getTypeClass())) {
        // read the encoding from the header
        artifactNode.setProperty(JCRConstants.SRAMP_CONTENT_ENCODING, "UTF-8"); //$NON-NLS-1$
    }

    // Update the JCR node with any properties included in the meta-data
    ArtifactToJCRNodeVisitor visitor = new ArtifactToJCRNodeVisitor(artifactType, artifactNode,
            new JCRReferenceFactoryImpl(session), classificationHelper);
    ArtifactVisitorHelper.visitArtifact(visitor, metaData);
    if (visitor.hasError())
        throw visitor.getError();

    log.debug(Messages.i18n.format("SAVED_JCR_NODE", name, uuid)); //$NON-NLS-1$
    if (sramp.isAuditingEnabled()) {
        auditCreateArtifact(artifactNode);
        session.save();
    }
    session.save();

    Phase1Result result = new Phase1Result();
    result.artifactNode = artifactNode;
    result.artifactType = artifactType;
    result.isDocumentArtifact = isDocumentArtifact;
    return result;
}
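A detail worth noting in the Document branch above: SRAMP_CONTENT_HASH is computed from the stream of the binary already stored under jcr:content/jcr:data, not from the incoming content stream, so the recorded hash matches exactly what a later download returns. A minimal sketch of the same idea outside JCR, with a byte array standing in for the persisted binary:

import java.io.ByteArrayInputStream;
import java.io.IOException;

import org.apache.commons.codec.digest.DigestUtils;

public class ContentHashDemo {
    public static void main(String[] args) throws IOException {
        byte[] persisted = "artifact content".getBytes("UTF-8");
        // Hash what was stored, so the hash and the retrievable content cannot diverge.
        String hash = DigestUtils.shaHex(new ByteArrayInputStream(persisted));
        System.out.println(hash);
    }
}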