Example usage for org.apache.commons.codec.digest DigestUtils sha1Hex

List of usage examples for org.apache.commons.codec.digest DigestUtils sha1Hex

Introduction

On this page you can find example usage for org.apache.commons.codec.digest DigestUtils sha1Hex.

Prototype

public static String sha1Hex(String data) 

Source Link

Usage

From source file:models.sos.ObservationDescription.java

/**
 * createSamplingFeature, tries to crate sf_samplingfeature flexibly as
 * possible//from  w w  w  . j ava  2 s .c o  m
 * 
 * @param featureURI
 *            (will be needed in any case)
 * @param position
 *            (can be null, but if not must have x/y values)
 * @param sampledFeatureURI
 *            (can be null)
 * @param foiCodeSpace
 *            (can be null)
 * @return
 */
/**
 * Builds the om:featureOfInterest XML fragment for a sampling feature,
 * encoding as much detail as the supplied data allows.
 *
 * @param featureURI
 *            the feature identifier URI (always required)
 * @param position
 *            coordinate pair; may be null, in which case only an xlink
 *            reference is emitted
 * @param sampledFeatureURI
 *            optional URI of the sampled feature (may be null or empty)
 * @param foiCodeSpace
 *            optional codeSpace for the gml:identifier (may be null)
 * @return the XML snippet as a string
 */
private String createSamplingFeature(String featureURI, Double[] position, String sampledFeatureURI,
        String foiCodeSpace) {

    StringBuilder xml = new StringBuilder();

    // Derive stable gml:ids from the feature URI (SHA-1 of URI, upper-cased hex).
    String featureHash = DigestUtils.sha1Hex(featureURI).toUpperCase();
    String ssfGmlID = "ssf_" + featureHash;
    String pointGmlID = "poi_" + featureHash;

    String codeSpace = (foiCodeSpace != null) ? foiCodeSpace : "";

    if (position == null) {
        // No geometry available: reference the feature by URI only.
        xml.append("       <om:featureOfInterest xlink:href=\"").append(featureURI).append("\"/>\n");
    } else {
        xml.append("       <om:featureOfInterest>\n")
                .append("<sams:SF_SpatialSamplingFeature gml:id=\"").append(ssfGmlID).append("\">\n")
                .append("<gml:identifier codeSpace=\"").append(codeSpace).append("\">").append(featureURI)
                .append("</gml:identifier>\n")
                .append("<sf:type xlink:href=\"").append(SOSConstants.SAMPLINGPOINT_DEF).append("\"/>\n");

        if (sampledFeatureURI != null && !sampledFeatureURI.isEmpty()) {
            xml.append("<sf:sampledFeature xlink:href=\"").append(sampledFeatureURI).append("\"/>\n");
        }

        xml.append("<sams:shape>\n")
                .append("<gml:Point gml:id=\"").append(pointGmlID).append("\">\n")
                .append("<gml:pos srsName=\"http://www.opengis.net/def/crs/EPSG/0/4326\">")
                .append(position[0]).append(" ").append(position[1]).append("</gml:pos>\n")
                .append("</gml:Point>\n")
                .append("</sams:shape>\n")
                .append("</sams:SF_SpatialSamplingFeature>\n")
                .append("</om:featureOfInterest>\n");
    }

    return xml.toString();
}

From source file:de.joinout.criztovyl.tools.files.FileList.java

/**
 * Generates a map with the file name and modification date hashed together
 * as key and the {@link Path} as value.<br>
 * //  w w  w.java2s.  c  o m
 * @param ignore
 *            a set which contains {@link Path}s that should be
 *            ignored. Can be <code>null</code>.
 * @param jsonOnly whether data should be loaded from JSON only.
 * @return a {@link Map} with a {@link String} as key and the {@link Path} as value.
 */
/**
 * Generates a map with the file name and modification date hashed together
 * as key and the {@link Path} as value.
 *
 * @param ignore
 *            a set which contains {@link Path}s that should be
 *            ignored. Can be <code>null</code>. Note: when non-null, the
 *            JSON data-file path is added to this set as a side effect.
 * @param jsonOnly whether data should be loaded from JSON only.
 * @return a {@link Map} with a {@link String} as key and the {@link Path} as value.
 */
public Map<String, Path> getMappedHashedModifications(Set<Path> ignore, boolean jsonOnly) {

    final Map<String, Path> mods = new HashMap<>();

    // Create an empty set if ignore is null so we can safely add to it below.
    if (ignore == null) {
        ignore = new HashSet<>();
    }

    // The JSON data file itself must never be treated as a tracked file.
    ignore.add(getDirectory().append(JSON_FILE_NAME));

    // If only JSON data should be used, load the modification map from the JSON file.
    if (jsonOnly) {
        if (jsonFile.getJSONObject().has(JSON_MODIFICATIONS)) {
            return new JSONMap<>(jsonFile.getJSONObject().getJSONObject(JSON_MODIFICATIONS),
                    JSONCreators.STRING, JSONCreators.PATH).getMap();
        }
        // JSON file has no stored modifications; return the empty map.
        return mods;
    }

    for (Path path : map.keySet()) {

        // Resolve the real (symlink-free) path; fall back to the appended path on error.
        Path pathF = getDirectory().append(path);
        try {
            pathF = pathF.realPath();
        } catch (IOException e) {
            logger.warn("Caught Exception while resolving real path of file {}", path, e);
        }

        // Skip ignored paths and anything that is not a regular file (e.g. directories).
        if (!ignore.contains(path) && pathF.getFile().isFile()) {
            // Key: SHA-1 of relative path + last-modified millis; value: the full path.
            mods.put(
                    DigestUtils.sha1Hex(path.getPath() + Long.toString(pathF.getFile().lastModified())),
                    pathF);
        }
    }

    return mods;
}

From source file:de.elomagic.carafile.client.CaraFileClient.java

/**
 * Don't call this method!/*  w  w w  .ja  v a2  s.c  o m*/
 * <p/>
 * This method will be called usually by the server and this class.
 *
 * @param sp
 * @param md
 * @param out
 * @throws IOException
 */
/**
 * Downloads a single chunk from a peer and writes it to the given stream.
 * <p/>
 * Don't call this method directly; it will be called usually by the server
 * and this class. The chunk is buffered fully in memory and its SHA-1 is
 * verified against the chunk id before anything is written to {@code out}.
 *
 * @param sp peer/chunk descriptor to fetch from
 * @param md meta data of the file (not referenced in this method)
 * @param out target stream; must not be null
 * @throws IOException if the transfer fails or the SHA-1 check does not match
 */
public void downloadShunk(final PeerChunk sp, final MetaData md, final OutputStream out) throws IOException {
    if (out == null) {
        throw new IllegalArgumentException("Parameter 'out' must not be null!");
    }

    final URI uri = CaraFileUtils.buildURI(sp.getPeerURI(), "peer", "leechChunk", sp.getChunkId());

    final HttpResponse response = executeRequest(
            Request.Get(uri).addHeader(HttpHeaders.ACCEPT, ContentType.APPLICATION_OCTET_STREAM.toString()))
                    .returnResponse();

    // Buffer the whole chunk so we can hash it before forwarding any bytes.
    final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    response.getEntity().writeTo(buffer);
    final byte[] data = buffer.toByteArray();

    // Integrity check: the chunk id is compared against the SHA-1 of the payload.
    final String sha1 = DigestUtils.sha1Hex(data);
    if (!sha1.equalsIgnoreCase(sp.getChunkId())) {
        throw new IOException("SHA1 validation failed. Expected " + sp.getChunkId() + " but was " + sha1);
    }

    out.write(data);
}

From source file:com.searchcode.app.jobs.repository.IndexBaseRepoJob.java

/**
 * Indexes all the documents in the repository changed file effectively performing a delta update
 * Should only be called when there is a genuine update IE something was indexed previously and
 * has has a new commit.//from  w  ww.  j a v  a2s. c  o  m
 */
/**
 * Indexes all the documents in the repository's changed files, effectively performing a
 * delta update. Should only be called when there is a genuine update, i.e. something was
 * indexed previously and there is a new commit.
 */
public void indexDocsByDelta(Path path, String repoName, String repoLocations, String repoRemoteLocation,
        RepositoryChanged repositoryChanged) {
    SearchcodeLib scl = Singleton.getSearchCodeLib(); // Should have data object by this point
    Queue<CodeIndexDocument> codeIndexDocumentQueue = Singleton.getCodeIndexQueue();
    String fileRepoLocations = FilenameUtils.separatorsToUnix(repoLocations);

    // Used to hold the reports of what was indexed
    List<String[]> reportList = new ArrayList<>();

    for (String changedFile : repositoryChanged.getChangedFiles()) {
        // Abandon the whole delta run if the job was paused/terminated ...
        if (this.shouldJobPauseOrTerminate()) {
            return;
        }

        // ... or if this repository has been queued for deletion.
        if (Singleton.getDataService().getPersistentDelete().contains(repoName)) {
            return;
        }

        // Build the absolute on-disk path of the changed file and normalise slashes.
        String[] split = changedFile.split("/");
        String fileName = split[split.length - 1];
        changedFile = fileRepoLocations + "/" + repoName + "/" + changedFile;
        changedFile = changedFile.replace("//", "/");

        // NOTE(review): each 'break' below abandons ALL remaining changed files as soon as
        // one file is unreadable/minified/empty/binary; 'continue' (skip only that file)
        // looks like the intent — confirm against the full-index code path before changing.
        CodeLinesReturn codeLinesReturn = this.getCodeLines(changedFile, reportList);
        if (codeLinesReturn.isError()) {
            break;
        }

        IsMinifiedReturn isMinified = this.getIsMinified(codeLinesReturn.getCodeLines(), fileName, reportList);
        if (isMinified.isMinified()) {
            break;
        }

        if (this.checkIfEmpty(codeLinesReturn.getCodeLines(), changedFile, reportList)) {
            break;
        }

        if (this.determineBinary(changedFile, fileName, codeLinesReturn.getCodeLines(), reportList)) {
            break;
        }

        // Gather the metadata needed to build the index document.
        String md5Hash = this.getFileMd5(changedFile);
        String languageName = Singleton.getFileClassifier().languageGuesser(changedFile,
                codeLinesReturn.getCodeLines());
        String fileLocation = this.getRelativeToProjectPath(path.toString(), changedFile);
        String fileLocationFilename = changedFile.replace(fileRepoLocations, Values.EMPTYSTRING);
        String repoLocationRepoNameLocationFilename = changedFile;
        String newString = this.getBlameFilePath(fileLocationFilename);
        String codeOwner = this.getCodeOwner(codeLinesReturn.getCodeLines(), newString, repoName,
                fileRepoLocations, scl);

        // Low-memory mode indexes synchronously; otherwise the document is queued.
        if (this.LOWMEMORY) {
            try {
                Singleton.getCodeIndexer().indexDocument(new CodeIndexDocument(
                        repoLocationRepoNameLocationFilename, repoName, fileName, fileLocation,
                        fileLocationFilename, md5Hash, languageName, codeLinesReturn.getCodeLines().size(),
                        StringUtils.join(codeLinesReturn.getCodeLines(), " "), repoRemoteLocation, codeOwner));
            } catch (IOException ex) {
                Singleton.getLogger().warning("ERROR - caught a " + ex.getClass() + " in " + this.getClass()
                        + "\n with message: " + ex.getMessage());
            }
        } else {
            this.sharedService.incrementCodeIndexLinesCount(codeLinesReturn.getCodeLines().size());
            codeIndexDocumentQueue.add(new CodeIndexDocument(repoLocationRepoNameLocationFilename, repoName,
                    fileName, fileLocation, fileLocationFilename, md5Hash, languageName,
                    codeLinesReturn.getCodeLines().size(),
                    StringUtils.join(codeLinesReturn.getCodeLines(), " "), repoRemoteLocation, codeOwner));
        }

        if (this.LOGINDEXED) {
            reportList.add(new String[] { changedFile, "included", "" });
        }
    }

    // Persist the per-file report when logging of indexed files is enabled.
    if (this.LOGINDEXED && reportList.isEmpty() == false) {
        this.logIndexed(repoName + "_delta", reportList);
    }

    // Remove deleted files from the index; documents are keyed by SHA-1 of their path.
    for (String deletedFile : repositoryChanged.getDeletedFiles()) {
        deletedFile = fileRepoLocations + "/" + repoName + "/" + deletedFile;
        deletedFile = deletedFile.replace("//", "/");
        Singleton.getLogger().info("Missing from disk, removing from index " + deletedFile);
        try {
            Singleton.getCodeIndexer().deleteByCodeId(DigestUtils.sha1Hex(deletedFile));
        } catch (IOException ex) {
            Singleton.getLogger()
                    .warning("ERROR - caught a " + ex.getClass() + " in " + this.getClass()
                            + " indexDocsByDelta deleteByFileLocationFilename for " + repoName + " "
                            + deletedFile + "\n with message: " + ex.getMessage());
        }
    }
}

From source file:models.sos.ObservationDescription.java

/**
 * createSpatialSamplingParameter, new, I wonder if this in now the
 * alternative to querying against SF_feature geometry, we can used it
 * instead of always encoding the SF_foi because I think the 52n sos
 * complained that the same feature already exists
 * /*from   www  . ja  va2s  .  c  o m*/
 * @param position
 * @return
 */
/**
 * Builds an om:parameter fragment carrying the sampling geometry as a named
 * value. This can be used instead of always encoding the full SF_foi, since
 * the 52n SOS complains when the same feature already exists.
 *
 * @param position coordinate pair (position[0], position[1])
 * @return the XML snippet as a string
 */
private String createSpatialSamplingParameter(Double[] position) {

    // gml:id derived from the coordinates (SHA-1 of the seed, upper-cased hex).
    final String seed = "SamplingPoint_" + position[0] + " " + position[1];
    final String pointGmlID = "SamplingPoint_" + DigestUtils.sha1Hex(seed).toUpperCase();

    StringBuilder xml = new StringBuilder();
    xml.append("<om:parameter>\n")
            .append("<om:NamedValue>\n")
            .append("    <om:name xlink:href=\"http://www.opengis.net/def/param-name/OGC-OM/2.0/samplingGeometry\"/>\n")
            .append("    <om:value xsi:type=\"gml:GeometryPropertyType\">\n")
            .append("        <gml:Point gml:id=\"").append(pointGmlID).append("\">\n")
            .append("            <gml:pos srsName=\"http://www.opengis.net/def/crs/EPSG/0/4326\">")
            .append(position[0]).append(" ").append(position[1]).append("</gml:pos>\n")
            .append("        </gml:Point>\n")
            .append("    </om:value>\n")
            .append("   </om:NamedValue>\n")
            .append("</om:parameter>\n");

    return xml.toString();
}

From source file:com.searchcode.app.jobs.repository.IndexBaseRepoJob.java

/**
 * Method to remove from the index files that are no longer required
 *///w w  w  .  j a  v a2  s .c  o  m
/**
 * Removes from the index any document of the given repository whose file
 * location no longer appears in the set of files found on disk.
 *
 * @param codeSearcher  used to page through the repository's indexed documents
 * @param repoName      name of the repository being cleaned
 * @param fileLocations map whose keys are the file locations currently on disk
 */
public void cleanMissingPathFiles(CodeSearcher codeSearcher, String repoName,
        Map<String, String> fileLocations) {

    // Page through the indexed documents until an empty page signals the end.
    for (int page = 0;; page++) {
        final List<String> indexLocations = codeSearcher.getRepoDocuments(repoName, page);
        Singleton.getLogger().info("cleanMissingPathFiles doClean " + page + " " + indexLocations.size());

        if (indexLocations.isEmpty()) {
            return;
        }

        for (final String file : indexLocations) {
            if (fileLocations.containsKey(file)) {
                continue;
            }
            // File no longer on disk; documents are keyed by SHA-1 of their location.
            Singleton.getLogger().info("Missing from disk, removing from index " + file);
            try {
                Singleton.getCodeIndexer().deleteByCodeId(DigestUtils.sha1Hex(file));
            } catch (IOException ex) {
                Singleton.getLogger()
                        .warning("ERROR - caught a " + ex.getClass() + " in " + this.getClass()
                                + " indexDocsByPath deleteByFileLocationFilename for " + repoName + " "
                                + file + "\n with message: " + ex.getMessage());
            }
        }
    }
}

From source file:com.bzcentre.dapiPush.DapiReceiver.java

/**
 * Derives a BZ token by hashing the given seed concatenated with the shared
 * token (SHA-1, lower-case hex).
 *
 * @param seed caller-supplied seed value
 * @return hex-encoded SHA-1 digest of {@code seed + hoicoi_token}
 */
private static String newBzToken(String seed) {
    final String material = seed + hoicoi_token;
    return DigestUtils.sha1Hex(material);
}

From source file:io.druid.indexing.kafka.supervisor.KafkaSupervisor.java

@VisibleForTesting
String generateSequenceName(int groupId) {
    StringBuilder sb = new StringBuilder();
    Map<Integer, Long> startPartitions = taskGroups.get(groupId).partitionOffsets;

    for (Map.Entry<Integer, Long> entry : startPartitions.entrySet()) {
        sb.append(String.format("+%d(%d)", entry.getKey(), entry.getValue()));
    }/*from ww  w. j a va2 s . c  om*/
    String partitionOffsetStr = sb.toString().substring(1);

    Optional<DateTime> minimumMessageTime = taskGroups.get(groupId).minimumMessageTime;
    String minMsgTimeStr = (minimumMessageTime.isPresent()
            ? String.valueOf(minimumMessageTime.get().getMillis())
            : "");

    String dataSchema, tuningConfig;
    try {
        dataSchema = sortingMapper.writeValueAsString(spec.getDataSchema());
        tuningConfig = sortingMapper.writeValueAsString(taskTuningConfig);
    } catch (JsonProcessingException e) {
        throw Throwables.propagate(e);
    }

    String hashCode = DigestUtils.sha1Hex(dataSchema + tuningConfig + partitionOffsetStr + minMsgTimeStr)
            .substring(0, 15);

    return Joiner.on("_").join("index_kafka", dataSource, hashCode);
}

From source file:net.java.sip.communicator.gui.AuthenticationSplash.java

/**
 * Handles completion of the registration dialog. Cancel clears all captured
 * fields; Register reads the form fields, validates them, hashes the password
 * and marks parity; in both cases the dialog is hidden and disposed.
 *
 * @param actionCommand either an {@link ActionEvent} or any object whose
 *            toString() yields the command string; may be null (no-op close)
 */
private void registrationDialogDone(Object actionCommand) {
    String cmd = null;
    // Resolve the command string from either an ActionEvent or a raw object.
    if (actionCommand != null) {
        if (actionCommand instanceof ActionEvent) {
            cmd = ((ActionEvent) actionCommand).getActionCommand();
        } else {
            cmd = actionCommand.toString();
        }
    }
    if (cmd == null) {
        // do nothing
    } else if (cmd.equals(CMD_CANCEL)) {
        // Cancelled: drop everything entered so far.
        userName = null;
        lastName = null;
        name = null;
        mail = null;
        policy = null;
        password = null;
    } else if (cmd.equals(CMD_REGISTER)) {
        // Reset all field labels to the default selection colour (clears any
        // previous validation highlighting).
        nameLabel.setForeground(javax.swing.UIManager.getDefaults().getColor("JLabel.selectionForeground"));
        lastNameLabel.setForeground(javax.swing.UIManager.getDefaults().getColor("JLabel.selectionForeground"));
        mailLabel.setForeground(javax.swing.UIManager.getDefaults().getColor("JLabel.selectionForeground"));
        userNameLabel.setForeground(javax.swing.UIManager.getDefaults().getColor("JLabel.selectionForeground"));
        passwordLabel.setForeground(javax.swing.UIManager.getDefaults().getColor("JLabel.selectionForeground"));

        // Capture the form contents into the dialog's result fields.
        userName = userNameTextField.getText();
        lastName = lastNameTextField.getText();
        name = nameTextField.getText();
        mail = mailTextField.getText();
        policy = (String) policyDropDown.getSelectedItem();
        password = passwordTextField.getText();

        // Keep the dialog open when validation fails.
        if (!valid())
            return;
        // NOTE(review): a bare unsalted SHA-1 is weak for password storage; a salted
        // hash (bcrypt/scrypt/argon2) would be preferable — but the server protocol
        // may expect exactly this format, so confirm before changing.
        password = DigestUtils.sha1Hex(password);
        parity = "true";
    }
    setVisible(false);
    dispose();
}

From source file:io.druid.indexing.jdbc.supervisor.JDBCSupervisor.java

@VisibleForTesting
String generateSequenceName(int groupId) {
    StringBuilder sb = new StringBuilder();
    Map<Integer, Long> offsetMaps = taskGroups.get(groupId).offsetsMap;
    for (Map.Entry<Integer, Long> entry : offsetMaps.entrySet()) {
        sb.append(StringUtils.format("+%d(%d)", entry.getKey(), entry.getValue()));
    }// w  w  w .  j  av  a  2  s. c  o  m

    String offsetStr = sb.toString().substring(1);

    Optional<DateTime> minimumMessageTime = taskGroups.get(groupId).minimumMessageTime;
    String minMsgTimeStr = (minimumMessageTime.isPresent()
            ? String.valueOf(minimumMessageTime.get().getMillis())
            : "");

    String dataSchema, tuningConfig;
    try {
        dataSchema = sortingMapper.writeValueAsString(spec.getDataSchema());
        tuningConfig = sortingMapper.writeValueAsString(taskTuningConfig);
    } catch (JsonProcessingException e) {
        throw Throwables.propagate(e);
    }

    String hashCode = DigestUtils.sha1Hex(dataSchema + tuningConfig + offsetStr + minMsgTimeStr).substring(0,
            15);

    return Joiner.on("_").join("index_jdbc", dataSource, hashCode);
}