List of usage examples for org.apache.commons.codec.digest.DigestUtils#sha(String)
@Deprecated public static byte[] sha(String data) — deprecated; use DigestUtils.sha1(String) instead (same SHA-1 digest)
From source file:org.sakaiproject.iclicker.logic.IClickerLogic.java
/** * Verify the passed in encrypted SSO shared key is valid, * this will return false if the key is not configured * //from w w w. jav a2 s . c o m * Key must have been encoded like so (where timestamp is the unix time in seconds): * sentKey = hex(sha1(sharedKey + ":" + timestamp)) + "|" + timestamp * * @param key the passed in key (should already be sha-1 and hex encoded with the timestamp appended) * @return true if the key is valid, false if SSO shared keys are disabled * @throws IllegalArgumentException if the key format is invalid * @throws SecurityException if the key timestamp has expired or the key does not match */ public boolean verifyKey(String key) { if (StringUtils.isEmpty(key)) { throw new IllegalArgumentException("key must be set in order to verify the key"); } boolean verified = false; if (singleSignOnHandling) { // encoding process requires the key and timestamp so split them from the passed in key int splitIndex = key.lastIndexOf('|'); if ((splitIndex == -1) || (key.length() < splitIndex + 1)) { throw new IllegalArgumentException("i>clicker shared key (" + key + ") format is invalid (no |), must be {encoded key}|{timestamp}"); } String actualKey = key.substring(0, splitIndex); if (StringUtils.isEmpty(actualKey)) { throw new IllegalArgumentException("i>clicker shared key (" + key + ") format is invalid (missing encoded key), must be {encoded key}|{timestamp}"); } String timestampStr = key.substring(splitIndex + 1); if (StringUtils.isEmpty(timestampStr)) { throw new IllegalArgumentException("i>clicker shared key (" + key + ") format is invalid (missing timestamp), must be {encoded key}|{timestamp}"); } long timestamp; try { timestamp = Long.parseLong(timestampStr); } catch (NumberFormatException e) { throw new IllegalArgumentException("i>clicker shared key (" + key + ") format is invalid (non numeric timestamp), must be {encoded key}|{timestamp}"); } // check this key is still good (must be within 5 mins of now) long unixTime = 
System.currentTimeMillis() / 1000l; long timeDiff = Math.abs(timestamp - unixTime); if (timeDiff > 300l) { throw new SecurityException( "i>clicker shared key (" + key + ") timestamp is out of date, this timestamp (" + timestamp + ") is more than 5 minutes different from the current time (" + unixTime + ")"); } // finally we verify the key with the one in the config byte[] sha1Bytes = DigestUtils.sha(singleSignOnSharedkey + ":" + timestamp); String sha1Hex = Hex.encodeHexString(sha1Bytes); if (!actualKey.equals(sha1Hex)) { throw new SecurityException( "i>clicker encoded shared key (" + key + ") does not match with the key (" + sha1Hex + ") in Sakai (using timestamp: " + timestamp + ")"); } verified = true; } return verified; }
From source file:org.sakaiproject.iclicker.logic.IClickerLogicImplTest.java
/** * How to create a valid encoded key:/*from w ww . ja va 2 s . c o m*/ * Take the input key (must be 10 chars long or longer) and append ':' and the current unix timestamp in seconds * Take that string and SHA-1 encode it into a hexadecimal encoded string * Take the hex string and append '|' and the same timestamp as before * This is the encoded key which should be sent with the request * * NOTE: it is safe to pass the encode key in the clear (as a url param or otherwise) * as it is one way encrypted and very very difficult to brute force decrypt * * Sample key: * abcdef1234566890 * Sample timestamp: * 1332470760 * Encoded key: * cc80462bfc0da7e614237d7cab4b7971b0e71e9f|1332470760 */ public void testVerifyKey() { String key = "abcdef1234566890"; logicImpl.setSharedKey(key); // test expired timestamp String encodedKey = "cc80462bfc0da7e614237d7cab4b7971b0e71e9f|1332470760"; try { logicImpl.verifyKey(encodedKey); fail("should have died"); } catch (SecurityException e) { assertNotNull(e.getMessage()); } // test invalid format try { logicImpl.verifyKey("xxxxxxxxxxxxx"); fail("should have died"); } catch (IllegalArgumentException e) { assertNotNull(e.getMessage()); } try { logicImpl.verifyKey("xxxxxxxxxxxxx|"); fail("should have died"); } catch (IllegalArgumentException e) { assertNotNull(e.getMessage()); } try { logicImpl.verifyKey("xxxxxxxx|12344ffff"); fail("should have died"); } catch (IllegalArgumentException e) { assertNotNull(e.getMessage()); } // test valid encoded key long timestamp = System.currentTimeMillis() / 1000l; byte[] sha1Bytes = DigestUtils.sha(key + ":" + timestamp); encodedKey = Hex.encodeHexString(sha1Bytes) + "|" + timestamp; boolean result = logicImpl.verifyKey(encodedKey); assertTrue(result); //System.out.println("key: "+key+" , encoded: "+encodedKey); // for testing other keys /* key = "66f3b80a-96b5-41c0-a2fb-1d0b17aec523"; logicImpl.setSharedKey(key); //timestamp = System.currentTimeMillis() / 1000l; timestamp = 1333495162; sha1Bytes = 
DigestUtils.sha(key + ":" + timestamp); encodedKey = Hex.encodeHexString(sha1Bytes) + "|" + timestamp; result = logicImpl.verifyKey(encodedKey); System.out.println("key: "+key+", timestamp: "+timestamp+", encoded: "+encodedKey+", result="+result); assertTrue(result); */ }
From source file:org.torproject.collector.bridgedescs.BridgeSnapshotReader.java
/** * Reads the half-hourly snapshots of bridge descriptors from Bifroest. */// w ww.jav a2 s .com public BridgeSnapshotReader(BridgeDescriptorParser bdp, File bridgeDirectoriesDir, File statsDirectory) throws ConfigurationException { if (bdp == null || bridgeDirectoriesDir == null || statsDirectory == null) { throw new IllegalArgumentException(); } SortedSet<String> parsed = new TreeSet<String>(); File bdDir = bridgeDirectoriesDir; File pbdFile = new File(statsDirectory, "parsed-bridge-directories"); boolean modified = false; if (bdDir.exists()) { if (pbdFile.exists()) { logger.debug("Reading file " + pbdFile.getAbsolutePath() + "..."); try { BufferedReader br = new BufferedReader(new FileReader(pbdFile)); String line = null; while ((line = br.readLine()) != null) { parsed.add(line); } br.close(); logger.debug("Finished reading file " + pbdFile.getAbsolutePath() + "."); } catch (IOException e) { logger.warn("Failed reading file " + pbdFile.getAbsolutePath() + "!", e); return; } } logger.debug("Importing files in directory " + bridgeDirectoriesDir + "/..."); Set<String> descriptorImportHistory = new HashSet<String>(); int parsedFiles = 0; int skippedFiles = 0; int parsedStatuses = 0; int parsedServerDescriptors = 0; int skippedServerDescriptors = 0; int parsedExtraInfoDescriptors = 0; int skippedExtraInfoDescriptors = 0; Stack<File> filesInInputDir = new Stack<File>(); filesInInputDir.add(bdDir); while (!filesInInputDir.isEmpty()) { File pop = filesInInputDir.pop(); if (pop.isDirectory()) { for (File f : pop.listFiles()) { filesInInputDir.add(f); } } else if (!parsed.contains(pop.getName())) { try { FileInputStream in = new FileInputStream(pop); if (in.available() > 0) { TarArchiveInputStream tais = null; if (pop.getName().endsWith(".tar.gz")) { GzipCompressorInputStream gcis = new GzipCompressorInputStream(in); tais = new TarArchiveInputStream(gcis); } else if (pop.getName().endsWith(".tar")) { tais = new TarArchiveInputStream(in); } else { continue; } 
BufferedInputStream bis = new BufferedInputStream(tais); String fn = pop.getName(); String[] fnParts = fn.split("-"); if (fnParts.length != 5) { logger.warn("Invalid bridge descriptor tarball file name: " + fn + ". Skipping."); continue; } String authorityPart = String.format("%s-%s-", fnParts[0], fnParts[1]); String datePart = String.format("%s-%s-%s", fnParts[2], fnParts[3], fnParts[4]); String authorityFingerprint; switch (authorityPart) { case "from-tonga-": authorityFingerprint = "4A0CCD2DDC7995083D73F5D667100C8A5831F16D"; break; case "from-bifroest-": authorityFingerprint = "1D8F3A91C37C5D1C4C19B1AD1D0CFBE8BF72D8E1"; break; default: logger.warn("Did not recognize the bridge authority that " + "generated " + fn + ". Skipping."); continue; } String dateTime = datePart.substring(0, 10) + " " + datePart.substring(11, 13) + ":" + datePart.substring(13, 15) + ":" + datePart.substring(15, 17); while ((tais.getNextTarEntry()) != null) { ByteArrayOutputStream baos = new ByteArrayOutputStream(); int len; byte[] data = new byte[1024]; while ((len = bis.read(data, 0, 1024)) >= 0) { baos.write(data, 0, len); } byte[] allData = baos.toByteArray(); if (allData.length == 0) { continue; } String fileDigest = Hex.encodeHexString(DigestUtils.sha(allData)); String ascii = new String(allData, "US-ASCII"); BufferedReader br3 = new BufferedReader(new StringReader(ascii)); String firstLine = null; while ((firstLine = br3.readLine()) != null) { if (firstLine.startsWith("@")) { continue; } else { break; } } if (firstLine == null) { continue; } if (firstLine.startsWith("published ") || firstLine.startsWith("flag-thresholds ") || firstLine.startsWith("r ")) { bdp.parse(allData, dateTime, authorityFingerprint); parsedStatuses++; } else if (descriptorImportHistory.contains(fileDigest)) { /* Skip server descriptors or extra-info descriptors if * we parsed them before. 
*/ skippedFiles++; continue; } else { int start = -1; int sig = -1; int end = -1; String startToken = firstLine.startsWith("router ") ? "router " : "extra-info "; String sigToken = "\nrouter-signature\n"; String endToken = "\n-----END SIGNATURE-----\n"; while (end < ascii.length()) { start = ascii.indexOf(startToken, end); if (start < 0) { break; } sig = ascii.indexOf(sigToken, start); if (sig < 0) { break; } sig += sigToken.length(); end = ascii.indexOf(endToken, sig); if (end < 0) { break; } end += endToken.length(); byte[] descBytes = new byte[end - start]; System.arraycopy(allData, start, descBytes, 0, end - start); String descriptorDigest = Hex.encodeHexString(DigestUtils.sha(descBytes)); if (!descriptorImportHistory.contains(descriptorDigest)) { bdp.parse(descBytes, dateTime, authorityFingerprint); descriptorImportHistory.add(descriptorDigest); if (firstLine.startsWith("router ")) { parsedServerDescriptors++; } else { parsedExtraInfoDescriptors++; } } else { if (firstLine.startsWith("router ")) { skippedServerDescriptors++; } else { skippedExtraInfoDescriptors++; } } } } descriptorImportHistory.add(fileDigest); parsedFiles++; } bis.close(); } in.close(); /* Let's give some memory back, or we'll run out of it. */ System.gc(); parsed.add(pop.getName()); modified = true; } catch (IOException e) { logger.warn("Could not parse bridge snapshot " + pop.getName() + "!", e); continue; } } } logger.debug("Finished importing files in directory " + bridgeDirectoriesDir + "/. 
In total, we parsed " + parsedFiles + " files (skipped " + skippedFiles + ") containing " + parsedStatuses + " statuses, " + parsedServerDescriptors + " server descriptors (skipped " + skippedServerDescriptors + "), and " + parsedExtraInfoDescriptors + " extra-info descriptors " + "(skipped " + skippedExtraInfoDescriptors + ")."); if (!parsed.isEmpty() && modified) { logger.debug("Writing file " + pbdFile.getAbsolutePath() + "..."); pbdFile.getParentFile().mkdirs(); try (BufferedWriter bw = new BufferedWriter(new FileWriter(pbdFile))) { for (String f : parsed) { bw.append(f + "\n"); } logger.debug("Finished writing file " + pbdFile.getAbsolutePath() + "."); } catch (IOException e) { logger.warn("Failed writing file " + pbdFile.getAbsolutePath() + "!", e); } } } }
From source file:org.torproject.collector.bridgedescs.SanitizedBridgesWriter.java
/** * Sanitizes a network status and writes it to disk. *///www .j av a 2 s . com public void sanitizeAndStoreNetworkStatus(byte[] data, String publicationTime, String authorityFingerprint) throws ConfigurationException { if (this.persistenceProblemWithSecrets) { /* There's a persistence problem, so we shouldn't scrub more IP * addresses in this execution. */ return; } if (publicationTime.compareTo(maxNetworkStatusPublishedTime) > 0) { maxNetworkStatusPublishedTime = publicationTime; } if (this.bridgeSanitizingCutOffTimestamp.compareTo(publicationTime) > 0) { String text = "Sanitizing and storing network status with " + "publication time outside our descriptor sanitizing " + "interval."; if (this.haveWarnedAboutInterval) { logger.debug(text); } else { logger.warn(text); this.haveWarnedAboutInterval = true; } } /* Parse the given network status line by line. */ StringBuilder header = new StringBuilder(); SortedMap<String, String> scrubbedLines = new TreeMap<String, String>(); try { StringBuilder scrubbed = new StringBuilder(); BufferedReader br = new BufferedReader(new StringReader(new String(data, "US-ASCII"))); String line = null; String mostRecentDescPublished = null; byte[] fingerprintBytes = null; String descPublicationTime = null; String hashedBridgeIdentityHex = null; while ((line = br.readLine()) != null) { /* Use publication time from "published" line instead of the * file's last-modified time. Don't copy over the line, because * we're going to write a "published" line below. */ if (line.startsWith("published ")) { publicationTime = line.substring("published ".length()); /* Additional header lines don't have to be cleaned up. */ } else if (line.startsWith("flag-thresholds ")) { header.append(line + "\n"); /* r lines contain sensitive information that needs to be removed * or replaced. */ } else if (line.startsWith("r ")) { /* Clear buffer from previously scrubbed lines. 
*/ if (scrubbed.length() > 0) { String scrubbedLine = scrubbed.toString(); scrubbedLines.put(hashedBridgeIdentityHex, scrubbedLine); scrubbed = new StringBuilder(); } /* Parse the relevant parts of this r line. */ String[] parts = line.split(" "); if (parts.length < 9) { logger.warn( "Illegal line '" + line + "' in bridge network " + "status. Skipping descriptor."); return; } if (!Base64.isBase64(parts[2])) { logger.warn("Illegal base64 character in r line '" + parts[2] + "'. Skipping descriptor."); return; } fingerprintBytes = Base64.decodeBase64(parts[2] + "=="); descPublicationTime = parts[4] + " " + parts[5]; String address = parts[6]; String orPort = parts[7]; String dirPort = parts[8]; /* Determine most recent descriptor publication time. */ if (descPublicationTime.compareTo(publicationTime) <= 0 && (mostRecentDescPublished == null || descPublicationTime.compareTo(mostRecentDescPublished) > 0)) { mostRecentDescPublished = descPublicationTime; } /* Write scrubbed r line to buffer. */ byte[] hashedBridgeIdentity = DigestUtils.sha(fingerprintBytes); String hashedBridgeIdentityBase64 = Base64.encodeBase64String(hashedBridgeIdentity).substring(0, 27); hashedBridgeIdentityHex = Hex.encodeHexString(hashedBridgeIdentity); String descriptorIdentifier = parts[3]; String hashedDescriptorIdentifier = Base64 .encodeBase64String(DigestUtils.sha(Base64.decodeBase64(descriptorIdentifier + "=="))) .substring(0, 27); String scrubbedAddress = scrubIpv4Address(address, fingerprintBytes, descPublicationTime); String nickname = parts[1]; String scrubbedOrPort = this.scrubTcpPort(orPort, fingerprintBytes, descPublicationTime); String scrubbedDirPort = this.scrubTcpPort(dirPort, fingerprintBytes, descPublicationTime); scrubbed.append("r " + nickname + " " + hashedBridgeIdentityBase64 + " " + hashedDescriptorIdentifier + " " + descPublicationTime + " " + scrubbedAddress + " " + scrubbedOrPort + " " + scrubbedDirPort + "\n"); /* Sanitize any addresses in a lines using the fingerprint 
and * descriptor publication time from the previous r line. */ } else if (line.startsWith("a ")) { String scrubbedOrAddress = scrubOrAddress(line.substring("a ".length()), fingerprintBytes, descPublicationTime); if (scrubbedOrAddress != null) { scrubbed.append("a " + scrubbedOrAddress + "\n"); } else { logger.warn( "Invalid address in line '" + line + "' in bridge network status. Skipping line!"); } /* Nothing special about s, w, and p lines; just copy them. */ } else if (line.startsWith("s ") || line.equals("s") || line.startsWith("w ") || line.equals("w") || line.startsWith("p ") || line.equals("p")) { scrubbed.append(line + "\n"); /* There should be nothing else but r, a, w, p, and s lines in the * network status. If there is, we should probably learn before * writing anything to the sanitized descriptors. */ } else { logger.debug("Unknown line '" + line + "' in bridge " + "network status. Not writing to disk!"); return; } } br.close(); if (scrubbed.length() > 0) { String scrubbedLine = scrubbed.toString(); scrubbedLines.put(hashedBridgeIdentityHex, scrubbedLine); scrubbed = new StringBuilder(); } /* Check if we can tell from the descriptor publication times * whether this status is possibly stale. */ SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); formatter.setTimeZone(TimeZone.getTimeZone("UTC")); if (formatter.parse(publicationTime).getTime() - formatter.parse(mostRecentDescPublished).getTime() > 60L * 60L * 1000L) { logger.warn("The most recent descriptor in the bridge " + "network status published at " + publicationTime + " was " + "published at " + mostRecentDescPublished + " which is " + "more than 1 hour before the status. This is a sign for " + "the status being stale. 
Please check!"); } } catch (ParseException e) { logger.warn("Could not parse timestamp in " + "bridge network status.", e); return; } catch (IOException e) { logger.warn("Could not parse bridge network " + "status.", e); return; } /* Write the sanitized network status to disk. */ try { String syear = publicationTime.substring(0, 4); String smonth = publicationTime.substring(5, 7); String sday = publicationTime.substring(8, 10); String stime = publicationTime.substring(11, 13) + publicationTime.substring(14, 16) + publicationTime.substring(17, 19); File tarballFile = new File( this.sanitizedBridgesDirectory.getAbsolutePath() + "/" + syear + "/" + smonth + "/statuses/" + sday + "/" + syear + smonth + sday + "-" + stime + "-" + authorityFingerprint); File rsyncFile = new File(recentPathName, "statuses/" + tarballFile.getName()); File[] outputFiles = new File[] { tarballFile, rsyncFile }; for (File outputFile : outputFiles) { outputFile.getParentFile().mkdirs(); BufferedWriter bw = new BufferedWriter(new FileWriter(outputFile)); bw.write(Annotation.Status.toString()); bw.write("published " + publicationTime + "\n"); bw.write(header.toString()); for (String scrubbed : scrubbedLines.values()) { bw.write(scrubbed); } bw.close(); } } catch (IOException e) { logger.warn("Could not write sanitized bridge " + "network status to disk.", e); return; } }
From source file:org.torproject.collector.bridgedescs.SanitizedBridgesWriter.java
/** * Sanitizes a bridge server descriptor and writes it to disk. *//*from w w w . j a v a 2s . c o m*/ public void sanitizeAndStoreServerDescriptor(byte[] data) { if (this.persistenceProblemWithSecrets) { /* There's a persistence problem, so we shouldn't scrub more IP * addresses in this execution. */ return; } /* Parse descriptor to generate a sanitized version. */ String scrubbedDesc = null; String published = null; String masterKeyEd25519FromIdentityEd25519 = null; try { BufferedReader br = new BufferedReader(new StringReader(new String(data, "US-ASCII"))); StringBuilder scrubbed = new StringBuilder(); String line = null; String hashedBridgeIdentity = null; String address = null; String routerLine = null; String scrubbedRouterLine = null; String scrubbedAddress = null; String masterKeyEd25519 = null; List<String> orAddresses = null; List<String> scrubbedOrAddresses = null; boolean skipCrypto = false; while ((line = br.readLine()) != null) { /* Skip all crypto parts that might be used to derive the bridge's * identity fingerprint. */ if (skipCrypto && !line.startsWith("-----END ")) { continue; /* Store the router line for later processing, because we may need * the bridge identity fingerprint for replacing the IP address in * the scrubbed version. */ } else if (line.startsWith("router ")) { String[] parts = line.split(" "); if (parts.length != 6) { logger.warn("Invalid router line: '" + line + "'. Skipping."); return; } address = parts[2]; routerLine = line; /* Store or-address parts in a list and sanitize them when we have * read the fingerprint. */ } else if (line.startsWith("or-address ")) { if (orAddresses == null) { orAddresses = new ArrayList<String>(); } orAddresses.add(line.substring("or-address ".length())); /* Parse the publication time to see if we're still inside the * sanitizing interval. 
*/ } else if (line.startsWith("published ")) { published = line.substring("published ".length()); if (published.compareTo(maxServerDescriptorPublishedTime) > 0) { maxServerDescriptorPublishedTime = published; } if (this.bridgeSanitizingCutOffTimestamp.compareTo(published) > 0) { String text = "Sanitizing and storing " + "server descriptor with publication time outside our " + "descriptor sanitizing interval."; if (this.haveWarnedAboutInterval) { logger.debug(text); } else { logger.warn(text); this.haveWarnedAboutInterval = true; } } scrubbed.append(line + "\n"); /* Parse the fingerprint to determine the hashed bridge * identity. */ } else if (line.startsWith("opt fingerprint ") || line.startsWith("fingerprint ")) { String fingerprint = line .substring( line.startsWith("opt ") ? "opt fingerprint".length() : "fingerprint".length()) .replaceAll(" ", "").toLowerCase(); byte[] fingerprintBytes = Hex.decodeHex(fingerprint.toCharArray()); hashedBridgeIdentity = DigestUtils.shaHex(fingerprintBytes).toLowerCase(); try { scrubbedAddress = scrubIpv4Address(address, fingerprintBytes, published); if (orAddresses != null) { scrubbedOrAddresses = new ArrayList<String>(); for (String orAddress : orAddresses) { String scrubbedOrAddress = scrubOrAddress(orAddress, fingerprintBytes, published); if (scrubbedOrAddress != null) { scrubbedOrAddresses.add(scrubbedOrAddress); } else { logger.warn("Invalid address in line " + "'or-address " + orAddress + "' in bridge server " + "descriptor. 
Skipping line!"); } } } String[] routerLineParts = routerLine.split(" "); String nickname = routerLineParts[1]; String scrubbedOrPort = this.scrubTcpPort(routerLineParts[3], fingerprintBytes, published); String scrubbedDirPort = this.scrubTcpPort(routerLineParts[4], fingerprintBytes, published); String scrubbedSocksPort = this.scrubTcpPort(routerLineParts[5], fingerprintBytes, published); scrubbedRouterLine = String.format("router %s %s %s %s %s%n", nickname, scrubbedAddress, scrubbedOrPort, scrubbedDirPort, scrubbedSocksPort); } catch (IOException e) { /* There's a persistence problem, so we shouldn't scrub more * IP addresses in this execution. */ this.persistenceProblemWithSecrets = true; return; } scrubbed.append((line.startsWith("opt ") ? "opt " : "") + "fingerprint"); for (int i = 0; i < hashedBridgeIdentity.length() / 4; i++) { scrubbed.append(" " + hashedBridgeIdentity.substring(4 * i, 4 * (i + 1)).toUpperCase()); } scrubbed.append("\n"); /* Replace the contact line (if present) with a generic one. */ } else if (line.startsWith("contact ")) { scrubbed.append("contact somebody\n"); /* When we reach the signature, we're done. Write the sanitized * descriptor to disk below. */ } else if (line.startsWith("router-signature")) { scrubbedDesc = scrubbedRouterLine; if (scrubbedOrAddresses != null) { for (String scrubbedOrAddress : scrubbedOrAddresses) { scrubbedDesc += "or-address " + scrubbedOrAddress + "\n"; } } scrubbedDesc += scrubbed.toString(); break; /* Replace extra-info digest with the hashed digest of the * non-scrubbed descriptor. */ } else if (line.startsWith("opt extra-info-digest ") || line.startsWith("extra-info-digest ")) { String[] parts = line.split(" "); if (line.startsWith("opt ")) { scrubbed.append("opt "); parts = line.substring(4).split(" "); } if (parts.length > 3) { logger.warn("extra-info-digest line contains more arguments than" + "expected: '" + line + "'. 
Skipping descriptor."); return; } scrubbed.append("extra-info-digest " + DigestUtils.shaHex(Hex.decodeHex(parts[1].toCharArray())).toUpperCase()); if (parts.length > 2) { if (!Base64.isBase64(parts[2])) { logger.warn("Illegal base64 character in extra-info-digest line '" + line + "'. Skipping descriptor."); return; } scrubbed.append( " " + Base64.encodeBase64String(DigestUtils.sha256(Base64.decodeBase64(parts[2]))) .replaceAll("=", "")); } scrubbed.append("\n"); /* Possibly sanitize reject lines if they contain the bridge's own * IP address. */ } else if (line.startsWith("reject ")) { if (address != null && line.startsWith("reject " + address)) { scrubbed.append("reject " + scrubbedAddress + line.substring("reject ".length() + address.length()) + "\n"); } else { scrubbed.append(line + "\n"); } /* Extract master-key-ed25519 from identity-ed25519. */ } else if (line.equals("identity-ed25519")) { StringBuilder sb = new StringBuilder(); while ((line = br.readLine()) != null && !line.equals("-----END ED25519 CERT-----")) { if (line.equals("-----BEGIN ED25519 CERT-----")) { continue; } sb.append(line); } masterKeyEd25519FromIdentityEd25519 = this .parseMasterKeyEd25519FromIdentityEd25519(sb.toString()); if (masterKeyEd25519FromIdentityEd25519 == null) { logger.warn("Could not parse master-key-ed25519 from " + "identity-ed25519. Skipping descriptor."); return; } String sha256MasterKeyEd25519 = Base64 .encodeBase64String(DigestUtils .sha256(Base64.decodeBase64(masterKeyEd25519FromIdentityEd25519 + "="))) .replaceAll("=", ""); scrubbed.append("master-key-ed25519 " + sha256MasterKeyEd25519 + "\n"); if (masterKeyEd25519 != null && !masterKeyEd25519.equals(masterKeyEd25519FromIdentityEd25519)) { logger.warn("Mismatch between identity-ed25519 and " + "master-key-ed25519. Skipping."); return; } /* Verify that identity-ed25519 and master-key-ed25519 match. 
*/ } else if (line.startsWith("master-key-ed25519 ")) { masterKeyEd25519 = line.substring(line.indexOf(" ") + 1); if (masterKeyEd25519FromIdentityEd25519 != null && !masterKeyEd25519FromIdentityEd25519.equals(masterKeyEd25519)) { logger.warn("Mismatch between identity-ed25519 and " + "master-key-ed25519. Skipping."); return; } /* Write the following lines unmodified to the sanitized * descriptor. */ } else if (line.startsWith("accept ") || line.startsWith("platform ") || line.startsWith("opt protocols ") || line.startsWith("protocols ") || line.startsWith("proto ") || line.startsWith("uptime ") || line.startsWith("bandwidth ") || line.startsWith("opt hibernating ") || line.startsWith("hibernating ") || line.startsWith("ntor-onion-key ") || line.equals("opt hidden-service-dir") || line.equals("hidden-service-dir") || line.equals("opt caches-extra-info") || line.equals("caches-extra-info") || line.equals("opt allow-single-hop-exits") || line.equals("allow-single-hop-exits") || line.startsWith("ipv6-policy ") || line.equals("tunnelled-dir-server")) { scrubbed.append(line + "\n"); /* Replace node fingerprints in the family line with their hashes * and leave nicknames unchanged. */ } else if (line.startsWith("family ")) { StringBuilder familyLine = new StringBuilder("family"); for (String s : line.substring(7).split(" ")) { if (s.startsWith("$")) { familyLine.append(" $" + DigestUtils.shaHex(Hex.decodeHex(s.substring(1).toCharArray())) .toUpperCase()); } else { familyLine.append(" " + s); } } scrubbed.append(familyLine.toString() + "\n"); /* Skip the purpose line that the bridge authority adds to its * cached-descriptors file. */ } else if (line.startsWith("@purpose ")) { continue; /* Skip all crypto parts that might leak the bridge's identity * fingerprint. 
*/ } else if (line.startsWith("-----BEGIN ") || line.equals("onion-key") || line.equals("signing-key") || line.equals("onion-key-crosscert") || line.startsWith("ntor-onion-key-crosscert ")) { skipCrypto = true; /* Stop skipping lines when the crypto parts are over. */ } else if (line.startsWith("-----END ")) { skipCrypto = false; /* Skip the ed25519 signature; we'll include a SHA256 digest of * the SHA256 descriptor digest in router-digest-sha256. */ } else if (line.startsWith("router-sig-ed25519 ")) { continue; /* If we encounter an unrecognized line, stop parsing and print * out a warning. We might have overlooked sensitive information * that we need to remove or replace for the sanitized descriptor * version. */ } else { logger.warn("Unrecognized line '" + line + "'. Skipping."); return; } } br.close(); } catch (Exception e) { logger.warn("Could not parse server " + "descriptor.", e); return; } /* Determine filename of sanitized server descriptor. */ String descriptorDigest = null; try { String ascii = new String(data, "US-ASCII"); String startToken = "router "; String sigToken = "\nrouter-signature\n"; int start = ascii.indexOf(startToken); int sig = ascii.indexOf(sigToken) + sigToken.length(); if (start >= 0 && sig >= 0 && sig > start) { byte[] forDigest = new byte[sig - start]; System.arraycopy(data, start, forDigest, 0, sig - start); descriptorDigest = DigestUtils.shaHex(DigestUtils.sha(forDigest)); } } catch (UnsupportedEncodingException e) { /* Handle below. 
*/ } if (descriptorDigest == null) { logger.warn("Could not calculate server " + "descriptor digest."); return; } String descriptorDigestSha256Base64 = null; if (masterKeyEd25519FromIdentityEd25519 != null) { try { String ascii = new String(data, "US-ASCII"); String startToken = "router "; String sigToken = "\n-----END SIGNATURE-----\n"; int start = ascii.indexOf(startToken); int sig = ascii.indexOf(sigToken) + sigToken.length(); if (start >= 0 && sig >= 0 && sig > start) { byte[] forDigest = new byte[sig - start]; System.arraycopy(data, start, forDigest, 0, sig - start); descriptorDigestSha256Base64 = Base64 .encodeBase64String(DigestUtils.sha256(DigestUtils.sha256(forDigest))) .replaceAll("=", ""); } } catch (UnsupportedEncodingException e) { /* Handle below. */ } if (descriptorDigestSha256Base64 == null) { logger.warn("Could not calculate server " + "descriptor SHA256 digest."); return; } } String dyear = published.substring(0, 4); String dmonth = published.substring(5, 7); File tarballFile = new File(this.sanitizedBridgesDirectory.getAbsolutePath() + "/" + dyear + "/" + dmonth + "/server-descriptors/" + "/" + descriptorDigest.charAt(0) + "/" + descriptorDigest.charAt(1) + "/" + descriptorDigest); try { File rsyncCatFile = new File(config.getPath(Key.RecentPath).toFile(), "bridge-descriptors/server-descriptors/" + this.rsyncCatString + "-server-descriptors.tmp"); File[] outputFiles = new File[] { tarballFile, rsyncCatFile }; boolean[] append = new boolean[] { false, true }; for (int i = 0; i < outputFiles.length; i++) { File outputFile = outputFiles[i]; boolean appendToFile = append[i]; if (outputFile.exists() && !appendToFile) { /* We already stored this descriptor to disk before, so let's * not store it yet another time. 
*/ break; } outputFile.getParentFile().mkdirs(); BufferedWriter bw = new BufferedWriter(new FileWriter(outputFile, appendToFile)); bw.write(Annotation.BridgeServer.toString()); bw.write(scrubbedDesc); if (descriptorDigestSha256Base64 != null) { bw.write("router-digest-sha256 " + descriptorDigestSha256Base64 + "\n"); } bw.write("router-digest " + descriptorDigest.toUpperCase() + "\n"); bw.close(); } } catch (ConfigurationException | IOException e) { logger.warn("Could not write sanitized server " + "descriptor to disk.", e); return; } }
From source file:org.torproject.collector.bridgedescs.SanitizedBridgesWriter.java
/**
 * Sanitizes an extra-info descriptor and writes it to disk.
 *
 * <p>The raw descriptor is parsed line by line: the bridge identity
 * fingerprint is replaced by its (deprecated SHA-1) hash, transport lines
 * are reduced to the transport name, ed25519 identity material is replaced
 * by a SHA-256 of the master key, and only a known-safe whitelist of
 * statistics lines is copied through. Any unrecognized line aborts the
 * sanitization so that sensitive data is never written out accidentally.
 *
 * @param data raw extra-info descriptor bytes (expected to be US-ASCII)
 */
public void sanitizeAndStoreExtraInfoDescriptor(byte[] data) {
    /* Parse descriptor to generate a sanitized version. */
    String scrubbedDesc = null;
    String published = null;
    String masterKeyEd25519FromIdentityEd25519 = null;
    try {
        BufferedReader br = new BufferedReader(new StringReader(new String(data, "US-ASCII")));
        String line = null;
        StringBuilder scrubbed = null;
        String hashedBridgeIdentity = null;
        String masterKeyEd25519 = null;
        while ((line = br.readLine()) != null) {
            /* Parse bridge identity from extra-info line and replace it with
             * its hash in the sanitized descriptor. */
            String[] parts = line.split(" ");
            if (line.startsWith("extra-info ")) {
                if (parts.length < 3) {
                    logger.debug("Illegal line in extra-info descriptor: '" + line
                            + "'. Skipping descriptor.");
                    return;
                }
                // NOTE(review): DigestUtils.shaHex is deprecated; the
                // non-deprecated equivalent is sha1Hex (commons-codec 1.11+).
                hashedBridgeIdentity = DigestUtils.shaHex(Hex.decodeHex(parts[2].toCharArray())).toLowerCase();
                scrubbed = new StringBuilder("extra-info " + parts[1] + " "
                        + hashedBridgeIdentity.toUpperCase() + "\n");
            /* Parse the publication time to determine the file name. */
            } else if (line.startsWith("published ")) {
                scrubbed.append(line + "\n");
                published = line.substring("published ".length());
                // Track the latest publication time seen across descriptors.
                if (published.compareTo(maxExtraInfoDescriptorPublishedTime) > 0) {
                    maxExtraInfoDescriptorPublishedTime = published;
                }
            /* Remove everything from transport lines except the transport
             * name. */
            } else if (line.startsWith("transport ")) {
                if (parts.length < 3) {
                    logger.debug("Illegal line in extra-info descriptor: '" + line
                            + "'. Skipping descriptor.");
                    return;
                }
                scrubbed.append("transport " + parts[1] + "\n");
            /* Skip transport-info lines entirely. */
            } else if (line.startsWith("transport-info ")) {
            /* Extract master-key-ed25519 from identity-ed25519. */
            } else if (line.equals("identity-ed25519")) {
                // Collect the base64 certificate body between the BEGIN/END
                // markers; the inner while advances the shared 'line' cursor.
                StringBuilder sb = new StringBuilder();
                while ((line = br.readLine()) != null && !line.equals("-----END ED25519 CERT-----")) {
                    if (line.equals("-----BEGIN ED25519 CERT-----")) {
                        continue;
                    }
                    sb.append(line);
                }
                masterKeyEd25519FromIdentityEd25519 = this
                        .parseMasterKeyEd25519FromIdentityEd25519(sb.toString());
                // Replace the key with a base64-encoded SHA-256 hash,
                // stripping base64 padding characters.
                String sha256MasterKeyEd25519 = Base64
                        .encodeBase64String(DigestUtils
                                .sha256(Base64.decodeBase64(masterKeyEd25519FromIdentityEd25519 + "=")))
                        .replaceAll("=", "");
                scrubbed.append("master-key-ed25519 " + sha256MasterKeyEd25519 + "\n");
                if (masterKeyEd25519 != null
                        && !masterKeyEd25519.equals(masterKeyEd25519FromIdentityEd25519)) {
                    logger.warn("Mismatch between identity-ed25519 and "
                            + "master-key-ed25519. Skipping.");
                    return;
                }
            /* Verify that identity-ed25519 and master-key-ed25519 match. */
            } else if (line.startsWith("master-key-ed25519 ")) {
                masterKeyEd25519 = line.substring(line.indexOf(" ") + 1);
                if (masterKeyEd25519FromIdentityEd25519 != null
                        && !masterKeyEd25519FromIdentityEd25519.equals(masterKeyEd25519)) {
                    logger.warn("Mismatch between identity-ed25519 and "
                            + "master-key-ed25519. Skipping.");
                    return;
                }
            /* Write the following lines unmodified to the sanitized
             * descriptor. */
            } else if (line.startsWith("write-history ") || line.startsWith("read-history ")
                    || line.startsWith("geoip-start-time ") || line.startsWith("geoip-client-origins ")
                    || line.startsWith("geoip-db-digest ") || line.startsWith("geoip6-db-digest ")
                    || line.startsWith("conn-bi-direct ") || line.startsWith("bridge-")
                    || line.startsWith("dirreq-") || line.startsWith("cell-")
                    || line.startsWith("entry-") || line.startsWith("exit-")
                    || line.startsWith("hidserv-")) {
                scrubbed.append(line + "\n");
            /* When we reach the signature, we're done. Write the sanitized
             * descriptor to disk below. */
            } else if (line.startsWith("router-signature")) {
                scrubbedDesc = scrubbed.toString();
                break;
            /* Skip the ed25519 signature; we'll include a SHA256 digest of
             * the SHA256 descriptor digest in router-digest-sha256. */
            } else if (line.startsWith("router-sig-ed25519 ")) {
                continue;
            /* If we encounter an unrecognized line, stop parsing and print
             * out a warning. We might have overlooked sensitive information
             * that we need to remove or replace for the sanitized descriptor
             * version. */
            } else {
                logger.warn("Unrecognized line '" + line + "'. Skipping.");
                return;
            }
        }
        br.close();
    } catch (IOException e) {
        logger.warn("Could not parse extra-info " + "descriptor.", e);
        return;
    } catch (DecoderException e) {
        logger.warn("Could not parse extra-info " + "descriptor.", e);
        return;
    }
    /* Determine filename of sanitized extra-info descriptor: the SHA-1 hex
     * digest of the original descriptor from "extra-info " through the
     * "router-signature" line. */
    String descriptorDigest = null;
    try {
        String ascii = new String(data, "US-ASCII");
        String startToken = "extra-info ";
        String sigToken = "\nrouter-signature\n";
        int start = ascii.indexOf(startToken);
        int sig = ascii.indexOf(sigToken) + sigToken.length();
        if (start >= 0 && sig >= 0 && sig > start) {
            byte[] forDigest = new byte[sig - start];
            System.arraycopy(data, start, forDigest, 0, sig - start);
            // NOTE(review): hex-of-SHA1-of-SHA1 as written; DigestUtils.sha
            // is the deprecated alias for SHA-1.
            descriptorDigest = DigestUtils.shaHex(DigestUtils.sha(forDigest));
        }
    } catch (UnsupportedEncodingException e) {
        /* Handle below. */
    }
    if (descriptorDigest == null) {
        logger.warn("Could not calculate extra-info " + "descriptor digest.");
        return;
    }
    /* Only computed when the descriptor carried an ed25519 identity:
     * base64(SHA256(SHA256(descriptor through END SIGNATURE))), unpadded. */
    String descriptorDigestSha256Base64 = null;
    if (masterKeyEd25519FromIdentityEd25519 != null) {
        try {
            String ascii = new String(data, "US-ASCII");
            String startToken = "extra-info ";
            String sigToken = "\n-----END SIGNATURE-----\n";
            int start = ascii.indexOf(startToken);
            int sig = ascii.indexOf(sigToken) + sigToken.length();
            if (start >= 0 && sig >= 0 && sig > start) {
                byte[] forDigest = new byte[sig - start];
                System.arraycopy(data, start, forDigest, 0, sig - start);
                descriptorDigestSha256Base64 = Base64
                        .encodeBase64String(DigestUtils.sha256(DigestUtils.sha256(forDigest)))
                        .replaceAll("=", "");
            }
        } catch (UnsupportedEncodingException e) {
            /* Handle below. */
        }
        if (descriptorDigestSha256Base64 == null) {
            logger.warn("Could not calculate extra-info " + "descriptor SHA256 digest.");
            return;
        }
    }
    /* File layout: {base}/{yyyy}/{mm}/extra-infos/{d0}/{d1}/{digest},
     * sharded by the first two digest characters. */
    String dyear = published.substring(0, 4);
    String dmonth = published.substring(5, 7);
    File tarballFile = new File(this.sanitizedBridgesDirectory.getAbsolutePath() + "/" + dyear + "/"
            + dmonth + "/extra-infos/" + descriptorDigest.charAt(0) + "/"
            + descriptorDigest.charAt(1) + "/" + descriptorDigest);
    try {
        File rsyncCatFile = new File(config.getPath(Key.RecentPath).toFile(),
                "bridge-descriptors/extra-infos/" + this.rsyncCatString + "-extra-infos.tmp");
        /* Write to the tarball location (no append) and to the rsync
         * concatenation file (append). */
        File[] outputFiles = new File[] { tarballFile, rsyncCatFile };
        boolean[] append = new boolean[] { false, true };
        for (int i = 0; i < outputFiles.length; i++) {
            File outputFile = outputFiles[i];
            boolean appendToFile = append[i];
            if (outputFile.exists() && !appendToFile) {
                /* We already stored this descriptor to disk before, so let's
                 * not store it yet another time. */
                break;
            }
            outputFile.getParentFile().mkdirs();
            BufferedWriter bw = new BufferedWriter(new FileWriter(outputFile, appendToFile));
            bw.write(Annotation.BridgeExtraInfo.toString());
            bw.write(scrubbedDesc);
            if (descriptorDigestSha256Base64 != null) {
                bw.write("router-digest-sha256 " + descriptorDigestSha256Base64 + "\n");
            }
            bw.write("router-digest " + descriptorDigest.toUpperCase() + "\n");
            bw.close();
        }
    } catch (Exception e) {
        logger.warn("Could not write sanitized " + "extra-info descriptor to disk.", e);
    }
}
From source file:org.torproject.collector.relaydescs.CachedRelayDescriptorReader.java
/**
 * Reads cached-descriptor files from one or more directories and
 * passes them to the given descriptor parser.
 *
 * <p>Handles three kinds of cached files: {@code cached-consensus}
 * (checked for staleness, then parsed whole), {@code v3-status-votes}
 * (split on "network-status-version " tokens), and
 * {@code cached-descriptors}/{@code cached-extrainfo} files (split into
 * individual descriptors by start/signature/end tokens). SHA-1 digests of
 * everything parsed are recorded in an import-history file so that
 * subsequent runs can skip already-imported data.
 *
 * @param rdp parser that receives each raw descriptor; must not be null
 * @param inputDirectories one or more Tor data directories to scan
 * @param statsDirectory directory holding the import-history file
 * @throws IllegalArgumentException if any argument is null/empty
 */
public CachedRelayDescriptorReader(RelayDescriptorParser rdp, String[] inputDirectories,
        File statsDirectory) {
    if (rdp == null || inputDirectories == null || inputDirectories.length == 0
            || statsDirectory == null) {
        throw new IllegalArgumentException();
    }
    StringBuilder dumpStats = new StringBuilder("Finished importing "
            + "relay descriptors from local Tor data directories:");
    /* Read import history containing SHA-1 digests of previously parsed
     * statuses and descriptors, so that we can skip them in this run. */
    Set<String> lastImportHistory = new HashSet<String>();
    Set<String> currentImportHistory = new HashSet<String>();
    File importHistoryFile = new File(statsDirectory, "cacheddesc-import-history");
    if (importHistoryFile.exists()) {
        try {
            BufferedReader br = new BufferedReader(new FileReader(importHistoryFile));
            String line;
            while ((line = br.readLine()) != null) {
                lastImportHistory.add(line);
            }
            br.close();
        } catch (IOException e) {
            logger.warn("Could not read import history from "
                    + importHistoryFile.getAbsolutePath() + ".", e);
        }
    }
    /* Read cached descriptors directories. */
    for (String inputDirectory : inputDirectories) {
        File cachedDescDir = new File(inputDirectory);
        if (!cachedDescDir.exists()) {
            logger.warn("Directory " + cachedDescDir.getAbsolutePath()
                    + " does not exist. Skipping.");
            continue;
        }
        logger.debug("Reading " + cachedDescDir.getAbsolutePath() + " directory.");
        /* Depth-first collection of all regular files under the directory. */
        SortedSet<File> cachedDescFiles = new TreeSet<File>();
        Stack<File> files = new Stack<File>();
        files.add(cachedDescDir);
        while (!files.isEmpty()) {
            File file = files.pop();
            if (file.isDirectory()) {
                files.addAll(Arrays.asList(file.listFiles()));
            } else {
                cachedDescFiles.add(file);
            }
        }
        for (File f : cachedDescFiles) {
            try {
                // descriptors may contain non-ASCII chars; read as bytes to
                // determine digests
                BufferedInputStream bis = new BufferedInputStream(new FileInputStream(f));
                ByteArrayOutputStream baos = new ByteArrayOutputStream();
                int len;
                byte[] data = new byte[1024];
                while ((len = bis.read(data, 0, 1024)) >= 0) {
                    baos.write(data, 0, len);
                }
                bis.close();
                byte[] allData = baos.toByteArray();
                if (f.getName().equals("cached-consensus")) {
                    /* Check if directory information is stale, i.e. the
                     * valid-after time is more than six hours in the past. */
                    BufferedReader br = new BufferedReader(
                            new StringReader(new String(allData, "US-ASCII")));
                    String line = null;
                    while ((line = br.readLine()) != null) {
                        if (line.startsWith("valid-after ")) {
                            dumpStats.append("\n" + f.getName() + ": "
                                    + line.substring("valid-after ".length()));
                            SimpleDateFormat dateTimeFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
                            dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
                            if (dateTimeFormat.parse(line.substring("valid-after ".length()))
                                    .getTime() < System.currentTimeMillis()
                                            - 6L * 60L * 60L * 1000L) {
                                logger.warn("Cached descriptor files in "
                                        + cachedDescDir.getAbsolutePath() + " are stale. "
                                        + "The valid-after line in cached-consensus is '" + line + "'.");
                                dumpStats.append(" (stale!)");
                            }
                            break;
                        }
                    }
                    br.close();
                    /* Parse the cached consensus if we haven't parsed it before
                     * (but regardless of whether it's stale or not). */
                    if (rdp != null) {
                        String digest = Hex.encodeHexString(DigestUtils.sha(allData));
                        if (!lastImportHistory.contains(digest)
                                && !currentImportHistory.contains(digest)) {
                            rdp.parse(allData);
                        } else {
                            dumpStats.append(" (skipped)");
                        }
                        currentImportHistory.add(digest);
                    }
                } else if (f.getName().equals("v3-status-votes")) {
                    /* The votes file concatenates several statuses; split on
                     * the "network-status-version " token. */
                    int parsedNum = 0;
                    int skippedNum = 0;
                    String ascii = new String(allData, "US-ASCII");
                    String startToken = "network-status-version ";
                    int end = ascii.length();
                    int start = ascii.indexOf(startToken);
                    while (start >= 0 && start < end) {
                        int next = ascii.indexOf(startToken, start + 1);
                        if (next < 0) {
                            next = end;
                        }
                        if (start < next) {
                            byte[] rawNetworkStatusBytes = new byte[next - start];
                            System.arraycopy(allData, start, rawNetworkStatusBytes, 0, next - start);
                            if (rdp != null) {
                                String digest = Hex.encodeHexString(DigestUtils.sha(rawNetworkStatusBytes));
                                if (!lastImportHistory.contains(digest)
                                        && !currentImportHistory.contains(digest)) {
                                    rdp.parse(rawNetworkStatusBytes);
                                    parsedNum++;
                                } else {
                                    skippedNum++;
                                }
                                currentImportHistory.add(digest);
                            }
                        }
                        start = next;
                    }
                    dumpStats.append("\n" + f.getName() + ": parsed " + parsedNum + ", skipped "
                            + skippedNum + " votes");
                } else if (f.getName().startsWith("cached-descriptors")
                        || f.getName().startsWith("cached-extrainfo")) {
                    /* Split the file into individual descriptors: each runs
                     * from its start token through "-----END SIGNATURE-----". */
                    String ascii = new String(allData, "US-ASCII");
                    int start = -1;
                    int sig = -1;
                    int end = -1;
                    String startToken = f.getName().startsWith("cached-descriptors") ? "router "
                            : "extra-info ";
                    String sigToken = "\nrouter-signature\n";
                    String endToken = "\n-----END SIGNATURE-----\n";
                    int parsedNum = 0;
                    int skippedNum = 0;
                    while (end < ascii.length()) {
                        start = ascii.indexOf(startToken, end);
                        if (start < 0) {
                            break;
                        }
                        sig = ascii.indexOf(sigToken, start);
                        if (sig < 0) {
                            break;
                        }
                        sig += sigToken.length();
                        end = ascii.indexOf(endToken, sig);
                        if (end < 0) {
                            break;
                        }
                        end += endToken.length();
                        byte[] descBytes = new byte[end - start];
                        System.arraycopy(allData, start, descBytes, 0, end - start);
                        if (rdp != null) {
                            String digest = Hex.encodeHexString(DigestUtils.sha(descBytes));
                            if (!lastImportHistory.contains(digest)
                                    && !currentImportHistory.contains(digest)) {
                                rdp.parse(descBytes);
                                parsedNum++;
                            } else {
                                skippedNum++;
                            }
                            currentImportHistory.add(digest);
                        }
                    }
                    dumpStats.append("\n" + f.getName() + ": parsed " + parsedNum + ", skipped "
                            + skippedNum + " "
                            + (f.getName().startsWith("cached-descriptors") ? "server" : "extra-info")
                            + " descriptors");
                }
            } catch (IOException e) {
                logger.warn("Failed reading " + cachedDescDir.getAbsolutePath() + " directory.", e);
            } catch (ParseException e) {
                logger.warn("Failed reading " + cachedDescDir.getAbsolutePath() + " directory.", e);
            }
        }
        logger.debug("Finished reading " + cachedDescDir.getAbsolutePath() + " directory.");
    }
    /* Write import history containing SHA-1 digests to disk. */
    try {
        importHistoryFile.getParentFile().mkdirs();
        BufferedWriter bw = new BufferedWriter(new FileWriter(importHistoryFile));
        for (String digest : currentImportHistory) {
            bw.write(digest + "\n");
        }
        bw.close();
    } catch (IOException e) {
        logger.warn("Could not write import history to " + importHistoryFile.getAbsolutePath()
                + ".", e);
    }
    logger.info(dumpStats.toString());
}
From source file:org.torproject.ernie.db.SanitizedBridgesWriter.java
/**
 * Sanitizes a network status and writes it to disk. Processes every r
 * line separately and looks up whether the descriptor mapping contains
 * a bridge with given identity hash and descriptor publication time.
 *
 * <p>Each "r" line's bridge identity is replaced with the (deprecated
 * SHA-1) hash of the decoded identity, the address is replaced with
 * 127.0.0.1, and the nickname with "Unnamed". "s" lines pass through
 * unchanged; any other line aborts the write to avoid leaking sensitive
 * data.
 *
 * @param data raw bridge network status bytes (expected US-ASCII)
 * @param publicationTime status publication time, "yyyy-MM-dd HH:mm:ss";
 *     used to build the output file name
 */
public void sanitizeAndStoreNetworkStatus(byte[] data, String publicationTime) {
    /* Parse the given network status line by line. */
    StringBuilder scrubbed = new StringBuilder();
    try {
        BufferedReader br = new BufferedReader(new StringReader(new String(data, "US-ASCII")));
        String line = null;
        while ((line = br.readLine()) != null) {
            /* r lines contain sensitive information that needs to be removed
             * or replaced. */
            if (line.startsWith("r ")) {
                /* Parse the relevant parts of this r line. */
                String[] parts = line.split(" ");
                String bridgeIdentity = parts[2];
                String descPublicationTime = parts[4] + " " + parts[5];
                String orPort = parts[7];
                String dirPort = parts[8];
                /* Look up the descriptor in the descriptor mapping, or add a
                 * new mapping entry if there is none. The "==" restores the
                 * base64 padding stripped from the status encoding. */
                String hashedBridgeIdentityHex = Hex
                        .encodeHexString(DigestUtils.sha(Base64.decodeBase64(bridgeIdentity + "==")))
                        .toLowerCase();
                String mappingKey = hashedBridgeIdentityHex + "," + descPublicationTime;
                DescriptorMapping mapping = null;
                if (this.bridgeDescriptorMappings.containsKey(mappingKey)) {
                    mapping = this.bridgeDescriptorMappings.get(mappingKey);
                } else {
                    mapping = new DescriptorMapping(hashedBridgeIdentityHex.toLowerCase(),
                            descPublicationTime);
                    this.bridgeDescriptorMappings.put(mappingKey, mapping);
                }
                /* Write scrubbed r line to buffer; both base64 fields are
                 * truncated to 27 chars (the unpadded 20-byte encoding). */
                String hashedBridgeIdentityBase64 = Base64
                        .encodeBase64String(DigestUtils.sha(Base64.decodeBase64(bridgeIdentity + "==")))
                        .substring(0, 27);
                String sdi = Base64
                        .encodeBase64String(Hex.decodeHex(mapping.serverDescriptorIdentifier.toCharArray()))
                        .substring(0, 27);
                scrubbed.append("r Unnamed " + hashedBridgeIdentityBase64 + " " + sdi + " "
                        + descPublicationTime + " 127.0.0.1 " + orPort + " " + dirPort + "\n");
            /* Nothing special about s lines; just copy them. */
            } else if (line.startsWith("s ")) {
                scrubbed.append(line + "\n");
            /* There should be nothing else but r and s lines in the network
             * status. If there is, we should probably learn before writing
             * anything to the sanitized descriptors. */
            } else {
                this.logger.fine("Unknown line '" + line + "' in bridge "
                        + "network status. Not writing to disk!");
                return;
            }
        }
        br.close();
    } catch (IOException e) {
        this.logger.log(Level.WARNING, "Could not parse bridge network " + "status.", e);
        return;
    } catch (DecoderException e) {
        this.logger.log(Level.WARNING, "Could not parse bridge network " + "status.", e);
        return;
    }
    /* Write the sanitized network status to disk. */
    try {
        /* Determine file name:
         * {base}/{yyyy}/{mm}/statuses/{dd}/{yyyymmdd}-{hhmmss}-{authority}. */
        String syear = publicationTime.substring(0, 4);
        String smonth = publicationTime.substring(5, 7);
        String sday = publicationTime.substring(8, 10);
        String stime = publicationTime.substring(11, 13) + publicationTime.substring(14, 16)
                + publicationTime.substring(17, 19);
        File statusFile = new File(this.sanitizedBridgesDir + "/" + syear + "/" + smonth
                + "/statuses/" + sday + "/" + syear + smonth + sday + "-" + stime + "-"
                + "4A0CCD2DDC7995083D73F5D667100C8A5831F16D");
        /* Create all parent directories to write this network status. */
        statusFile.getParentFile().mkdirs();
        /* Write sanitized network status to disk. */
        BufferedWriter bw = new BufferedWriter(new FileWriter(statusFile));
        bw.write(scrubbed.toString());
        bw.close();
    } catch (IOException e) {
        this.logger.log(Level.WARNING, "Could not write sanitized bridge "
                + "network status to disk.", e);
        return;
    }
}
From source file:org.viafirma.nucleo.validacion.ValidadorHandler.java
/** * Chequea el hash del documento original coincide con el hash del documento * custodiado./*from ww w . j a v a 2 s . c o m*/ * * @param originalData * @param id * @param xmlSig * @return */ public boolean checkHash(byte[] originalData, String id, XMLSignature xmlSig) { // Realiza el digest del documento custodiado String digestCustodiadoString = XmlSignUtil.getInstance().getDigest(xmlSig, id)[1]; // Lo pasamos a Byte[] byte[] digestCustodiado = Base64.decode(digestCustodiadoString); // Bytes del documento original preprocesados ( Canonizados si fuese // necesario) byte[] originalDataPreprocess = null; try { // Recuperamos el tipo de documento a comprobar TypeFormatSign typeFormatSign = XmlSignUtil.getTypeFormatSign(id); if (typeFormatSign == TypeFormatSign.XMLSIG_ENVELOPING) { // Para este tipo de formato no es necesario canonizar, ej: // binario de un PDF originalDataPreprocess = originalData; } else if (typeFormatSign == TypeFormatSign.XADES_EPES_ENVELOPED) { // Recupera el documento custodiado InputStream input = new ByteArrayInputStream(originalData); // Reader readerXML = new InputStreamReader(input); InputSource ioXml = new InputSource(input); // parser javax.xml.parsers.DocumentBuilderFactory dbf = javax.xml.parsers.DocumentBuilderFactory .newInstance(); javax.xml.parsers.DocumentBuilder db; dbf.setNamespaceAware(true); dbf.setAttribute("http://xml.org/sax/features/namespaces", Boolean.TRUE); db = dbf.newDocumentBuilder(); org.w3c.dom.Document doc = db.parse(ioXml); org.w3c.dom.Element nscontext = XMLUtils.createDSctx(doc, "ds", Constants.SignatureSpecNS); NodeList nodeIterator = XPathAPI.selectNodeList(doc, "//ds:Signature", nscontext); // Extrae los hijos que ds:signature for (int i = 0; i < nodeIterator.getLength(); i++) { Node node = nodeIterator.item(i); node.getParentNode().removeChild(node); } // Canoniza el documento originalDataPreprocess = XmlSignUtil.getInstance().canonizar(doc); } else { throw new UnsupportedOperationException( "El tipo de 
firma no es soportado para su verificacion. " + typeFormatSign); } // Digiere los datos a comprobar byte[] digestOriginal = DigestUtils.sha(originalDataPreprocess); // Comprueba que los disges son efectivamente iguales if (Arrays.equals(digestCustodiado, digestOriginal)) { return true; } return false; } catch (ParserConfigurationException e) { log.error("Error con el configurador al parsear el documento ", e); } catch (SAXException e) { log.error("Error de nivel Sax al parsear el documento ", e); } catch (IOException e) { log.error("Error de flujo al parsear el documento", e); } catch (TransformerException e) { log.error("Error al transformar el documento", e); } catch (ExcepcionErrorInterno e) { log.error("Error de seguridad al digerir el documento", e); } return false; }
From source file:org.webcurator.auth.dbms.DebugSHAEncoder.java
public String encodePassword(String rawPass, Object salt) { String saltedPass = mergePasswordAndSalt(rawPass, salt, false); System.out.println("mergedPasswordAndSalt: [" + saltedPass + "]"); if (!getEncodeHashAsBase64()) { System.out.println("Not Doing base 64"); return DigestUtils.shaHex(saltedPass); }//from www .j a va2s . c om byte[] encoded = Base64.encodeBase64(DigestUtils.sha(saltedPass)); System.out.println("encodedPass: [" + new String(encoded) + "]"); return new String(encoded); }