List of usage examples for org.apache.commons.codec.digest DigestUtils shaHex
@Deprecated public static String shaHex(String data) — deprecated in Commons Codec 1.11 in favor of the equivalent sha1Hex(String data)
From source file:org.torproject.collector.relaydescs.RelayDescriptorParser.java
/** Parses the given bytes to find out the contained descriptor type, * forwards them to the archive writer to store them to disk, and tells * the relay descriptor downloader and archive reader about the * contained descriptor and all referenced descriptors. */ public boolean parse(byte[] data) { boolean stored = false; try {//from www . jav a 2 s.c o m /* Convert descriptor to ASCII for parsing. This means we'll lose * the non-ASCII chars, but we don't care about them for parsing * anyway. */ BufferedReader br = new BufferedReader(new StringReader(new String(data, "US-ASCII"))); String line; do { line = br.readLine(); } while (line != null && line.startsWith("@")); if (line == null) { logger.debug("We were given an empty descriptor for " + "parsing. Ignoring."); return false; } SimpleDateFormat parseFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); parseFormat.setTimeZone(TimeZone.getTimeZone("UTC")); if (line.startsWith("network-status-version 3")) { String statusType = "consensus"; if (line.equals("network-status-version 3 microdesc")) { statusType = "consensus-microdesc"; } String validAfterTime = null; String fingerprint = null; String dirSource = null; long validAfter = -1L; long dirKeyPublished = -1L; SortedSet<String> dirSources = new TreeSet<String>(); SortedSet<String> serverDescriptors = new TreeSet<String>(); SortedSet<String> serverDescriptorDigests = new TreeSet<String>(); SortedSet<String> microdescriptorKeys = new TreeSet<String>(); SortedSet<String> microdescriptorDigests = new TreeSet<String>(); StringBuilder certificateStringBuilder = null; String certificateString = null; String lastRelayIdentity = null; while ((line = br.readLine()) != null) { if (certificateStringBuilder != null) { if (line.startsWith("r ")) { certificateString = certificateStringBuilder.toString(); certificateStringBuilder = null; } else { certificateStringBuilder.append(line + "\n"); } } if (line.equals("vote-status vote")) { statusType = "vote"; } else if 
(line.startsWith("valid-after ")) { validAfterTime = line.substring("valid-after ".length()); validAfter = parseFormat.parse(validAfterTime).getTime(); } else if (line.startsWith("dir-source ")) { dirSource = line.split(" ")[2]; } else if (line.startsWith("vote-digest ")) { dirSources.add(dirSource); } else if (line.startsWith("dir-key-certificate-version ")) { certificateStringBuilder = new StringBuilder(); certificateStringBuilder.append(line + "\n"); } else if (line.startsWith("fingerprint ")) { fingerprint = line.split(" ")[1]; } else if (line.startsWith("dir-key-published ")) { String dirKeyPublishedTime = line.substring("dir-key-published ".length()); dirKeyPublished = parseFormat.parse(dirKeyPublishedTime).getTime(); } else if (line.startsWith("r ")) { String[] parts = line.split(" "); if (parts.length == 8) { lastRelayIdentity = Hex.encodeHexString(Base64.decodeBase64(parts[2] + "=")) .toLowerCase(); } else if (parts.length == 9) { lastRelayIdentity = Hex.encodeHexString(Base64.decodeBase64(parts[2] + "=")) .toLowerCase(); String serverDesc = Hex.encodeHexString(Base64.decodeBase64(parts[3] + "=")) .toLowerCase(); String publishedTime = parts[4] + " " + parts[5]; serverDescriptors.add(publishedTime + "," + lastRelayIdentity + "," + serverDesc); serverDescriptorDigests.add(serverDesc); } else { logger.warn("Could not parse r line '" + line + "' in descriptor. Skipping."); break; } } else if (line.startsWith("m ")) { String[] parts = line.split(" "); if (parts.length == 2 && parts[1].length() == 43) { String digest256Base64 = parts[1]; microdescriptorKeys .add(validAfterTime + "," + lastRelayIdentity + "," + digest256Base64); String digest256Hex = Hex.encodeHexString(Base64.decodeBase64(digest256Base64 + "=")) .toLowerCase(); microdescriptorDigests.add(digest256Hex); } else if (parts.length != 3 || !parts[2].startsWith("sha256=") || parts[2].length() != 50) { logger.warn("Could not parse m line '" + line + "' in descriptor. 
Skipping."); break; } } } if (statusType.equals("consensus")) { if (this.rdd != null) { this.rdd.haveParsedConsensus(validAfterTime, dirSources, serverDescriptors); } if (this.aw != null) { this.aw.storeConsensus(data, validAfter, dirSources, serverDescriptorDigests); stored = true; } } else if (statusType.equals("consensus-microdesc")) { if (this.rdd != null) { this.rdd.haveParsedMicrodescConsensus(validAfterTime, microdescriptorKeys); } if (this.ar != null) { this.ar.haveParsedMicrodescConsensus(validAfterTime, microdescriptorDigests); } if (this.aw != null) { this.aw.storeMicrodescConsensus(data, validAfter, microdescriptorDigests); stored = true; } } else { if (this.aw != null || this.rdd != null) { String ascii = new String(data, "US-ASCII"); String startToken = "network-status-version "; String sigToken = "directory-signature "; int start = ascii.indexOf(startToken); int sig = ascii.indexOf(sigToken); if (start >= 0 && sig >= 0 && sig > start) { sig += sigToken.length(); byte[] forDigest = new byte[sig - start]; System.arraycopy(data, start, forDigest, 0, sig - start); String digest = DigestUtils.shaHex(forDigest).toUpperCase(); if (this.aw != null) { this.aw.storeVote(data, validAfter, dirSource, digest, serverDescriptorDigests); stored = true; } if (this.rdd != null) { this.rdd.haveParsedVote(validAfterTime, fingerprint, serverDescriptors); } } if (certificateString != null) { if (this.aw != null) { this.aw.storeCertificate(certificateString.getBytes(), dirSource, dirKeyPublished); stored = true; } } } } } else if (line.startsWith("router ")) { String publishedTime = null; String extraInfoDigest = null; String relayIdentifier = null; long published = -1L; while ((line = br.readLine()) != null) { if (line.startsWith("published ")) { publishedTime = line.substring("published ".length()); published = parseFormat.parse(publishedTime).getTime(); } else if (line.startsWith("opt fingerprint") || line.startsWith("fingerprint")) { relayIdentifier = line.substring( 
line.startsWith("opt ") ? "opt fingerprint".length() : "fingerprint".length()) .replaceAll(" ", "").toLowerCase(); } else if (line.startsWith("opt extra-info-digest ") || line.startsWith("extra-info-digest ")) { extraInfoDigest = line.startsWith("opt ") ? line.split(" ")[2].toLowerCase() : line.split(" ")[1].toLowerCase(); } } String ascii = new String(data, "US-ASCII"); String startToken = "router "; String sigToken = "\nrouter-signature\n"; int start = ascii.indexOf(startToken); int sig = ascii.indexOf(sigToken) + sigToken.length(); String digest = null; if (start >= 0 || sig >= 0 || sig > start) { byte[] forDigest = new byte[sig - start]; System.arraycopy(data, start, forDigest, 0, sig - start); digest = DigestUtils.shaHex(forDigest); } if (this.aw != null && digest != null) { this.aw.storeServerDescriptor(data, digest, published, extraInfoDigest); stored = true; } if (this.rdd != null && digest != null) { this.rdd.haveParsedServerDescriptor(publishedTime, relayIdentifier, digest, extraInfoDigest); } } else if (line.startsWith("extra-info ")) { String publishedTime = null; String relayIdentifier = line.split(" ")[2]; long published = -1L; while ((line = br.readLine()) != null) { if (line.startsWith("published ")) { publishedTime = line.substring("published ".length()); published = parseFormat.parse(publishedTime).getTime(); } } String ascii = new String(data, "US-ASCII"); String startToken = "extra-info "; String sigToken = "\nrouter-signature\n"; String digest = null; int start = ascii.indexOf(startToken); if (start > 0) { /* Do not confuse "extra-info " in "@type extra-info 1.0" with * "extra-info 0000...". TODO This is a hack that should be * solved by using metrics-lib some day. 
*/ start = ascii.indexOf("\n" + startToken); if (start > 0) { start++; } } int sig = ascii.indexOf(sigToken) + sigToken.length(); if (start >= 0 && sig >= 0 && sig > start) { byte[] forDigest = new byte[sig - start]; System.arraycopy(data, start, forDigest, 0, sig - start); digest = DigestUtils.shaHex(forDigest); } if (this.aw != null && digest != null) { this.aw.storeExtraInfoDescriptor(data, digest, published); stored = true; } if (this.rdd != null && digest != null) { this.rdd.haveParsedExtraInfoDescriptor(publishedTime, relayIdentifier.toLowerCase(), digest); } } else if (line.equals("onion-key")) { /* Cannot store microdescriptors without knowing valid-after * time(s) of microdesc consensuses containing them, because we * don't know which month directories to put them in. Have to use * storeMicrodescriptor below. */ } br.close(); } catch (IOException e) { logger.warn("Could not parse descriptor. " + "Skipping.", e); } catch (ParseException e) { logger.warn("Could not parse descriptor. " + "Skipping.", e); } return stored; }
From source file:org.torproject.ernie.db.BridgeDescriptorParser.java
public void parse(byte[] allData, String dateTime, boolean sanitized) { try {// w ww. j av a 2 s . com BufferedReader br = new BufferedReader(new StringReader(new String(allData, "US-ASCII"))); SimpleDateFormat timeFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); timeFormat.setTimeZone(TimeZone.getTimeZone("UTC")); String hashedIdentity = null, platformLine = null, publishedLine = null, geoipStartTimeLine = null, bridgeStatsEndLine = null; boolean skip = false; String line = null; while ((line = br.readLine()) != null) { if (line.startsWith("r ")) { if (this.sbw != null) { if (sanitized) { this.sbw.storeSanitizedNetworkStatus(allData, dateTime); } else { this.sbw.sanitizeAndStoreNetworkStatus(allData, dateTime); } } int runningBridges = 0; while ((line = br.readLine()) != null) { if (line.startsWith("s ") && line.contains(" Running")) { runningBridges++; } } if (this.csfh != null) { this.csfh.addBridgeConsensusResults(dateTime, runningBridges); } } else if (line.startsWith("router ")) { if (this.sbw != null) { if (sanitized) { this.sbw.storeSanitizedServerDescriptor(allData); } else { this.sbw.sanitizeAndStoreServerDescriptor(allData); } } } else if (line.startsWith("extra-info ")) { if (this.sbw != null) { if (sanitized) { this.sbw.storeSanitizedExtraInfoDescriptor(allData); } else { this.sbw.sanitizeAndStoreExtraInfoDescriptor(allData); } } hashedIdentity = sanitized ? line.split(" ")[2] : DigestUtils.shaHex(line.split(" ")[2]).toUpperCase(); if (this.bsfh != null) { skip = this.bsfh.isKnownRelay(hashedIdentity); } } else if (!skip && line.startsWith("platform ")) { platformLine = line; } else if (!skip && line.startsWith("published ")) { publishedLine = line; } else if (line.startsWith("opt fingerprint") || line.startsWith("fingerprint")) { String identity = line .substring( line.startsWith("opt ") ? "opt fingerprint".length() : "fingerprint".length()) .replaceAll(" ", "").toLowerCase(); hashedIdentity = sanitized ? 
identity : DigestUtils.shaHex(identity).toUpperCase(); } else if (!skip && line.startsWith("geoip-start-time ")) { geoipStartTimeLine = line; } else if (!skip && line.startsWith("geoip-client-origins") && line.split(" ").length > 1) { if (publishedLine == null || geoipStartTimeLine == null) { this.logger.warning("Either published line or " + "geoip-start-time line is not present in " + (sanitized ? "sanitized" : "non-sanitized") + " bridge descriptors from " + dateTime + "."); break; } long published = timeFormat.parse(publishedLine.substring("published ".length())).getTime(); long started = timeFormat.parse(geoipStartTimeLine.substring("geoip-start-time ".length())) .getTime(); long seconds = (published - started) / 1000L; double allUsers = 0.0D; Map<String, String> obs = new HashMap<String, String>(); String[] parts = line.split(" ")[1].split(","); for (String p : parts) { String country = p.substring(0, 2); double users = ((double) Long.parseLong(p.substring(3)) - 4L) * 86400.0D / ((double) seconds); allUsers += users; obs.put(country, String.format("%.2f", users)); } obs.put("zy", String.format("%.2f", allUsers)); String date = publishedLine.split(" ")[1]; String time = publishedLine.split(" ")[2]; if (this.bsfh != null) { this.bsfh.addObs(hashedIdentity, date, time, obs); } } else if (!skip && line.startsWith("bridge-stats-end ")) { bridgeStatsEndLine = line; } else if (!skip && line.startsWith("bridge-ips") && line.split(" ").length > 1) { if (bridgeStatsEndLine == null) { this.logger.warning("bridge-ips line without preceding " + "bridge-stats-end line in " + (sanitized ? 
"sanitized" : "non-sanitized") + " bridge descriptor."); break; } double allUsers = 0.0D; Map<String, String> obs = new HashMap<String, String>(); String[] parts = line.split(" ")[1].split(","); for (String p : parts) { String country = p.substring(0, 2); double users = (double) Long.parseLong(p.substring(3)) - 4L; allUsers += users; obs.put(country, String.format("%.2f", users)); } obs.put("zy", String.format("%.2f", allUsers)); String date = bridgeStatsEndLine.split(" ")[1]; String time = bridgeStatsEndLine.split(" ")[2]; if (this.bsfh != null) { this.bsfh.addObs(hashedIdentity, date, time, obs); } } } if (this.bsfh != null && platformLine != null && platformLine.startsWith("platform Tor 0.2.2")) { String date = publishedLine.split(" ")[1]; String time = publishedLine.split(" ")[2]; this.bsfh.addZeroTwoTwoDescriptor(hashedIdentity, date, time); } } catch (IOException e) { this.logger.log(Level.WARNING, "Could not parse bridge descriptor.", e); return; } catch (ParseException e) { this.logger.log(Level.WARNING, "Could not parse bridge descriptor.", e); return; } }
From source file:org.torproject.ernie.db.RelayDescriptorParser.java
public void parse(byte[] data) { try {// w w w .ja v a 2 s. c o m /* Convert descriptor to ASCII for parsing. This means we'll lose * the non-ASCII chars, but we don't care about them for parsing * anyway. */ BufferedReader br = new BufferedReader(new StringReader(new String(data, "US-ASCII"))); String line = br.readLine(); if (line == null) { this.logger.fine("We were given an empty descriptor for " + "parsing. Ignoring."); return; } SimpleDateFormat parseFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); parseFormat.setTimeZone(TimeZone.getTimeZone("UTC")); if (line.equals("network-status-version 3")) { // TODO when parsing the current consensus, check the fresh-until // time to see when we switch from hourly to half-hourly // consensuses boolean isConsensus = true; int exit = 0, fast = 0, guard = 0, running = 0, stable = 0; String validAfterTime = null, nickname = null, relayIdentity = null, serverDesc = null, version = null, ports = null; String fingerprint = null, dirSource = null, address = null; long validAfter = -1L, published = -1L, bandwidth = -1L, orPort = 0L, dirPort = 0L; SortedSet<String> dirSources = new TreeSet<String>(); SortedSet<String> serverDescriptors = new TreeSet<String>(); SortedSet<String> hashedRelayIdentities = new TreeSet<String>(); SortedSet<String> relayFlags = null; StringBuilder rawStatusEntry = null; while ((line = br.readLine()) != null) { if (line.equals("vote-status vote")) { isConsensus = false; } else if (line.startsWith("valid-after ")) { validAfterTime = line.substring("valid-after ".length()); validAfter = parseFormat.parse(validAfterTime).getTime(); } else if (line.startsWith("dir-source ")) { dirSource = line.split(" ")[2]; } else if (line.startsWith("vote-digest ")) { dirSources.add(dirSource); } else if (line.startsWith("fingerprint ")) { fingerprint = line.split(" ")[1]; } else if (line.startsWith("r ")) { if (isConsensus && relayIdentity != null && this.rddi != null) { byte[] rawDescriptor = 
rawStatusEntry.toString().getBytes(); this.rddi.addStatusEntry(validAfter, nickname, relayIdentity, serverDesc, published, address, orPort, dirPort, relayFlags, version, bandwidth, ports, rawDescriptor); relayFlags = null; version = null; bandwidth = -1L; ports = null; } rawStatusEntry = new StringBuilder(line + "\n"); String[] parts = line.split(" "); if (parts.length < 9) { this.logger.log(Level.WARNING, "Could not parse r line '" + line + "' in descriptor. Skipping."); break; } String publishedTime = parts[4] + " " + parts[5]; nickname = parts[1]; relayIdentity = Hex.encodeHexString(Base64.decodeBase64(parts[2] + "=")).toLowerCase(); serverDesc = Hex.encodeHexString(Base64.decodeBase64(parts[3] + "=")).toLowerCase(); serverDescriptors.add(publishedTime + "," + relayIdentity + "," + serverDesc); hashedRelayIdentities .add(DigestUtils.shaHex(Base64.decodeBase64(parts[2] + "=")).toUpperCase()); published = parseFormat.parse(parts[4] + " " + parts[5]).getTime(); address = parts[6]; orPort = Long.parseLong(parts[7]); dirPort = Long.parseLong(parts[8]); } else if (line.startsWith("s ") || line.equals("s")) { rawStatusEntry.append(line + "\n"); if (line.contains(" Running")) { exit += line.contains(" Exit") ? 1 : 0; fast += line.contains(" Fast") ? 1 : 0; guard += line.contains(" Guard") ? 1 : 0; stable += line.contains(" Stable") ? 
1 : 0; running++; } relayFlags = new TreeSet<String>(); if (line.length() > 2) { for (String flag : line.substring(2).split(" ")) { relayFlags.add(flag); } } } else if (line.startsWith("v ")) { rawStatusEntry.append(line + "\n"); version = line.substring(2); } else if (line.startsWith("w ")) { rawStatusEntry.append(line + "\n"); String[] parts = line.split(" "); for (String part : parts) { if (part.startsWith("Bandwidth=")) { bandwidth = Long.parseLong(part.substring("Bandwidth=".length())); } } } else if (line.startsWith("p ")) { rawStatusEntry.append(line + "\n"); ports = line.substring(2); } } if (isConsensus) { if (this.rddi != null) { this.rddi.addConsensus(validAfter, data); if (relayIdentity != null) { byte[] rawDescriptor = rawStatusEntry.toString().getBytes(); this.rddi.addStatusEntry(validAfter, nickname, relayIdentity, serverDesc, published, address, orPort, dirPort, relayFlags, version, bandwidth, ports, rawDescriptor); } } if (this.bsfh != null) { for (String hashedRelayIdentity : hashedRelayIdentities) { this.bsfh.addHashedRelay(hashedRelayIdentity); } } if (this.csfh != null) { this.csfh.addConsensusResults(validAfterTime, exit, fast, guard, running, stable); } if (this.rdd != null) { this.rdd.haveParsedConsensus(validAfterTime, dirSources, serverDescriptors); } if (this.aw != null) { this.aw.storeConsensus(data, validAfter); } if (this.chc != null) { this.chc.processConsensus(validAfterTime, data); } } else { if (this.rddi != null) { this.rddi.addVote(validAfter, dirSource, data); } if (this.rdd != null) { this.rdd.haveParsedVote(validAfterTime, fingerprint, serverDescriptors); } if (this.aw != null) { String ascii = new String(data, "US-ASCII"); String startToken = "network-status-version "; String sigToken = "directory-signature "; int start = ascii.indexOf(startToken); int sig = ascii.indexOf(sigToken); if (start >= 0 && sig >= 0 && sig > start) { sig += sigToken.length(); byte[] forDigest = new byte[sig - start]; System.arraycopy(data, start, 
forDigest, 0, sig - start); String digest = DigestUtils.shaHex(forDigest).toUpperCase(); if (this.aw != null) { this.aw.storeVote(data, validAfter, dirSource, digest); } } } if (this.chc != null) { this.chc.processVote(validAfterTime, dirSource, data); } } } else if (line.startsWith("router ")) { String platformLine = null, publishedLine = null, publishedTime = null, bandwidthLine = null, extraInfoDigest = null, relayIdentifier = null; String[] parts = line.split(" "); String nickname = parts[1]; String address = parts[2]; int orPort = Integer.parseInt(parts[3]); int dirPort = Integer.parseInt(parts[4]); long published = -1L, uptime = -1L; while ((line = br.readLine()) != null) { if (line.startsWith("platform ")) { platformLine = line; } else if (line.startsWith("published ")) { publishedTime = line.substring("published ".length()); published = parseFormat.parse(publishedTime).getTime(); } else if (line.startsWith("opt fingerprint") || line.startsWith("fingerprint")) { relayIdentifier = line.substring( line.startsWith("opt ") ? "opt fingerprint".length() : "fingerprint".length()) .replaceAll(" ", "").toLowerCase(); } else if (line.startsWith("bandwidth ")) { bandwidthLine = line; } else if (line.startsWith("opt extra-info-digest ") || line.startsWith("extra-info-digest ")) { extraInfoDigest = line.startsWith("opt ") ? 
line.split(" ")[2].toLowerCase() : line.split(" ")[1].toLowerCase(); } else if (line.startsWith("uptime ")) { uptime = Long.parseLong(line.substring("uptime ".length())); } } String ascii = new String(data, "US-ASCII"); String startToken = "router "; String sigToken = "\nrouter-signature\n"; int start = ascii.indexOf(startToken); int sig = ascii.indexOf(sigToken) + sigToken.length(); String digest = null; if (start >= 0 || sig >= 0 || sig > start) { byte[] forDigest = new byte[sig - start]; System.arraycopy(data, start, forDigest, 0, sig - start); digest = DigestUtils.shaHex(forDigest); } if (this.aw != null && digest != null) { this.aw.storeServerDescriptor(data, digest, published); } if (this.rdd != null && digest != null) { this.rdd.haveParsedServerDescriptor(publishedTime, relayIdentifier, digest, extraInfoDigest); } if (this.rddi != null && digest != null) { String[] bwParts = bandwidthLine.split(" "); long bandwidthAvg = Long.parseLong(bwParts[1]); long bandwidthBurst = Long.parseLong(bwParts[2]); long bandwidthObserved = Long.parseLong(bwParts[3]); String platform = platformLine.substring("platform ".length()); this.rddi.addServerDescriptor(digest, nickname, address, orPort, dirPort, relayIdentifier, bandwidthAvg, bandwidthBurst, bandwidthObserved, platform, published, uptime, extraInfoDigest, data); } } else if (line.startsWith("extra-info ")) { String nickname = line.split(" ")[1]; String publishedTime = null, relayIdentifier = line.split(" ")[2]; long published = -1L; String dir = line.split(" ")[2]; String statsEnd = null; long seconds = -1L; SortedMap<String, String> bandwidthHistory = new TreeMap<String, String>(); boolean skip = false; while ((line = br.readLine()) != null) { if (line.startsWith("published ")) { publishedTime = line.substring("published ".length()); published = parseFormat.parse(publishedTime).getTime(); } else if (line.startsWith("read-history ") || line.startsWith("write-history ") || line.startsWith("dirreq-read-history ") || 
line.startsWith("dirreq-write-history ")) { String[] parts = line.split(" "); if (parts.length == 6) { String type = parts[0]; String intervalEndTime = parts[1] + " " + parts[2]; long intervalEnd = dateTimeFormat.parse(intervalEndTime).getTime(); if (Math.abs(published - intervalEnd) > 7L * 24L * 60L * 60L * 1000L) { this.logger.fine("Extra-info descriptor publication time " + publishedTime + " and last interval time " + intervalEndTime + " in " + type + " line differ by " + "more than 7 days! Not adding this line!"); continue; } try { long intervalLength = Long.parseLong(parts[3].substring(1)); String[] values = parts[5].split(","); for (int i = values.length - 1; i >= 0; i--) { Long.parseLong(values[i]); bandwidthHistory.put(intervalEnd + "," + type, intervalEnd + "," + type + "," + values[i]); intervalEnd -= intervalLength * 1000L; } } catch (NumberFormatException e) { this.logger.log(Level.WARNING, "Could not parse " + line.split(" ")[0] + " line '" + line + "' in " + "descriptor. Skipping.", e); break; } } } else if (line.startsWith("dirreq-stats-end ")) { String[] parts = line.split(" "); if (parts.length < 5) { this.logger.warning("Could not parse dirreq-stats-end " + "line '" + line + "' in descriptor. Skipping."); break; } statsEnd = parts[1] + " " + parts[2]; seconds = Long.parseLong(parts[3].substring(1)); } else if (line.startsWith("dirreq-v3-reqs ") && line.length() > "dirreq-v3-reqs ".length()) { if (this.dsfh != null) { try { int allUsers = 0; Map<String, String> obs = new HashMap<String, String>(); String[] parts = line.substring("dirreq-v3-reqs ".length()).split(","); for (String p : parts) { String country = p.substring(0, 2); int users = Integer.parseInt(p.substring(3)) - 4; allUsers += users; obs.put(country, "" + users); } obs.put("zy", "" + allUsers); this.dsfh.addObs(dir, statsEnd, seconds, obs); } catch (NumberFormatException e) { this.logger.log(Level.WARNING, "Could not parse " + "dirreq-v3-reqs line '" + line + "' in descriptor. 
" + "Skipping.", e); break; } } } } String ascii = new String(data, "US-ASCII"); String startToken = "extra-info "; String sigToken = "\nrouter-signature\n"; String digest = null; int start = ascii.indexOf(startToken); int sig = ascii.indexOf(sigToken) + sigToken.length(); if (start >= 0 || sig >= 0 || sig > start) { byte[] forDigest = new byte[sig - start]; System.arraycopy(data, start, forDigest, 0, sig - start); digest = DigestUtils.shaHex(forDigest); } if (this.aw != null && digest != null) { this.aw.storeExtraInfoDescriptor(data, digest, published); } if (this.rdd != null && digest != null) { this.rdd.haveParsedExtraInfoDescriptor(publishedTime, relayIdentifier.toLowerCase(), digest); } if (this.rddi != null && digest != null) { this.rddi.addExtraInfoDescriptor(digest, nickname, dir.toLowerCase(), published, data, bandwidthHistory); } } } catch (IOException e) { this.logger.log(Level.WARNING, "Could not parse descriptor. " + "Skipping.", e); } catch (ParseException e) { this.logger.log(Level.WARNING, "Could not parse descriptor. " + "Skipping.", e); } }
From source file:org.torproject.ernie.db.SanitizedBridgesWriter.java
/**
 * Sanitizes a bridge server descriptor and writes it to disk. Looks up
 * the bridge identity hash and publication time in the descriptor
 * mapping. After sanitizing a server descriptor, its publication time
 * is noted down, so that all network statuses that might be referencing
 * this server descriptor can be re-written at the end of the sanitizing
 * procedure.
 *
 * @param data raw server descriptor bytes
 */
public void sanitizeAndStoreServerDescriptor(byte[] data) {
  /* Parse descriptor to generate a sanitized version and to look it up
   * in the descriptor mapping. */
  String scrubbedDesc = null;
  DescriptorMapping mapping = null;
  try {
    BufferedReader br = new BufferedReader(new StringReader(
        new String(data, "US-ASCII")));
    StringBuilder scrubbed = new StringBuilder();
    String line = null, hashedBridgeIdentity = null, published = null;
    boolean skipCrypto = false;
    while ((line = br.readLine()) != null) {
      /* When we have parsed both published and fingerprint line, look
       * up descriptor in the descriptor mapping or create a new one if
       * there is none. */
      if (mapping == null && published != null
          && hashedBridgeIdentity != null) {
        String mappingKey = hashedBridgeIdentity + "," + published;
        if (this.bridgeDescriptorMappings.containsKey(mappingKey)) {
          mapping = this.bridgeDescriptorMappings.get(mappingKey);
        } else {
          mapping = new DescriptorMapping(hashedBridgeIdentity,
              published);
          this.bridgeDescriptorMappings.put(mappingKey, mapping);
        }
      }
      /* Skip all crypto parts that might be used to derive the
       * bridge's identity fingerprint. */
      if (skipCrypto && !line.startsWith("-----END ")) {
        continue;
      /* Replace the original address with 127.0.0.1 and the nickname
       * with Unnamed in the scrubbed version. */
      } else if (line.startsWith("router ")) {
        scrubbed = new StringBuilder("router Unnamed 127.0.0.1 "
            + line.split(" ")[3] + " " + line.split(" ")[4] + " "
            + line.split(" ")[5] + "\n");
      /* Parse the publication time and add it to the list of
       * descriptor publication times to re-write network statuses at
       * the end of the sanitizing procedure. */
      } else if (line.startsWith("published ")) {
        published = line.substring("published ".length());
        this.descriptorPublicationTimes.add(published);
        scrubbed.append(line + "\n");
      /* Parse the fingerprint to determine the hashed bridge
       * identity. */
      } else if (line.startsWith("opt fingerprint ")) {
        String fingerprint = line.substring(line.startsWith("opt ")
            ? "opt fingerprint".length() : "fingerprint".length())
            .replaceAll(" ", "").toLowerCase();
        hashedBridgeIdentity = DigestUtils.shaHex(
            Hex.decodeHex(fingerprint.toCharArray())).toLowerCase();
        scrubbed.append("opt fingerprint");
        for (int i = 0; i < hashedBridgeIdentity.length() / 4; i++) {
          scrubbed.append(" " + hashedBridgeIdentity
              .substring(4 * i, 4 * (i + 1)).toUpperCase());
        }
        scrubbed.append("\n");
      /* Replace the contact line (if present) with a generic one. */
      } else if (line.startsWith("contact ")) {
        scrubbed.append("contact somebody\n");
      /* When we reach the signature, we're done. Write the sanitized
       * descriptor to disk below. */
      } else if (line.startsWith("router-signature")) {
        scrubbedDesc = scrubbed.toString();
        break;
      /* Replace extra-info digest with the one we know from our
       * descriptor mapping (which might be all 0's if we didn't parse
       * the extra-info descriptor before). NOTE(review): assumes the
       * extra-info-digest line always follows published and
       * fingerprint, so that mapping is non-null here — confirm. */
      } else if (line.startsWith("opt extra-info-digest ")) {
        scrubbed.append("opt extra-info-digest "
            + mapping.extraInfoDescriptorIdentifier.toUpperCase()
            + "\n");
      /* Write the following lines unmodified to the sanitized
       * descriptor. */
      } else if (line.startsWith("reject ") || line.startsWith("accept ")
          || line.startsWith("platform ")
          || line.startsWith("opt protocols ")
          || line.startsWith("uptime ") || line.startsWith("bandwidth ")
          || line.startsWith("opt hibernating ")
          || line.equals("opt hidden-service-dir")
          || line.equals("opt caches-extra-info")
          || line.equals("opt allow-single-hop-exits")) {
        scrubbed.append(line + "\n");
      /* Replace node fingerprints in the family line with their hashes
       * and nicknames with Unnamed. */
      } else if (line.startsWith("family ")) {
        StringBuilder familyLine = new StringBuilder("family");
        for (String s : line.substring(7).split(" ")) {
          if (s.startsWith("$")) {
            familyLine.append(" $" + DigestUtils.shaHex(
                Hex.decodeHex(s.substring(1).toCharArray()))
                .toUpperCase());
          } else {
            familyLine.append(" Unnamed");
          }
        }
        scrubbed.append(familyLine.toString() + "\n");
      /* Skip the purpose line that the bridge authority adds to its
       * cached-descriptors file. */
      } else if (line.startsWith("@purpose ")) {
        continue;
      /* Skip all crypto parts that might leak the bridge's identity
       * fingerprint. */
      } else if (line.startsWith("-----BEGIN ")
          || line.equals("onion-key") || line.equals("signing-key")) {
        skipCrypto = true;
      /* Stop skipping lines when the crypto parts are over. */
      } else if (line.startsWith("-----END ")) {
        skipCrypto = false;
      /* If we encounter an unrecognized line, stop parsing and print
       * out a warning. We might have overlooked sensitive information
       * that we need to remove or replace for the sanitized descriptor
       * version. */
      } else {
        this.logger.fine("Unrecognized line '" + line
            + "'. Skipping.");
        return;
      }
    }
    br.close();
  /* Broad catch is deliberate: Hex.decodeHex throws DecoderException
   * in addition to the reader's IOException. */
  } catch (Exception e) {
    this.logger.log(Level.WARNING, "Could not parse server "
        + "descriptor.", e);
    return;
  }
  /* Fixed: a malformed descriptor (no router-signature line, or
   * published/fingerprint never both seen) previously reached the code
   * below with scrubbedDesc or mapping still null and threw an NPE. */
  if (scrubbedDesc == null || mapping == null) {
    this.logger.log(Level.WARNING, "Incomplete server descriptor "
        + "without signature or identity. Skipping.");
    return;
  }
  /* Determine new descriptor digest and write it to descriptor
   * mapping. */
  String scrubbedHash = DigestUtils.shaHex(scrubbedDesc);
  mapping.serverDescriptorIdentifier = scrubbedHash;
  /* Determine filename of sanitized server descriptor. (Fixed an
   * accidental doubled '/' after "server-descriptors".) */
  String dyear = mapping.published.substring(0, 4);
  String dmonth = mapping.published.substring(5, 7);
  File newFile = new File(this.sanitizedBridgesDir + "/" + dyear + "/"
      + dmonth + "/server-descriptors/" + scrubbedHash.charAt(0) + "/"
      + scrubbedHash.charAt(1) + "/" + scrubbedHash);
  /* Write sanitized server descriptor to disk, including all its
   * parent directories. */
  try {
    newFile.getParentFile().mkdirs();
    BufferedWriter bw = new BufferedWriter(new FileWriter(newFile));
    bw.write(scrubbedDesc);
    bw.close();
  } catch (IOException e) {
    this.logger.log(Level.WARNING, "Could not write sanitized server "
        + "descriptor to disk.", e);
    return;
  }
}
From source file:org.torproject.ernie.db.SanitizedBridgesWriter.java
/**
 * Sanitizes an extra-info descriptor and writes it to disk.
 *
 * <p>Looks up the bridge identity hash and publication time in the
 * descriptor mapping. If the corresponding server descriptor was sanitized
 * before, it is re-written to include the new extra-info descriptor digest,
 * and the publication time is noted down, too, so that all network statuses
 * possibly referencing this extra-info descriptor and its corresponding
 * server descriptor can be re-written at the end of the sanitizing
 * procedure.
 *
 * @param data raw extra-info descriptor bytes (treated as US-ASCII;
 *     non-ASCII bytes are irrelevant for the keyword-based scrubbing below)
 */
public void sanitizeAndStoreExtraInfoDescriptor(byte[] data) {

    /* Parse descriptor to generate a sanitized version and to look it up
     * in the descriptor mapping. */
    String scrubbedDesc = null, published = null;
    DescriptorMapping mapping = null;
    try {
        BufferedReader br = new BufferedReader(new StringReader(new String(data, "US-ASCII")));
        String line = null;
        StringBuilder scrubbed = null;
        String hashedBridgeIdentity = null;
        while ((line = br.readLine()) != null) {

            /* When we have parsed both published and fingerprint line, look
             * up the descriptor in the descriptor mapping or create a new
             * one if there is none. */
            if (mapping == null && published != null && hashedBridgeIdentity != null) {
                String mappingKey = hashedBridgeIdentity + "," + published;
                if (this.bridgeDescriptorMappings.containsKey(mappingKey)) {
                    mapping = this.bridgeDescriptorMappings.get(mappingKey);
                } else {
                    mapping = new DescriptorMapping(hashedBridgeIdentity, published);
                    this.bridgeDescriptorMappings.put(mappingKey, mapping);
                }
            }

            /* Parse the bridge identity from the extra-info line and replace
             * it with its SHA-1 hash in the sanitized descriptor. */
            if (line.startsWith("extra-info ")) {
                hashedBridgeIdentity = DigestUtils.shaHex(Hex.decodeHex(line.split(" ")[2].toCharArray()))
                        .toLowerCase();
                scrubbed = new StringBuilder("extra-info Unnamed " + hashedBridgeIdentity.toUpperCase()
                        + "\n");

            /* Parse the publication time and remember it, so that network
             * statuses referencing this descriptor can be re-written at the
             * end of the sanitizing procedure. */
            } else if (line.startsWith("published ")) {
                scrubbed.append(line + "\n");
                published = line.substring("published ".length());

            /* Copy these whitelisted lines unmodified into the sanitized
             * descriptor. */
            } else if (line.startsWith("write-history ") || line.startsWith("read-history ")
                    || line.startsWith("geoip-start-time ") || line.startsWith("geoip-client-origins ")
                    || line.startsWith("bridge-stats-end ") || line.startsWith("bridge-ips ")) {
                scrubbed.append(line + "\n");

            /* When we reach the signature, we're done. The sanitized
             * descriptor is written to disk below. */
            } else if (line.startsWith("router-signature")) {
                scrubbedDesc = scrubbed.toString();
                break;

            /* Don't include statistics that should only be contained in
             * relay extra-info descriptors. */
            } else if (line.startsWith("dirreq-") || line.startsWith("cell-") || line.startsWith("exit-")) {
                continue;

            /* On an unrecognized line, stop parsing and warn: we might have
             * overlooked sensitive information that needs to be removed or
             * replaced in the sanitized version. */
            } else {
                this.logger.fine("Unrecognized line '" + line + "'. Skipping.");
                return;
            }
        }
        br.close();
    } catch (IOException e) {
        this.logger.log(Level.WARNING, "Could not parse extra-info " + "descriptor.", e);
        return;
    } catch (DecoderException e) {
        /* Thrown by Hex.decodeHex() when the fingerprint field is not valid
         * hex. */
        this.logger.log(Level.WARNING, "Could not parse extra-info " + "descriptor.", e);
        return;
    }

    /* Determine the new descriptor digest; if it changed and a sanitized
     * server descriptor already exists, re-write that server descriptor so
     * it references this new digest. */
    String scrubbedDescHash = DigestUtils.shaHex(scrubbedDesc);
    boolean extraInfoDescriptorIdentifierHasChanged = !scrubbedDescHash
            .equals(mapping.extraInfoDescriptorIdentifier);
    mapping.extraInfoDescriptorIdentifier = scrubbedDescHash;
    if (extraInfoDescriptorIdentifierHasChanged
            && !mapping.serverDescriptorIdentifier.equals(NULL_REFERENCE)) {
        this.rewriteServerDescriptor(mapping);
        this.descriptorPublicationTimes.add(published);
    }

    /* Determine the file name of the sanitized extra-info descriptor:
     * year/month/extra-infos/<first hex char>/<second hex char>/<digest>. */
    String dyear = mapping.published.substring(0, 4);
    String dmonth = mapping.published.substring(5, 7);
    File newFile = new File(this.sanitizedBridgesDir + "/" + dyear + "/" + dmonth + "/extra-infos/"
            + scrubbedDescHash.charAt(0) + "/" + scrubbedDescHash.charAt(1) + "/" + scrubbedDescHash);

    /* Write the sanitized descriptor to disk, creating all its parent
     * directories first. */
    try {
        newFile.getParentFile().mkdirs();
        BufferedWriter bw = new BufferedWriter(new FileWriter(newFile));
        bw.write(scrubbedDesc);
        bw.close();
    /* NOTE(review): broad catch; presumably meant to also cover runtime
     * errors such as an NPE from getParentFile() — confirm before
     * narrowing to IOException. */
    } catch (Exception e) {
        this.logger.log(Level.WARNING, "Could not write sanitized " + "extra-info descriptor to disk.", e);
    }
}
From source file:org.torproject.ernie.db.SanitizedBridgesWriter.java
/**
 * Sanitizes a bridge server descriptor and writes it to disk.
 *
 * <p>Replaces the nickname and IP address in the "router" line, replaces
 * the extra-info digest with the sanitized one from the descriptor
 * mapping, records the publication time for later network-status
 * re-writing, and stores the result under a path derived from the
 * publication month and the SHA-1 digest of the sanitized descriptor.
 *
 * @param data raw server descriptor bytes (treated as US-ASCII)
 */
public void storeSanitizedServerDescriptor(byte[] data) {
    try {
        /* Descriptors are ASCII; non-ASCII bytes don't matter for the
         * keyword-based rewriting below. */
        String ascii = new String(data, "US-ASCII");
        BufferedReader br2 = new BufferedReader(new StringReader(ascii));
        StringBuilder sb = new StringBuilder();
        String line2 = null, published = null;
        String hashedBridgeIdentity = null;
        DescriptorMapping mapping = null;
        try {
            while ((line2 = br2.readLine()) != null) {

                /* Once both the publication time and the hashed identity
                 * are known, look up the descriptor mapping or create a
                 * new one. */
                if (mapping == null && published != null && hashedBridgeIdentity != null) {
                    String mappingKey = (hashedBridgeIdentity + "," + published).toLowerCase();
                    if (this.bridgeDescriptorMappings.containsKey(mappingKey)) {
                        mapping = this.bridgeDescriptorMappings.get(mappingKey);
                    } else {
                        mapping = new DescriptorMapping(hashedBridgeIdentity.toLowerCase(), published);
                        this.bridgeDescriptorMappings.put(mappingKey, mapping);
                    }
                }
                if (line2.startsWith("router ")) {
                    /* Scrub nickname and address; keep fields 3-5 (ports). */
                    sb.append("router Unnamed 127.0.0.1 " + line2.split(" ")[3] + " " + line2.split(" ")[4]
                            + " " + line2.split(" ")[5] + "\n");
                } else if (line2.startsWith("published ")) {
                    published = line2.substring("published ".length());
                    sb.append(line2 + "\n");
                    this.descriptorPublicationTimes.add(published);
                } else if (line2.startsWith("opt fingerprint ")) {
                    /* Substring keeps a leading space which replaceAll
                     * removes along with the grouping spaces. */
                    hashedBridgeIdentity = line2.substring("opt fingerprint".length())
                            .replaceAll(" ", "").toLowerCase();
                    sb.append(line2 + "\n");
                } else if (line2.startsWith("opt extra-info-digest ")) {
                    /* Reference the sanitized extra-info descriptor, not
                     * the original one. */
                    sb.append("opt extra-info-digest "
                            + mapping.extraInfoDescriptorIdentifier.toUpperCase() + "\n");
                } else {
                    sb.append(line2 + "\n");
                }
            }
        } finally {
            /* Fix: previously the reader leaked if readLine() threw. */
            br2.close();
        }
        String scrubbedDesc = sb.toString();

        /* The SHA-1 hex digest of the sanitized descriptor serves as its
         * identifier and file name. */
        String scrubbedHash = DigestUtils.shaHex(scrubbedDesc);
        mapping.serverDescriptorIdentifier = scrubbedHash;
        String dyear = published.substring(0, 4);
        String dmonth = published.substring(5, 7);
        File newFile = new File(this.sanitizedBridgesDir + "/" + dyear + "/" + dmonth
                + "/server-descriptors/" + scrubbedHash.substring(0, 1) + "/"
                + scrubbedHash.substring(1, 2) + "/" + scrubbedHash);
        this.logger.finer("Storing server descriptor " + newFile.getAbsolutePath());
        newFile.getParentFile().mkdirs();
        BufferedWriter bw = new BufferedWriter(new FileWriter(newFile));
        try {
            bw.write(scrubbedDesc);
        } finally {
            /* Fix: previously the writer leaked if write() threw. */
            bw.close();
        }
    } catch (IOException e) {
        /* Fix: message previously said "unsanitized", but this method
         * stores the sanitized descriptor. */
        this.logger.log(Level.WARNING, "Could not store sanitized server " + "descriptor.", e);
    }
}
From source file:org.torproject.ernie.db.SanitizedBridgesWriter.java
public void storeSanitizedExtraInfoDescriptor(byte[] data) { try {/* www . j av a 2s. c o m*/ String ascii = new String(data, "US-ASCII"); BufferedReader br2 = new BufferedReader(new StringReader(ascii)); StringBuilder sb = new StringBuilder(); String line2 = null, published = null; String hashedBridgeIdentity = null; DescriptorMapping mapping = null; while ((line2 = br2.readLine()) != null) { if (mapping == null && published != null && hashedBridgeIdentity != null) { String mappingKey = (hashedBridgeIdentity + "," + published).toLowerCase(); if (this.bridgeDescriptorMappings.containsKey(mappingKey)) { mapping = this.bridgeDescriptorMappings.get(mappingKey); } else { mapping = new DescriptorMapping(hashedBridgeIdentity.toLowerCase(), published); this.bridgeDescriptorMappings.put(mappingKey, mapping); } } if (line2.startsWith("extra-info ")) { hashedBridgeIdentity = line2.split(" ")[2]; sb.append("extra-info Unnamed " + hashedBridgeIdentity + "\n"); } else if (line2.startsWith("published ")) { sb.append(line2 + "\n"); published = line2.substring("published ".length()); this.descriptorPublicationTimes.add(published); } else { sb.append(line2 + "\n"); } } br2.close(); String scrubbedDesc = sb.toString(); String scrubbedHash = DigestUtils.shaHex(scrubbedDesc); mapping.extraInfoDescriptorIdentifier = scrubbedHash; String dyear = published.substring(0, 4); String dmonth = published.substring(5, 7); File newFile = new File(this.sanitizedBridgesDir + "/" + dyear + "/" + dmonth + "/extra-infos/" + scrubbedHash.substring(0, 1) + "/" + scrubbedHash.substring(1, 2) + "/" + scrubbedHash); this.logger.finer("Storing extra-info descriptor " + newFile.getAbsolutePath()); newFile.getParentFile().mkdirs(); BufferedWriter bw = new BufferedWriter(new FileWriter(newFile)); bw.write(scrubbedDesc); bw.close(); } catch (IOException e) { this.logger.log(Level.WARNING, "Could not store sanitized " + "extra-info descriptor.", e); } }
From source file:org.trancecode.xproc.step.HashStepProcessor.java
/**
 * Executes the XProc p:hash step: computes a hash of the "value" option
 * using the requested algorithm/version and replaces every node matched
 * by the "match" pattern in the source document with that hash value.
 *
 * <p>Supported algorithms (case-insensitive): "crc" (version 32 or
 * unset), "md" (version 5 or unset), "sha" (versions 1/256/384/512, or
 * unset for SHA-1). Any other combination raises err:XC0036.
 */
@Override
protected void execute(final StepInput input, final StepOutput output) {
    final XdmNode sourceDocument = input.readNode(XProcPorts.SOURCE);
    final String value = input.getOptionValue(XProcOptions.VALUE);
    assert value != null;
    LOG.trace("value = {}", value);
    final String algorithm = input.getOptionValue(XProcOptions.ALGORITHM);
    assert algorithm != null;
    LOG.trace("algorithm = {}", algorithm);
    final String match = input.getOptionValue(XProcOptions.MATCH);
    assert match != null;
    LOG.trace("match = {}", match);
    /* "version" is the only optional option; null selects each
     * algorithm's default version below. */
    final String version = input.getOptionValue(XProcOptions.VERSION);
    LOG.trace("version = {}", version);

    /* Compute the hash once, up front; the delegate below inserts the
     * same value wherever the match pattern fires. */
    final String hashValue;
    if (StringUtils.equalsIgnoreCase("crc", algorithm)) {
        if ("32".equals(version) || version == null) {
            final CRC32 crc32 = new CRC32();
            /* NOTE(review): getBytes() uses the platform default charset
             * here — confirm whether UTF-8 should be forced. */
            crc32.update(value.getBytes());
            hashValue = Long.toHexString(crc32.getValue());
        } else {
            /* err:XC0036: unsupported hash algorithm/version. */
            throw XProcExceptions.xc0036(input.getLocation());
        }
    } else if (StringUtils.equalsIgnoreCase("md", algorithm)) {
        if (version == null || "5".equals(version)) {
            hashValue = DigestUtils.md5Hex(value);
        } else {
            throw XProcExceptions.xc0036(input.getLocation());
        }
    } else if (StringUtils.equalsIgnoreCase("sha", algorithm)) {
        if (version == null || "1".equals(version)) {
            hashValue = DigestUtils.shaHex(value);
        } else if ("256".equals(version)) {
            hashValue = DigestUtils.sha256Hex(value);
        } else if ("384".equals(version)) {
            hashValue = DigestUtils.sha384Hex(value);
        } else if ("512".equals(version)) {
            hashValue = DigestUtils.sha512Hex(value);
        } else {
            throw XProcExceptions.xc0036(input.getLocation());
        }
    } else {
        throw XProcExceptions.xc0036(input.getLocation());
    }

    /* Delegate invoked for every matched node: replaces the node's
     * content (element children, attribute value, comment text, PI data,
     * or text) with the computed hash. */
    final SaxonProcessorDelegate hashDelegate = new AbstractSaxonProcessorDelegate() {
        @Override
        public boolean startDocument(final XdmNode node, final SaxonBuilder builder) {
            return true;
        }

        @Override
        public void endDocument(final XdmNode node, final SaxonBuilder builder) {
        }

        @Override
        public EnumSet<NextSteps> startElement(final XdmNode element, final SaxonBuilder builder) {
            builder.text(hashValue);
            /* Returning no next steps skips the matched element's own
             * attributes and children. */
            return EnumSet.noneOf(NextSteps.class);
        }

        @Override
        public void endElement(final XdmNode node, final SaxonBuilder builder) {
            builder.endElement();
        }

        @Override
        public void attribute(final XdmNode node, final SaxonBuilder builder) {
            builder.attribute(node.getNodeName(), hashValue);
        }

        @Override
        public void comment(final XdmNode node, final SaxonBuilder builder) {
            builder.comment(hashValue);
        }

        @Override
        public void processingInstruction(final XdmNode node, final SaxonBuilder builder) {
            builder.processingInstruction(node.getNodeName().getLocalName(), hashValue);
        }

        @Override
        public void text(final XdmNode node, final SaxonBuilder builder) {
            builder.text(hashValue);
        }
    };

    /* Non-matched nodes are copied through unchanged. */
    final SaxonProcessor hashProcessor = new SaxonProcessor(input.getPipelineContext().getProcessor(),
            SaxonProcessorDelegates.forXsltMatchPattern(input.getPipelineContext().getProcessor(), match,
                    input.getStep().getNode(), hashDelegate, new CopyingSaxonProcessorDelegate()));
    final XdmNode result = hashProcessor.apply(sourceDocument);
    output.writeNodes(XProcPorts.RESULT, result);
}
From source file:org.webcurator.auth.dbms.DebugSHAEncoder.java
public String encodePassword(String rawPass, Object salt) { String saltedPass = mergePasswordAndSalt(rawPass, salt, false); System.out.println("mergedPasswordAndSalt: [" + saltedPass + "]"); if (!getEncodeHashAsBase64()) { System.out.println("Not Doing base 64"); return DigestUtils.shaHex(saltedPass); }// w w w . ja va 2s . c om byte[] encoded = Base64.encodeBase64(DigestUtils.sha(saltedPass)); System.out.println("encodedPass: [" + new String(encoded) + "]"); return new String(encoded); }
From source file:org.wso2.carbon.appmgt.gateway.utils.GatewayUtils.java
public static String getMD5Hash(String input) { return DigestUtils.shaHex(input); }