List of usage examples for com.google.common.collect TreeMultimap keySet
@Override @GwtIncompatible("NavigableSet") public NavigableSet<K> keySet()
Because a TreeMultimap has unique, sorted keys, this method returns a NavigableSet, instead of the java.util.Set specified in the Multimap interface.
From source file:org.apache.ctakes.ytex.kernel.wsd.WordSenseDisambiguatorImpl.java
/**
 * Picks the best sense for the ambiguous term at {@code index} in a sentence by scoring each
 * candidate concept against the concepts of surrounding tokens (a window of roughly
 * {@code windowSize} positions on each side) plus optional external context concepts.
 *
 * @param sentenceConcepts per-token candidate concept sets for the whole sentence
 * @param index            position of the token to disambiguate
 * @param contextConcepts  extra concepts (e.g. title concepts) to include in the context; may be null
 * @param windowSize       number of neighboring tokens to draw context from on each side
 * @param metric           similarity metric used by {@code scoreConcept}
 * @param scoreMap         optional out-parameter; filled with concept -&gt; score if non-null
 * @param weighted         passed through to {@code scoreConcept}
 * @return the best-scoring concept, or null when all candidates tie (no disambiguation possible)
 */
@Override
public String disambiguate(List<Set<String>> sentenceConcepts, int index, Set<String> contextConcepts,
        int windowSize, SimilarityMetricEnum metric, Map<String, Double> scoreMap, boolean weighted) {
    // get the candidate concepts that we want to disambiguate
    Set<String> candidateConcepts = sentenceConcepts.get(index);
    // trivially unambiguous - nothing to score
    if (candidateConcepts.size() == 1)
        return candidateConcepts.iterator().next();
    // allocate map to hold all the context concepts (concept -> occurrence count)
    Map<String, Integer> windowContextConcepts = new HashMap<String, Integer>();
    // add context concepts (e.g. title concepts)
    if (contextConcepts != null) {
        addConcepts(windowContextConcepts, contextConcepts);
    }
    // add windowSize concepts from the sentence: get left, then right concepts.
    // case 1 - enough tokens on both sides
    // NOTE(review): "- 1"/"+ 1" here makes the window windowSize+1 tokens wide on each
    // side before clamping - looks like an off-by-one; confirm intended window width.
    int indexLeftStart = index - windowSize - 1;
    int indexRightStart = index + windowSize + 1;
    if (indexLeftStart < 0) {
        // case 2 - not enough tokens on left: shift the unused width to the right side
        indexRightStart += (-1 * indexLeftStart);
        indexLeftStart = 0;
    } else if (indexRightStart >= sentenceConcepts.size()) {
        // case 3 - not enough tokens on right: shift the unused width to the left side.
        // NOTE(review): the "- 1" widens the left expansion by 2 vs. the overshoot;
        // presumably "+ 1" was intended - verify against the original tests.
        indexLeftStart -= indexRightStart - sentenceConcepts.size() - 1;
        indexRightStart = sentenceConcepts.size() - 1;
    }
    // make sure the range is in bounds
    if (indexLeftStart < 0)
        indexLeftStart = 0;
    if (indexRightStart >= sentenceConcepts.size())
        indexRightStart = sentenceConcepts.size() - 1;
    // add the concepts in the ranges, skipping the target token itself
    if (indexLeftStart < index) {
        for (Set<String> cs : sentenceConcepts.subList(indexLeftStart, index)) {
            addConcepts(windowContextConcepts, cs);
        }
    }
    if (indexRightStart > index) {
        for (Set<String> cs : sentenceConcepts.subList(index + 1, indexRightStart + 1)) {
            addConcepts(windowContextConcepts, cs);
        }
    }
    // score each candidate; TreeMultimap keeps keys sorted ascending, so the
    // highest-scoring concepts are under keySet().last()
    TreeMultimap<Double, String> scoreConceptMap = TreeMultimap.create();
    for (String c : candidateConcepts) {
        scoreConceptMap.put(scoreConcept(c, windowContextConcepts, metric, weighted), c);
    }
    // if scoreMap is not null, fill it in with the concept scores - invert scoreConceptMap
    boolean bNonZero = false; // NOTE(review): never read - dead local, candidate for removal
    if (scoreMap != null) {
        for (Map.Entry<Double, String> scoreConcept : scoreConceptMap.entries()) {
            scoreMap.put(scoreConcept.getValue(), scoreConcept.getKey());
        }
    }
    // concepts sharing the maximum score
    SortedSet<String> bestConcepts = scoreConceptMap.get(scoreConceptMap.keySet().last());
    String bestConcept = null;
    if (bestConcepts.size() == 1) {
        // only 1 concept with high score
        bestConcept = bestConcepts.iterator().next();
    } else if (bestConcepts.size() == candidateConcepts.size()) {
        // all concepts have same score - give up, caller gets null
        bestConcept = null;
    } else {
        // multiple best candidates - pick concept with lowest ic - most general concept
        double ic = 1e6;
        Map<String, ConcRel> conceptMap = this.getConceptSimilarityService().getConceptGraph().getConceptMap();
        for (String c : bestConcepts) {
            ConcRel cr = conceptMap.get(c);
            if (cr != null && cr.getIntrinsicInfoContent() < ic) {
                ic = cr.getIntrinsicInfoContent();
                bestConcept = c;
            }
        }
    }
    // get the best scoring concept
    return bestConcept;
}
From source file:com.publictransitanalytics.scoregenerator.output.SectorReachInformation.java
/**
 * Builds reach information for a sector: stores the reach count, the set of reach times
 * (as local-time strings), and a path-&gt;frequency map ordered from most to least frequent path.
 *
 * @param bestPaths  the best movement paths into this sector; may be null (then pathCounts is null)
 * @param count      number of times the sector was reached
 * @param reachTimes timestamps at which the sector was reached
 * @throws InterruptedException declared by the signature; not visibly thrown in this body
 */
public SectorReachInformation(final Set<MovementPath> bestPaths, final int count,
        final Set<LocalDateTime> reachTimes) throws InterruptedException {
    reachCount = count;
    // keep only the time-of-day portion, rendered as strings
    this.reachTimes = reachTimes.stream().map(time -> time.toLocalTime().toString())
            .collect(Collectors.toSet());
    // frequency -> paths, keys sorted ascending; paths tie-broken by their string form
    final TreeMultimap<Integer, SimplePath> frequencyMap = TreeMultimap.create(Integer::compareTo,
            (p1, p2) -> p1.toString().compareTo(p2.toString()));
    if (bestPaths != null) {
        // collapse MovementPaths to SimplePaths, counting duplicates via the multiset
        final ImmutableMultiset.Builder<SimplePath> bestSimplePathsBuilder = ImmutableMultiset.builder();
        for (final MovementPath bestPath : bestPaths) {
            bestSimplePathsBuilder.add(new SimplePath(bestPath));
        }
        final ImmutableMultiset<SimplePath> bestSimplePaths = bestSimplePathsBuilder.build();
        for (final SimplePath path : bestSimplePaths.elementSet()) {
            frequencyMap.put(bestSimplePaths.count(path), path);
        }
        // LinkedHashMap preserves the descending-frequency insertion order below
        pathCounts = new LinkedHashMap<>();
        // TreeMultimap.keySet() is a NavigableSet, so we can walk frequencies high-to-low
        for (final Integer frequency : frequencyMap.keySet().descendingSet()) {
            final NavigableSet<SimplePath> pathsForFrequency = frequencyMap.get(frequency);
            for (final SimplePath pathForFrequency : pathsForFrequency) {
                pathCounts.put(pathForFrequency, frequency);
            }
        }
    } else {
        pathCounts = null;
    }
}
From source file:feign.jaxb.examples.AWSSignatureVersion4.java
/**
 * Signs the request with AWS Signature Version 4 via query parameters: normalizes headers,
 * builds the canonical request and string-to-sign, derives the signing key, and appends the
 * X-Amz-* query parameters. Statement order follows the SigV4 spec and must not be changed.
 *
 * @param input the request template to sign (mutated in place)
 * @return the signed, concrete request
 */
@Override
public Request apply(RequestTemplate input) {
    // Host header is part of the signed headers
    input.header("Host", URI.create(input.url()).getHost());
    // TreeMultimap gives the sorted, lowercased header names SigV4 requires
    TreeMultimap<String, String> sortedLowercaseHeaders = TreeMultimap.create();
    for (String key : input.headers().keySet()) {
        sortedLowercaseHeaders.putAll(trimToLowercase.apply(key),
                transform(input.headers().get(key), trimToLowercase));
    }
    String timestamp;
    // SimpleDateFormat (presumably what iso8601 is) is not thread-safe, hence the lock
    synchronized (iso8601) {
        timestamp = iso8601.format(new Date());
    }
    // scope = date(yyyyMMdd)/region/service/aws4_request
    String credentialScope = Joiner.on('/').join(timestamp.substring(0, 8), region, service, "aws4_request");
    input.query("X-Amz-Algorithm", "AWS4-HMAC-SHA256");
    input.query("X-Amz-Credential", accessKey + "/" + credentialScope);
    input.query("X-Amz-Date", timestamp);
    input.query("X-Amz-SignedHeaders", Joiner.on(';').join(sortedLowercaseHeaders.keySet()));
    String canonicalString = canonicalString(input, sortedLowercaseHeaders);
    String toSign = toSign(timestamp, credentialScope, canonicalString);
    byte[] signatureKey = signatureKey(secretKey, timestamp);
    String signature = base16().lowerCase().encode(hmacSHA256(toSign, signatureKey));
    input.query("X-Amz-Signature", signature);
    return input.request();
}
From source file:sadl.models.pdrta.PDRTA.java
private PDRTA(TreeMultimap<Integer, String> trans, TreeMultimap<Integer, String> stats, PDRTAInput inp) { input = inp;//from w w w.ja v a 2 s . co m states = new TIntObjectHashMap<>(); for (final Integer idxInt : trans.keySet()) { final int idx = idxInt.intValue(); final StateStatistic s = StateStatistic.reconstructStat(input.getAlphSize(), getHistSizes(), stats.get(idxInt)); states.put(idx, new PDRTAState(this, idx, s)); } for (final Entry<Integer, String> eT : trans.entries()) { // 75 -> 76 [ label = "0 [3, 989] p=1.0" ]; final String[] split = eT.getValue().split(" "); final int source = Integer.parseInt(split[0]); final int target = Integer.parseInt(split[2]); final String sym = split[6].substring(1); final int begin = Integer.parseInt(split[7].substring(1, split[7].length() - 1)); final int end = Integer.parseInt(split[8].substring(0, split[8].length() - 1)); double prob; if (split[9].endsWith("\"")) { prob = Double.parseDouble(split[9].substring(2, split[9].length() - 1)); } else { prob = Double.parseDouble(split[9].substring(2)); } final PDRTAState s = states.get(source); PDRTAState t; if (!states.containsKey(target)) { final StateStatistic st = StateStatistic.reconstructStat(getAlphSize(), getHistSizes(), stats.get(new Integer(target))); t = new PDRTAState(this, target, st); states.put(target, t); } else { t = states.get(target); } Interval in = s.getInterval(input.getAlphIndex(sym), end); assert (in != null); assert (in.getTarget() == null); assert (s.getStat().getTransProb(input.getAlphIndex(sym), in) == 0.0); assert (in.contains(begin)); Interval newIn; if (end < in.getEnd()) { newIn = in.split(end); s.getIntervals(input.getAlphIndex(sym)).put(new Integer(newIn.getEnd()), newIn); in = newIn; } if (begin > in.getBegin()) { newIn = in.split(begin - 1); s.getIntervals(input.getAlphIndex(sym)).put(new Integer(newIn.getEnd()), newIn); } in.setTarget(t); s.getStat().addInterval(input.getAlphIndex(sym), in, prob); } root = states.get(0); }
From source file:org.apromore.similaritysearch.common.algos.GraphEditDistanceGreedy.java
/**
 * Greedy graph-edit-distance mapping: repeatedly picks the vertex pair whose addition to the
 * mapping yields the smallest edit distance, with deterministic tie-breaking (label order,
 * then context labels), falling back to a seeded-random pick (and clearing {@code deterministic}).
 *
 * @param sg1 first graph
 * @param sg2 second graph
 * @return the set of matched vertex pairs forming the best mapping found
 */
public Set<TwoVertices> compute(Graph sg1, Graph sg2) {
    init(sg1, sg2);

    // INIT
    BestMapping mapping = new BestMapping();
    // candidate pairs whose label edit distance passes the cutoff
    Set<TwoVertices> openCouples = times(sg1.getVertices(), sg2.getVertices(), ledcutoff);
    double shortestEditDistance = Double.MAX_VALUE;
    // fixed seed keeps runs reproducible even when the random fallback triggers
    Random randomized = new Random(123456789);
    int stepn = 0;

    // STEP: greedily add one pair per iteration while the distance improves or stays equal
    boolean doStep = true;
    while (doStep) {
        doStep = false;
        stepn++;
        Vector<TwoVertices> bestCandidates = new Vector<TwoVertices>();
        double newShortestEditDistance = shortestEditDistance;
        // find all open pairs achieving the minimum edit distance
        for (TwoVertices couple : openCouples) {
            double newEditDistance = this.editDistance(mapping, couple);
            if (newEditDistance < newShortestEditDistance) {
                bestCandidates = new Vector<TwoVertices>();
                bestCandidates.add(couple);
                newShortestEditDistance = newEditDistance;
            } else if (newEditDistance == newShortestEditDistance) {
                bestCandidates.add(couple);
            }
        }
        if (bestCandidates.size() > 0) {
            TwoVertices couple;
            // Case 1: Only one candidate pair
            if (bestCandidates.size() == 1) {
                couple = bestCandidates.firstElement();
            } else {
                // CASE 2: Lexicographical order is enough.
                // Key each pair by its two labels concatenated in sorted order.
                TreeMultimap<String, TwoVertices> tmap = TreeMultimap.create();
                for (TwoVertices pair : bestCandidates) {
                    String label1 = sg1.getVertexLabel(pair.v1);
                    String label2 = sg2.getVertexLabel(pair.v2);
                    if (label1 != null && label2 != null && label1.compareTo(label2) > 0) {
                        String tmp = label1;
                        label1 = label2;
                        label2 = tmp;
                    }
                    tmap.put(label1 + label2, pair);
                }
                String firstkey = tmap.keySet().first();
                if (tmap.get(firstkey).size() == 1) {
                    couple = tmap.get(firstkey).first();
                } else if (tmap.get(firstkey).size() > 1) {
                    // still tied: disambiguate by "context" labels built from each vertex's
                    // preset/postset neighbor labels (TreeMultiset keeps them sorted)
                    Set<TwoVertices> set = tmap.get(firstkey);
                    TreeMultimap<String, TwoVertices> tmapp = TreeMultimap.create();
                    String label1;
                    String tmpLabel;
                    TreeMultiset<String> mset = TreeMultiset.create();
                    for (TwoVertices pair : set) {
                        label1 = sg1.getVertexLabel(pair.v1);
                        mset.clear();
                        for (Vertex n : sg1.getPreset(pair.v1)) {
                            tmpLabel = sg1.getVertexLabel(n.getID());
                            if (tmpLabel != null) {
                                mset.add(tmpLabel);
                            }
                        }
                        label1 += mset.toString();
                        mset.clear();
                        for (Vertex n : sg1.getPostset(pair.v1)) {
                            tmpLabel = sg1.getVertexLabel(n.getID());
                            if (tmpLabel != null) {
                                mset.add(tmpLabel);
                            }
                        }
                        label1 += mset.toString();
                        String label2 = sg2.getVertexLabel(pair.v2);
                        mset.clear();
                        for (Vertex n : sg2.getPreset(pair.v2)) {
                            tmpLabel = sg2.getVertexLabel(n.getID());
                            if (tmpLabel != null) {
                                mset.add(tmpLabel);
                            }
                        }
                        label2 += mset.toString();
                        mset.clear();
                        for (Vertex n : sg2.getPostset(pair.v2)) {
                            tmpLabel = sg2.getVertexLabel(n.getID());
                            if (tmpLabel != null) {
                                mset.add(tmpLabel);
                            }
                        }
                        label2 += mset.toString();
                        if (label1.compareTo(label2) > 0) {
                            String tmp = label1;
                            label1 = label2;
                            label2 = tmp;
                        }
                        tmapp.put(label1 + label2, pair);
                    }
                    String contextkey = tmapp.keySet().first();
                    // CASE 3: Composite labels (concatenation of labels of nodes surrounding the target vertex)
                    if (tmapp.get(contextkey).size() == 1) {
                        couple = tmapp.get(contextkey).first();
                    } else {
                        // CASE 4: Non deterministic choice (Choose a random candidate)
                        deterministic = false;
                        couple = bestCandidates.get(randomized.nextInt(bestCandidates.size()));
                    }
                } else {
                    // CASE 5: Non deterministic choice (Choose a random candidate)
                    deterministic = false;
                    couple = bestCandidates.get(randomized.nextInt(bestCandidates.size()));
                }
            }
            // remove every open pair that reuses either matched vertex
            Set<TwoVertices> newOpenCouples = new HashSet<TwoVertices>();
            for (TwoVertices p : openCouples) {
                if (!p.v1.equals(couple.v1) && !p.v2.equals(couple.v2)) {
                    newOpenCouples.add(p);
                }
            }
            openCouples = newOpenCouples;
            mapping.addPair(couple);
            shortestEditDistance = newShortestEditDistance;
            doStep = true;
        }
    }
    // Return the mapping with the smallest edit distance
    return mapping.mapping;
}
From source file:org.onebusaway.nyc.vehicle_tracking.impl.inference.VehicleInferenceInstance.java
/**
 * Resolves run-id matches for a raw location record: looks up the operator's assigned run for
 * the observation's service date, then fuzzy-matches the run id reported by the vehicle,
 * collecting the route ids implied by both.
 *
 * @param observation the raw location record being processed
 * @return the operator-assigned run id (may be null), fuzzy-matched run ids, best fuzzy
 *         distance (null if no fuzzy matching happened), and all implied route ids
 */
private RunResults findRunIdMatches(NycRawLocationRecord observation) {
    final Date obsDate = new Date(observation.getTime());

    String opAssignedRunId = null;
    final String operatorId = observation.getOperatorId();

    // operator ids that are empty or all zeros are treated as "not provided"
    final boolean noOperatorIdGiven = StringUtils.isEmpty(operatorId)
            || StringUtils.containsOnly(operatorId, "0");

    final Set<AgencyAndId> routeIds = Sets.newHashSet();
    if (!noOperatorIdGiven) {
        try {
            // look up the run the operator was assigned for this service date
            final OperatorAssignmentItem oai = _operatorAssignmentService.getOperatorAssignmentItemForServiceDate(
                    new ServiceDate(obsDate),
                    new AgencyAndId(observation.getVehicleId().getAgencyId(), operatorId));
            if (oai != null) {
                if (_runService.isValidRunId(oai.getRunId())) {
                    opAssignedRunId = oai.getRunId();
                    routeIds.addAll(_runService.getRoutesForRunId(opAssignedRunId));
                }
            }
        } catch (final Exception e) {
            // best-effort: a failed assignment lookup should not abort inference
            _log.warn(e.getMessage());
        }
    }

    Set<String> fuzzyMatches = Collections.emptySet();
    final String reportedRunId = observation.getRunId();
    Integer bestFuzzyDistance = null;

    if (StringUtils.isEmpty(opAssignedRunId) && !noOperatorIdGiven) {
        _log.info("no assigned run found for operator=" + operatorId);
    }

    // skip placeholder run ids made up entirely of '0' and '-'
    if (StringUtils.isNotEmpty(reportedRunId)
            && !StringUtils.containsOnly(reportedRunId, new char[] { '0', '-' })) {
        try {
            // TreeMultimap is keyed by fuzzy-match distance, sorted ascending,
            // so keySet().first() is the best (smallest) distance
            final TreeMultimap<Integer, String> fuzzyReportedMatches = _runService
                    .getBestRunIdsForFuzzyId(reportedRunId);
            if (fuzzyReportedMatches.isEmpty()) {
                _log.info("couldn't find a fuzzy match for reported runId=" + reportedRunId);
            } else {
                bestFuzzyDistance = fuzzyReportedMatches.keySet().first();
                // only accept matches at distance <= 0; presumably 0 means exact - confirm
                if (bestFuzzyDistance <= 0) {
                    fuzzyMatches = fuzzyReportedMatches.get(bestFuzzyDistance);
                    for (final String runId : fuzzyMatches) {
                        routeIds.addAll(_runService.getRoutesForRunId(runId));
                    }
                }
            }
        } catch (final IllegalArgumentException ex) {
            _log.warn(ex.getMessage());
        }
    }

    return new RunResults(opAssignedRunId, fuzzyMatches, bestFuzzyDistance, routeIds);
}
From source file:org.apromore.similaritysearch.common.algos.GraphEditDistanceGreedy.java
/**
 * Computes the greedy graph edit distance between two graphs. Same greedy pairing loop as
 * {@link #compute}, but returns the distance value and records the mapping size in
 * {@code nrSubstitudedVertices} instead of returning the mapping.
 *
 * @param sg1 first graph
 * @param sg2 second graph
 * @return the smallest edit distance found (Double.MAX_VALUE if nothing was mapped or an error occurred)
 */
public double computeGED(Graph sg1, Graph sg2) {
    BestMapping mapping = new BestMapping();
    double shortestEditDistance = Double.MAX_VALUE;
    // fixed seed keeps runs reproducible even when the random fallback triggers
    Random randomized = new Random(123456789);

    try {
        // INIT
        init(sg1, sg2);
        // candidate pairs whose label edit distance passes the cutoff
        Set<TwoVertices> openCouples = times(sg1.getVertices(), sg2.getVertices(), ledcutoff);

        // STEP: greedily add one pair per iteration while the distance improves or stays equal
        boolean doStep = true;
        while (doStep) {
            doStep = false;
            Vector<TwoVertices> bestCandidates = new Vector<TwoVertices>();
            double newShortestEditDistance = shortestEditDistance;
            // find all open pairs achieving the minimum edit distance
            for (TwoVertices couple : openCouples) {
                double newEditDistance = this.editDistance(mapping, couple);
                if (newEditDistance < newShortestEditDistance) {
                    bestCandidates = new Vector<TwoVertices>();
                    bestCandidates.add(couple);
                    newShortestEditDistance = newEditDistance;
                } else if (newEditDistance == newShortestEditDistance) {
                    bestCandidates.add(couple);
                }
            }
            if (bestCandidates.size() > 0) {
                TwoVertices couple;
                // Case 1: Only one candidate pair
                if (bestCandidates.size() == 1) {
                    couple = bestCandidates.firstElement();
                } else {
                    // CASE 2: Lexicographical order is enough.
                    // Key each pair by its two labels concatenated in sorted order.
                    TreeMultimap<String, TwoVertices> tmap = TreeMultimap.create();
                    for (TwoVertices pair : bestCandidates) {
                        String label1 = sg1.getVertexLabel(pair.v1);
                        String label2 = sg2.getVertexLabel(pair.v2);
                        if (label1 != null && label2 != null && label1.compareTo(label2) > 0) {
                            String tmp = label1;
                            label1 = label2;
                            label2 = tmp;
                        }
                        tmap.put(label1 + label2, pair);
                    }
                    String firstkey = tmap.keySet().first();
                    if (tmap.get(firstkey).size() == 1) {
                        couple = tmap.get(firstkey).first();
                    } else if (tmap.get(firstkey).size() > 1) {
                        // still tied: disambiguate by "context" labels built from each vertex's
                        // preset/postset neighbor labels (TreeMultiset keeps them sorted)
                        Set<TwoVertices> set = tmap.get(firstkey);
                        TreeMultimap<String, TwoVertices> tmapp = TreeMultimap.create();
                        String label1;
                        String tmpLabel;
                        TreeMultiset<String> mset = TreeMultiset.create();
                        for (TwoVertices pair : set) {
                            label1 = sg1.getVertexLabel(pair.v1);
                            mset.clear();
                            for (Vertex n : sg1.getPreset(pair.v1)) {
                                tmpLabel = sg1.getVertexLabel(n.getID());
                                if (tmpLabel != null) {
                                    mset.add(tmpLabel);
                                }
                            }
                            label1 += mset.toString();
                            mset.clear();
                            for (Vertex n : sg1.getPostset(pair.v1)) {
                                tmpLabel = sg1.getVertexLabel(n.getID());
                                if (tmpLabel != null) {
                                    mset.add(tmpLabel);
                                }
                            }
                            label1 += mset.toString();
                            String label2 = sg2.getVertexLabel(pair.v2);
                            mset.clear();
                            for (Vertex n : sg2.getPreset(pair.v2)) {
                                tmpLabel = sg2.getVertexLabel(n.getID());
                                if (tmpLabel != null) {
                                    mset.add(tmpLabel);
                                }
                            }
                            label2 += mset.toString();
                            mset.clear();
                            for (Vertex n : sg2.getPostset(pair.v2)) {
                                tmpLabel = sg2.getVertexLabel(n.getID());
                                if (tmpLabel != null) {
                                    mset.add(tmpLabel);
                                }
                            }
                            label2 += mset.toString();
                            if (label1.compareTo(label2) > 0) {
                                String tmp = label1;
                                label1 = label2;
                                label2 = tmp;
                            }
                            tmapp.put(label1 + label2, pair);
                        }
                        String contextkey = tmapp.keySet().first();
                        // CASE 3: Composite labels (concatenation of labels of nodes surrounding the target vertex)
                        if (tmapp.get(contextkey).size() == 1) {
                            couple = tmapp.get(contextkey).first();
                        } else {
                            // CASE 4: Non deterministic choice (Choose a random candidate)
                            deterministic = false;
                            couple = bestCandidates.get(randomized.nextInt(bestCandidates.size()));
                        }
                    } else {
                        // CASE 5: Non deterministic choice (Choose a random candidate)
                        deterministic = false;
                        couple = bestCandidates.get(randomized.nextInt(bestCandidates.size()));
                    }
                }
                // remove every open pair that reuses either matched vertex
                Set<TwoVertices> newOpenCouples = new HashSet<TwoVertices>();
                for (TwoVertices p : openCouples) {
                    if (!p.v1.equals(couple.v1) && !p.v2.equals(couple.v2)) {
                        newOpenCouples.add(p);
                    }
                }
                openCouples = newOpenCouples;
                mapping.addPair(couple);
                shortestEditDistance = newShortestEditDistance;
                doStep = true;
            }
        }
        nrSubstitudedVertices = mapping.size();
    } catch (Exception e) {
        LOGGER.error("Error occured while processing Distance Greedy Similarity Search ", e);
    }
    // Return the smallest edit distance
    return shortestEditDistance;
}
From source file:common.algos.GraphEditDistanceGreedy.java
public double computeGED(Graph sg1, Graph sg2) { BestMapping mapping = new BestMapping(); double shortestEditDistance = Double.MAX_VALUE; Random randomized = new Random(123456789); try {//from www . j a v a 2 s . c o m // INIT init(sg1, sg2); Set<TwoVertices> openCouples = times(sg1.getVertices(), sg2.getVertices(), ledcutoff); // STEP boolean doStep = true; while (doStep) { doStep = false; Vector<TwoVertices> bestCandidates = new Vector<TwoVertices>(); double newShortestEditDistance = shortestEditDistance; for (TwoVertices couple : openCouples) { double newEditDistance = this.editDistance(mapping, couple); if (newEditDistance < newShortestEditDistance) { bestCandidates = new Vector<TwoVertices>(); bestCandidates.add(couple); newShortestEditDistance = newEditDistance; } else if (newEditDistance == newShortestEditDistance) { bestCandidates.add(couple); } } if (bestCandidates.size() > 0) { TwoVertices couple; // Case 1: Only one candidate pair if (bestCandidates.size() == 1) { couple = bestCandidates.firstElement(); } else { // CASE 2: Lexicographical order is enough TreeMultimap<String, TwoVertices> tmap = TreeMultimap.create(); for (TwoVertices pair : bestCandidates) { String label1 = sg1.getVertexLabel(pair.v1); String label2 = sg2.getVertexLabel(pair.v2); if (label1 != null && label2 != null && label1.compareTo(label2) > 0) { String tmp = label1; label1 = label2; label2 = tmp; } tmap.put(label1 + label2, pair); } String firstkey = tmap.keySet().first(); if (tmap.get(firstkey).size() == 1) { couple = tmap.get(firstkey).first(); } else if (tmap.get(firstkey).size() > 1) { Set<TwoVertices> set = tmap.get(firstkey); TreeMultimap<String, TwoVertices> tmapp = TreeMultimap.create(); String label1; String tmpLabel; TreeMultiset<String> mset = TreeMultiset.create(); for (TwoVertices pair : set) { label1 = sg1.getVertexLabel(pair.v1); mset.clear(); for (Vertex n : sg1.getPreset(pair.v1)) { tmpLabel = sg1.getVertexLabel(n.getID()); if (tmpLabel != null) { 
mset.add(tmpLabel); } } label1 += mset.toString(); mset.clear(); for (Vertex n : sg1.getPostset(pair.v1)) { tmpLabel = sg1.getVertexLabel(n.getID()); if (tmpLabel != null) { mset.add(tmpLabel); } } label1 += mset.toString(); String label2 = sg2.getVertexLabel(pair.v2); mset.clear(); for (Vertex n : sg2.getPreset(pair.v2)) { tmpLabel = sg2.getVertexLabel(n.getID()); if (tmpLabel != null) { mset.add(tmpLabel); } } label2 += mset.toString(); mset.clear(); for (Vertex n : sg2.getPostset(pair.v2)) { tmpLabel = sg2.getVertexLabel(n.getID()); if (tmpLabel != null) { mset.add(tmpLabel); } } label2 += mset.toString(); if (label1.compareTo(label2) > 0) { String tmp = label1; label1 = label2; label2 = tmp; } tmapp.put(label1 + label2, pair); } String contextkey = tmapp.keySet().first(); // CASE 3: Composite labels (concatenation of labels of nodes surrounding the target vertex) if (tmapp.get(contextkey).size() == 1) { couple = tmapp.get(contextkey).first(); } else { // CASE 4: Non deterministic choice (Choose a random candidate) deterministic = false; couple = bestCandidates.get(randomized.nextInt(bestCandidates.size())); } } else { // CASE 5: Non deterministic choice (Choose a random candidate) // System.out.println("oops ..."); deterministic = false; couple = bestCandidates.get(randomized.nextInt(bestCandidates.size())); } } Set<TwoVertices> newOpenCouples = new HashSet<TwoVertices>(); for (TwoVertices p : openCouples) { if (!p.v1.equals(couple.v1) && !p.v2.equals(couple.v2)) { newOpenCouples.add(p); } } openCouples = newOpenCouples; mapping.addPair(couple); shortestEditDistance = newShortestEditDistance; doStep = true; } } nrSubstitudedVertices = mapping.size(); } catch (Exception e) { } // Return the smallest edit distance return shortestEditDistance; }
From source file:org.commoncrawl.util.CompressedURLFPListV2.java
public static void validateDuplicateChecking() { TreeMultimap<Long, URLFPV2> sourceMap = TreeMultimap.create(); TreeMultimap<Long, URLFPV2> destMap = TreeMultimap.create(); ;/* w w w . ja v a2 s . c o m*/ ByteArrayOutputStream byteStream = new ByteArrayOutputStream(); Builder firstBuilder = new Builder(FLAG_ARCHIVE_SEGMENT_ID | FLAG_SERIALIZE_URLFP_FLAGS); insertURLFPItem(sourceMap, "http://www.google.com/hello", 1, 255); insertURLFPItem(sourceMap, "http://google.com/hello", 1, 255); insertURLFPItem(sourceMap, "http://google.com/foobar", 1, 255); insertURLFPItem(sourceMap, "http://www.google.com/hello", 1, 255); addMapToBuilder(firstBuilder, sourceMap); try { // flush to byte stream ... firstBuilder.flush(byteStream); // now set up to read the stream ByteArrayInputStream inputStream = new ByteArrayInputStream(byteStream.toByteArray(), 0, byteStream.size()); Reader reader = new Reader(inputStream); while (reader.hasNext()) { URLFPV2 fp = reader.next(); destMap.put(fp.getRootDomainHash(), fp); } reader.close(); for (long rootDomain : sourceMap.keySet()) { for (URLFPV2 URLFPV2 : sourceMap.get(rootDomain)) { System.out.println("SourceFP Root:" + URLFPV2.getRootDomainHash() + " Domain:" + URLFPV2.getDomainHash() + " URL:" + URLFPV2.getUrlHash()); } } for (long rootDomain : destMap.keySet()) { for (URLFPV2 URLFPV2 : destMap.get(rootDomain)) { System.out.println("DestFP Root:" + URLFPV2.getRootDomainHash() + " Domain:" + URLFPV2.getDomainHash() + " URL:" + URLFPV2.getUrlHash()); } } Assert.assertTrue(sourceMap.equals(destMap)); } catch (IOException e) { e.printStackTrace(); } }
From source file:eu.lp0.cursus.scoring.scores.impl.GenericOverallPositionData.java
/**
 * Assigns overall finishing positions from overall points: inverts points -&gt; pilots (sorted by
 * race placings, then race number), optionally pushes fully-simulated pilots behind everyone
 * else, then numbers the groups according to the {@code equalPositioning} policy.
 *
 * @return position -&gt; pilots, in position order
 */
@Override
protected LinkedListMultimap<Integer, Pilot> calculateOverallPositionsWithOrder() {
    // Invert race points with ordered lists of pilots
    Comparator<Pilot> racePlacings = new PilotRacePlacingComparator<T>(scores, placingMethod);
    Comparator<Pilot> fallbackOrdering = new PilotRaceNumberComparator();
    // keys (points) ascending; pilots within a key sorted by placing then fallback
    TreeMultimap<Integer, Pilot> invOverallPoints = TreeMultimap.create(Ordering.natural(),
            Ordering.from(racePlacings).compound(fallbackOrdering));
    Multimaps.invertFrom(Multimaps.forMap(scores.getOverallPoints()), invOverallPoints);

    // Calculate overall positions
    LinkedListMultimap<Integer, Pilot> overallPositions = LinkedListMultimap.create();
    List<Pilot> collectedPilots = new ArrayList<Pilot>(scores.getPilots().size());
    // groups of equally-pointed pilots, in the order positions will be assigned
    LinkedList<SortedSet<Pilot>> pilotPointsOrdering = new LinkedList<SortedSet<Pilot>>();
    int position = 1;

    if (allSimulatedToEnd) {
        final Map<Pilot, ? extends Set<Race>> simulatedPilotPoints = scores.getSimulatedPilotPoints();
        // true when every one of the pilot's races was simulated
        Predicate<Pilot> allSimulatedPilot = new Predicate<Pilot>() {
            private final int raceCount = scores.getRaces().size();

            @Override
            public boolean apply(Pilot input) {
                return simulatedPilotPoints.get(input).size() == raceCount;
            }
        };

        // first pass: pilots with at least one real (non-simulated) race
        for (Integer points : invOverallPoints.keySet()) {
            SortedSet<Pilot> pilots = Sets.filter(invOverallPoints.get(points), Predicates.not(allSimulatedPilot));
            if (!pilots.isEmpty()) {
                pilotPointsOrdering.add(pilots);
            }
        }

        // second pass: fully-simulated pilots go after everyone else
        for (Integer points : invOverallPoints.keySet()) {
            SortedSet<Pilot> pilots = Sets.filter(invOverallPoints.get(points), allSimulatedPilot);
            if (!pilots.isEmpty()) {
                pilotPointsOrdering.add(pilots);
            }
        }
    } else {
        for (Integer points : invOverallPoints.keySet()) {
            pilotPointsOrdering.add(invOverallPoints.get(points));
        }
    }

    for (SortedSet<Pilot> pilots : pilotPointsOrdering) {
        switch (equalPositioning) {
        case ALWAYS:
            // Always put pilots with the same points in the same position
            overallPositions.putAll(position, pilots);
            position += pilots.size();
            break;

        case IF_REQUIRED:
            // Try to put pilots with the same points in separate positions
            PeekingIterator<Pilot> it = Iterators.peekingIterator(pilots.iterator());
            while (it.hasNext()) {
                Pilot pilot = it.next();
                collectedPilots.add(pilot);

                // If this pilot compares equally with the next pilot, add them too
                while (it.hasNext() && racePlacings.compare(it.peek(), pilot) == 0) {
                    collectedPilots.add(it.next());
                }

                // Sort them by an arbitrary order
                Collections.sort(collectedPilots, fallbackOrdering);

                // Add them all to this position
                overallPositions.putAll(position, collectedPilots);
                position += collectedPilots.size();
                collectedPilots.clear();
            }
            break;
        }
    }

    return overallPositions;
}