Example usage for com.google.common.collect Multiset remove

List of usage examples for com.google.common.collect Multiset remove

Introduction

This page lists usage examples for com.google.common.collect Multiset remove.

Prototype

int remove(@Nullable Object element, int occurrences);

Source Link

Document

Removes a number of occurrences of the specified element from this multiset.

Usage

From source file:org.apache.aurora.scheduler.preemptor.PendingTaskProcessor.java

/**
 * Builds an execution sequence for pending task groups by interleaving batches
 * of up to {@code batchSize} occurrences from each group. For example:
 * {G1, G1, G1, G2, G2} with a batch size of 2 tasks per group is converted
 * into {G1, G1, G2, G2, G1}.
 *
 * @param groups Multiset of task groups.
 * @param batchSize The batch size of tasks from each group to sequence together.
 * @return A task group execution sequence.
 */
@VisibleForTesting
static List<TaskGroupKey> getPreemptionSequence(Multiset<TaskGroupKey> groups, int batchSize) {

    Preconditions.checkArgument(batchSize > 0, "batchSize should be positive.");

    Multiset<TaskGroupKey> remaining = HashMultiset.create(groups);
    List<TaskGroupKey> sequence = Lists.newLinkedList();
    Set<TaskGroupKey> distinctKeys = ImmutableSet.copyOf(groups.elementSet());
    while (!remaining.isEmpty()) {
        for (TaskGroupKey groupKey : distinctKeys) {
            if (remaining.contains(groupKey)) {
                // Multiset.remove(element, occurrences) returns the count
                // BEFORE removal, so clamp to batchSize to get how many
                // occurrences were actually taken.
                int countBefore = remaining.remove(groupKey, batchSize);
                int taken = Math.min(countBefore, batchSize);
                sequence.addAll(Collections.nCopies(taken, groupKey));
            }
        }
    }

    return sequence;
}

From source file:com.google.javascript.jscomp.deps.SortedDependencies.java

/**
 * Topologically sorts {@code items} so that every item appears after all of
 * its dependencies, breaking ties by the items' original order (stable).
 *
 * @param items the items to sort.
 * @param deps maps an item to the items it depends on.
 * @return a new list containing the sorted items.
 */
private static <T> List<T> topologicalStableSort(List<T> items, Multimap<T, T> deps) {
    // PriorityQueue cannot be built with capacity 0, so handle the empty
    // input up front.
    if (items.isEmpty()) {
        return new ArrayList<>();
    }

    // Remember each item's original position; the queue uses it to keep the
    // sort stable.
    final Map<T, Integer> position = new HashMap<>();
    for (int i = 0; i < items.size(); i++) {
        position.put(items.get(i), i);
    }

    PriorityQueue<T> ready = new PriorityQueue<>(items.size(), new Comparator<T>() {
        @Override
        public int compare(T a, T b) {
            return position.get(a).intValue() - position.get(b).intValue();
        }
    });
    List<T> sorted = new ArrayList<>();

    // Remaining unprocessed dependency count per item, and the reverse edges
    // (item -> items that depend on it).
    Multiset<T> pendingDeps = HashMultiset.create();
    Multimap<T, T> dependents = ArrayListMultimap.create();
    Multimaps.invertFrom(deps, dependents);

    // Seed the queue with every item that has no dependencies.
    for (T item : items) {
        Collection<T> itemDeps = deps.get(item);
        pendingDeps.add(item, itemDeps.size());
        if (itemDeps.isEmpty()) {
            ready.add(item);
        }
    }

    // Emit ready items one at a time, decrementing each dependent's pending
    // count and promoting dependents that reach zero.
    while (!ready.isEmpty()) {
        T next = ready.remove();
        sorted.add(next);
        for (T dependent : dependents.get(next)) {
            pendingDeps.remove(dependent, 1);
            if (pendingDeps.count(dependent) == 0) {
                ready.add(dependent);
            }
        }
    }

    return sorted;
}

From source file:com.github.rinde.rinsim.scenario.measure.Metrics.java

/**
 * Computes the number of occurrences of each event type in the specified
 * {@link Scenario}./*from  w  w w .j  a va 2 s.c o m*/
 * @param s The scenario to check.
 * @return A {@link ImmutableMultiset} of event types.
 */
public static ImmutableMultiset<Class<?>> getEventTypeCounts(Scenario s) {
    final Multiset<Class<?>> set = LinkedHashMultiset.create();
    for (final TimedEvent te : s.getEvents()) {
        set.add(te.getClass());
    }
    final List<Class<?>> toMove = new ArrayList<>();
    for (final Class<?> c : set.elementSet()) {
        if (!Modifier.isPublic(c.getModifiers()) && TimedEvent.class.isAssignableFrom(c.getSuperclass())
                && !set.contains(c.getSuperclass())) {
            toMove.add(c);
        }
    }
    for (final Class<?> c : toMove) {
        set.add(c.getSuperclass(), set.count(c));
        set.remove(c, set.count(c));
    }
    return ImmutableMultiset.copyOf(set);
}

From source file:com.github.fhirschmann.clozegen.lib.generators.CollocationGapGenerator.java

@Override
public Optional<Gap> generate(final int count) {
    checkNotNull(model);
    Gap gap = new Gap();
    gap.addValidAnswers(triplet.getValue1());

    // Candidate distractors: words the model has seen before the right
    // context or after the left context of this triplet.
    final Multiset<String> candidates = ConcurrentHashMultiset.create(MultisetUtils.mergeMultiSets(
            model.getTails().get(triplet.getValue2()), model.getHeads().get(triplet.getValue0())));

    // The correct answer must not appear among the distractors.
    candidates.remove(triplet.getValue1(), candidates.count(triplet.getValue1()));

    // Drop every candidate p* that actually occurs in the full context
    // (A, p*, B). ConcurrentHashMultiset tolerates removal while iterating.
    for (Entry<String> candidate : candidates.entrySet()) {
        String context = MiscUtils.WS_JOINER.join(triplet.getValue0(), candidate.getElement(),
                triplet.getValue2());
        if (model.getMultiset().contains(context)) {
            candidates.remove(candidate.getElement(), candidate.getCount());
        }
    }

    // Only produce a gap when enough distinct distractors remain.
    if (candidates.elementSet().size() > count - 2) {
        final Set<String> invalidAnswers = Sets
                .newHashSet(MultisetUtils.sortedElementList(candidates, count - 1));
        gap.addInvalidAnswers(invalidAnswers);
        return Optional.of(gap);
    }
    return Optional.absent();
}

From source file:eu.lp0.cursus.scoring.scores.impl.GenericRaceLapsData.java

@Override
protected List<Pilot> calculateRaceLapsInOrder(Race race, Map<Pilot, Integer> laps) {
    // Finishing order keyed by lap count: raceOrder.get(n) holds the pilots
    // who completed n laps, in finishing order.
    ListMultimap<Integer, Pilot> raceOrder = ArrayListMultimap.create(EXPECTED_MAXIMUM_LAPS,
            scores.getPilots().size());

    extractRaceLaps(race, laps, raceOrder, null);

    // Get penalties for each pilot, split by type. Zero-valued penalties
    // are ignored entirely.
    ListMultimap<Pilot, Penalty> cancelLaps = ArrayListMultimap.create(EXPECTED_MAXIMUM_PENALTIES,
            scores.getPilots().size());
    ListMultimap<Pilot, Penalty> adjustLaps = ArrayListMultimap.create(EXPECTED_MAXIMUM_PENALTIES,
            scores.getPilots().size());
    for (RaceAttendee attendee : Maps.filterKeys(race.getAttendees(), Predicates.in(scores.getPilots()))
            .values()) {
        for (Penalty penalty : Iterables.concat(Ordering.natural().immutableSortedCopy(attendee.getPenalties()),
                scores.getSimulatedRacePenalties(attendee.getPilot(), race))) {
            if (penalty.getValue() != 0) {
                switch (penalty.getType()) {
                case CANCEL_LAPS:
                    cancelLaps.put(attendee.getPilot(), penalty);
                    break;

                case ADJUST_LAPS:
                    adjustLaps.put(attendee.getPilot(), penalty);
                    break;

                default:
                    break;
                }
            }
        }
    }

    // Apply lap cancellation penalties by re-extracting the laps with a
    // filter that only admits each pilot's remaining (non-cancelled) laps.
    if (!cancelLaps.isEmpty()) {
        // pilotLaps holds each pilot's lap budget after cancellations.
        final Multiset<Pilot> pilotLaps = HashMultiset.create(laps.size());

        for (Map.Entry<Pilot, Integer> pilotLapCount : laps.entrySet()) {
            pilotLaps.setCount(pilotLapCount.getKey(), pilotLapCount.getValue());
        }

        for (Map.Entry<Pilot, Penalty> entry : cancelLaps.entries()) {
            int value = entry.getValue().getValue();
            // Positive penalty values cancel laps; negative values add them
            // back (a lap credit).
            if (value > 0) {
                pilotLaps.remove(entry.getKey(), value);
            } else {
                pilotLaps.add(entry.getKey(), Math.abs(value));
            }
        }

        extractRaceLaps(race, laps, raceOrder, new Predicate<Pilot>() {
            @Override
            public boolean apply(@Nullable Pilot pilot) {
                // Multiset.remove(Object) removes a single occurrence and
                // returns whether one was present, so each pilot passes this
                // filter at most pilotLaps.count(pilot) times.
                return pilotLaps.remove(pilot);
            }
        });
    }

    // Save pilot order
    List<Pilot> origPilotOrder = getPilotOrder(raceOrder);
    SortedSet<Pilot> noLaps = new TreeSet<Pilot>(new PilotRaceNumberComparator());
    Set<Integer> changed = new HashSet<Integer>();

    // It is intentional that pilots can end up having 0 laps but be considered to have completed the race
    for (Map.Entry<Pilot, Penalty> entry : adjustLaps.entries()) {
        Pilot pilot = entry.getKey();
        int lapCount = laps.get(pilot);

        // Move the pilot from their current lap-count bucket...
        raceOrder.remove(lapCount, pilot);
        changed.add(lapCount);

        // ...to the bucket adjusted by the penalty, clamped at zero laps.
        lapCount += entry.getValue().getValue();
        if (lapCount <= 0) {
            lapCount = 0;
            noLaps.add(pilot);
        }
        laps.put(pilot, lapCount);

        raceOrder.put(lapCount, pilot);
        changed.add(lapCount);
    }

    // Apply original pilot order: re-sort only the buckets touched by lap
    // adjustments so relative order within a bucket matches the pre-penalty
    // finishing order (pilots clamped to 0 laps are appended at the end).
    if (!changed.isEmpty()) {
        origPilotOrder.addAll(noLaps);

        for (Integer lapCount : changed) {
            raceOrder.replaceValues(lapCount,
                    Ordering.explicit(origPilotOrder).immutableSortedCopy(raceOrder.get(lapCount)));
        }

        return getPilotOrder(raceOrder);
    } else {
        return origPilotOrder;
    }
}

From source file:org.dllearner.utilities.examples.AutomaticNegativeExampleFinderSPARQL2.java

/**
 * Removes from {@code classes} every class that is a superclass of some other
 * class in the multiset, keeping only the most specific classes.
 */
private void keepMostSpecificClasses(Multiset<OWLClass> classes) {
    // Iterate over a snapshot so the original multiset can be mutated safely.
    HashMultiset<OWLClass> snapshot = HashMultiset.create(classes);
    for (OWLClass candidate : snapshot.elementSet()) {
        for (OWLClass other : snapshot.elementSet()) {
            if (candidate.equals(other)) {
                continue;
            }
            // Drop 'candidate' entirely if it is a superclass of 'other',
            // using the class hierarchy when available.
            boolean candidateIsSuperclass;
            if (sr.getClassHierarchy() != null) {
                candidateIsSuperclass = sr.getClassHierarchy().isSubclassOf(other, candidate);
            } else {
                candidateIsSuperclass = sr.isSuperClassOf(candidate, other);
            }
            if (candidateIsSuperclass) {
                classes.remove(candidate, classes.count(candidate));
                break;
            }
        }
    }
}

From source file:cpw.mods.inventorysorter.SortingHandler.java

/**
 * Evenly redistributes the counted items across the crafting grid slots that
 * already hold them, then notifies all affected slots.
 */
private void distributeInventory(Action.ActionContext context, Multiset<ItemStackHolder> itemcounts) {
    InventoryCrafting crafting = (InventoryCrafting) context.slot.inventory;

    // Pass 1: count how many grid slots hold each distinct item.
    Multiset<ItemStackHolder> slotsPerItem = TreeMultiset.create(new InventoryHandler.ItemStackComparator());
    for (int col = 0; col < crafting.getWidth(); col++) {
        for (int row = 0; row < crafting.getHeight(); row++) {
            ItemStack stack = crafting.getStackInRowAndColumn(col, row);
            if (stack != null) {
                slotsPerItem.add(new ItemStackHolder(stack));
            }
        }
    }

    // Pass 2: give each occupied slot an equal share (integer division) of
    // that item's total, deducting the share from the running counts.
    final ImmutableMultiset<ItemStackHolder> totals = ImmutableMultiset.copyOf(itemcounts);
    for (int col = 0; col < crafting.getWidth(); col++) {
        for (int row = 0; row < crafting.getHeight(); row++) {
            ItemStack stack = crafting.getStackInRowAndColumn(col, row);
            if (stack != null) {
                ItemStackHolder holder = new ItemStackHolder(stack);
                // slotsPerItem.count(holder) >= 1 because this stack was
                // counted in pass 1, so the division is safe.
                final int share = totals.count(holder) / slotsPerItem.count(holder);
                itemcounts.remove(holder, share);
                stack.stackSize = share;
            }
        }
    }

    // Pass 3: dump any remainder left by the integer division onto the first
    // matching slot encountered; setCount(ish, 0) returns the old count.
    for (int col = 0; col < crafting.getWidth(); col++) {
        for (int row = 0; row < crafting.getHeight(); row++) {
            ItemStack stack = crafting.getStackInRowAndColumn(col, row);
            if (stack != null) {
                ItemStackHolder holder = new ItemStackHolder(stack);
                if (itemcounts.count(holder) > 0) {
                    stack.stackSize += itemcounts.setCount(holder, 0);
                }
            }
        }
    }

    // Notify every slot in the mapped range that its contents changed.
    for (int slot = context.slotMapping.begin; slot < context.slotMapping.end + 1; slot++) {
        context.player.openContainer.getSlot(slot).onSlotChanged();
    }
}

From source file:BibTex.IOmethods.java

/**
 * Writes a '|'-separated CSV ("top contributors per categories.csv") where
 * each column is a category and successive rows list that category's authors
 * in decreasing order of contribution count, formatted as "name(count)".
 *
 * <p>The per-category author multisets are consumed while the rows are built.
 *
 * @param refs the bibliography entries to aggregate.
 * @throws IOException if the output file cannot be written.
 */
public void writeTopContributorByCategoryToFile(Set<BibTexRef> refs) throws IOException {
    // try-with-resources guarantees the writer is closed even when building
    // or writing the output throws (the original leaked it on exception).
    try (BufferedWriter bw = new BufferedWriter(
            new FileWriter(folder + "top contributors per categories.csv"))) {
        StringBuilder sb = new StringBuilder();
        String sep = "|";

        // Map each category name to the multiset of its authors, with
        // multiplicity = number of refs they co-authored in that category.
        Map<String, Multiset<Author>> categoriesToAuthors = new TreeMap<>();
        List<String> categoryNames = new ArrayList<>();

        for (BibTexRef ref : refs) {
            for (Category category : ref.getCategories()) {
                String name = category.getCategoryName();
                if (!categoryNames.contains(name)) {
                    categoryNames.add(name);
                }
                Multiset<Author> authors = categoriesToAuthors.get(name);
                if (authors == null) {
                    authors = HashMultiset.create();
                    categoriesToAuthors.put(name, authors);
                }
                authors.addAll(ref.getAuthors());
            }
        }
        Collections.sort(categoryNames);

        // Header row: one column per category.
        for (String categoryName : categoryNames) {
            sb.append(categoryName);
            sb.append(sep);
        }
        sb.append("\n");

        // Body rows: on each pass emit every category's currently
        // most-frequent author, then remove all of that author's occurrences
        // so the next row advances; stop once all multisets are empty.
        boolean continueLoop = true;
        while (continueLoop) {
            for (String category : categoriesToAuthors.keySet()) {
                Multiset<Author> authors = categoriesToAuthors.get(category);
                Iterator<Author> byCount = Multisets.copyHighestCountFirst(authors)
                        .elementSet().iterator();
                if (byCount.hasNext()) {
                    Author author = byCount.next();
                    sb.append(author.getFullname()).append("(").append(authors.count(author))
                            .append(")").append(sep);
                    authors.remove(author, authors.count(author));
                } else {
                    sb.append(sep);
                }
            }
            sb.append("\n");

            int categoriesDone = 0;
            for (String cat : categoriesToAuthors.keySet()) {
                if (categoriesToAuthors.get(cat).isEmpty()) {
                    categoriesDone++;
                }
            }
            continueLoop = categoriesDone != categoryNames.size();
        }

        bw.write(sb.toString());
    }
}

From source file:BibTex.IOmethods.java

/**
 * Writes a '|'-separated CSV ("journals per categories.csv") where each
 * column is a category and successive rows list that category's journals in
 * decreasing order of occurrence, formatted as "journal (count)".
 *
 * <p>Journal titles are lower-cased and replaced by a known abbreviation
 * when one exists. The per-category journal multisets are consumed while the
 * rows are built.
 *
 * @param refs the bibliography entries to aggregate.
 * @throws IOException if the abbreviation map cannot be loaded or the output
 *         file cannot be written.
 */
public void writeJournalsPerCategories(Set<BibTexRef> refs) throws IOException {
    JournalAbbreviationsMapping jmap = new JournalAbbreviationsMapping();
    jmap.loadMap();

    // try-with-resources guarantees the writer is closed even when building
    // or writing the output throws (the original leaked it on exception).
    try (BufferedWriter bw = new BufferedWriter(
            new FileWriter(folder + "journals per categories.csv"))) {
        StringBuilder sb = new StringBuilder();
        String sep = "|";

        // Map each category name to a multiset of journal abbreviations.
        Map<String, Multiset<String>> categoriesToJournals = new TreeMap<>();
        List<String> categoryNames = new ArrayList<>();

        for (BibTexRef ref : refs) {
            String title = ref.getJournal();
            if (title == null || title.isEmpty()) {
                continue;
            }
            title = title.toLowerCase();

            // Prefer a known abbreviation; fall back to the full title.
            Set<String> abbrev = (Set<String>) jmap.getJournalsToAbbrev().get(title);
            if (abbrev == null || abbrev.isEmpty()) {
                abbrev = new HashSet<>();
                abbrev.add(title);
            }
            String abbreviation = abbrev.iterator().next();

            for (Category category : ref.getCategories()) {
                String name = category.getCategoryName();
                if (!categoryNames.contains(name)) {
                    categoryNames.add(name);
                }
                Multiset<String> journals = categoriesToJournals.get(name);
                if (journals == null) {
                    journals = HashMultiset.create();
                    categoriesToJournals.put(name, journals);
                }
                journals.add(abbreviation);
            }
        }
        Collections.sort(categoryNames);

        // Header row: one column per category.
        for (String categoryName : categoryNames) {
            sb.append(categoryName);
            sb.append(sep);
        }
        sb.append("\n");

        // Body rows: on each pass emit every category's currently
        // most-frequent journal, then remove all of its occurrences so the
        // next row advances; stop once all multisets are empty.
        boolean continueLoop = true;
        while (continueLoop) {
            for (String category : categoriesToJournals.keySet()) {
                Multiset<String> journals = categoriesToJournals.get(category);
                Iterator<String> byCount = Multisets.copyHighestCountFirst(journals)
                        .elementSet().iterator();
                if (byCount.hasNext()) {
                    String journal = byCount.next();
                    sb.append(journal).append(" (").append(journals.count(journal)).append(")")
                            .append(sep);
                    journals.remove(journal, journals.count(journal));
                } else {
                    sb.append(sep);
                }
            }
            sb.append("\n");

            int categoriesDone = 0;
            for (String cat : categoriesToJournals.keySet()) {
                if (categoriesToJournals.get(cat).isEmpty()) {
                    categoriesDone++;
                }
            }
            continueLoop = categoriesDone != categoryNames.size();
        }

        bw.write(sb.toString());
    }
}