Example usage for com.google.common.collect Multimap remove

List of usage examples for com.google.common.collect Multimap remove

Introduction

This page collects usage examples for com.google.common.collect Multimap remove.

Prototype

boolean remove(@Nullable Object key, @Nullable Object value);

Document

Removes a single key-value pair matching the given key and value from this multimap, if such a pair exists; returns true if the multimap changed as a result.
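The following minimal sketch (the class, variable, and key names such as MultimapRemoveDemo, tags, and "post-1" are illustrative, not taken from the sources below) shows the behavior described above: remove(key, value) deletes a single matching pair and returns whether the multimap changed.

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;

public class MultimapRemoveDemo {
    public static void main(String[] args) {
        // Names and values here are made up for illustration.
        Multimap<String, String> tags = HashMultimap.create();
        tags.put("post-1", "java");
        tags.put("post-1", "guava");
        tags.put("post-2", "java");

        boolean changed = tags.remove("post-1", "guava"); // true: the pair existed
        boolean missing = tags.remove("post-1", "scala"); // false: no such pair, multimap unchanged

        System.out.println(changed);            // true
        System.out.println(missing);            // false
        System.out.println(tags.get("post-1")); // [java] - other values for the key remain
    }
}

For multimaps that allow duplicate key-value pairs (for example ArrayListMultimap), remove deletes only a single occurrence of the pair, and which occurrence is removed is unspecified.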

Usage

From source file:org.elasticsearch.cluster.metadata.MetaDataDeleteIndexService.java

public void deleteIndices(final Request request, final Listener userListener) {
    final Collection<String> indices = Arrays.asList(request.indices);
    clusterService.submitStateUpdateTask("delete-index " + indices,
            new ClusterStateUpdateTask(Priority.URGENT) {
                @Override
                public TimeValue timeout() {
                    return request.masterTimeout;
                }

                @Override
                public boolean doPresistMetaData() {
                    return true;
                }

                @Override
                public void onFailure(String source, Throwable t) {
                    userListener.onFailure(t);
                }

                @Override
                public ClusterState execute(final ClusterState currentState) throws Exception {
                    MetaData.Builder metaDataBuilder = MetaData.builder(currentState.metaData());
                    ClusterBlocks.Builder clusterBlocksBuilder = ClusterBlocks.builder()
                            .blocks(currentState.blocks());

                    Multimap<IndexMetaData, String> unindexedTables = HashMultimap.create();
                    for (final String index : indices) {
                        if (!currentState.metaData().hasConcreteIndex(index)) {
                            throw new IndexNotFoundException(index);
                        }

                        logger.debug("[{}] deleting index", index);

                        clusterBlocksBuilder.removeIndexBlocks(index);
                        metaDataBuilder.remove(index);

                        final IndexMetaData indexMetaData = currentState.metaData().index(index);
                        // record keyspace.table having useless 2i 
                        for (ObjectCursor<MappingMetaData> type : indexMetaData.getMappings().values())
                            if (!MapperService.DEFAULT_MAPPING.equals(type.value.type()))
                                unindexedTables.put(indexMetaData,
                                        InternalCassandraClusterService.typeToCfName(type.value.type()));
                    }

                    MetaData newMetaData = metaDataBuilder.build();

                    // remove keyspace.table still having ES indices from the unindexedTables
                    for (ObjectCursor<IndexMetaData> index : newMetaData.indices().values()) {
                        for (ObjectCursor<MappingMetaData> type : index.value.getMappings().values())
                            unindexedTables.remove(index.value.keyspace(), type.value);
                    }

                    logger.debug("unindexed tables={}", unindexedTables);

                    boolean clusterDropOnDelete = currentState.metaData().settings().getAsBoolean(
                            InternalCassandraClusterService.SETTING_CLUSTER_DROP_ON_DELETE_INDEX,
                            Boolean.getBoolean("es.drop_on_delete_index"));
                    for (IndexMetaData imd : unindexedTables.keySet()) {
                        if (Schema.instance.getKeyspaceInstance(imd.keyspace()) != null) {
                            // keyspace still exists.
                            if (imd.getSettings().getAsBoolean(IndexMetaData.SETTING_DROP_ON_DELETE_INDEX,
                                    clusterDropOnDelete)) {
                                int tableCount = 0;
                                for (CFMetaData tableOrView : Schema.instance
                                        .getKeyspaceInstance(imd.keyspace()).getMetadata().tablesAndViews()) {
                                    if (tableOrView.isCQLTable())
                                        tableCount++;
                                }
                                if (tableCount == unindexedTables.get(imd).size()) {
                                    // drop keyspace instead of dropping all tables.
                                    MetaDataDeleteIndexService.this.clusterService
                                            .dropIndexKeyspace(imd.keyspace());
                                } else {
                                    // drop tables
                                    for (String table : unindexedTables.get(imd))
                                        MetaDataDeleteIndexService.this.clusterService.dropTable(imd.keyspace(),
                                                table);
                                }
                            } else {
                                // drop secondary indices
                                for (String table : unindexedTables.get(imd))
                                    MetaDataDeleteIndexService.this.clusterService
                                            .dropSecondaryIndex(imd.keyspace(), table);
                            }
                        }
                    }

                    ClusterBlocks blocks = clusterBlocksBuilder.build();
                    return ClusterState.builder(currentState).metaData(newMetaData).blocks(blocks).build();
                }

                @Override
                public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
                    userListener.onResponse(new Response(true));
                }
            });
}

From source file:com.b2international.snowowl.snomed.reasoner.server.classification.EquivalentConceptMerger.java

private void switchOutboundRelationships(final Concept conceptToKeep,
        final Collection<Concept> conceptsToRemove, final Concept conceptToRemove,
        Multimap<String, Relationship> inboundRelationshipMap, Multimap<Concept, Concept> equivalentConcepts) {
    for (final Relationship relationshipToRemove : newArrayList(conceptToRemove.getOutboundRelationships())) {
        boolean found = false;
        for (final Relationship replacementOutboundRelationship : conceptToKeep.getOutboundRelationships()) {
            if (isEquivalentOutboundRelationship(equivalentConcepts, relationshipToRemove,
                    replacementOutboundRelationship)) {
                found = true;
                break;
            }
        }

        if (!found) {
            if (!conceptsToRemove.contains(relationshipToRemove.getSource())) {
                final String namespace = SnomedIdentifiers.create(relationshipToRemove.getId()).getNamespace();
                final Relationship newRelationship = editingContext.buildDefaultRelationship(conceptToKeep,
                        relationshipToRemove.getType(), relationshipToRemove.getDestination(),
                        relationshipToRemove.getCharacteristicType(), relationshipToRemove.getModule(),
                        namespace);

                inboundRelationshipMap.put(newRelationship.getDestination().getId(), newRelationship);
                switchRelationship(newRelationship, relationshipToRemove);
            }
        }
        if (inboundRelationshipMap.containsValue(relationshipToRemove)) {
            inboundRelationshipMap.remove(relationshipToRemove.getDestination().getId(), relationshipToRemove);
        }
        SnomedModelExtensions.removeOrDeactivate(relationshipToRemove);
    }
}

From source file:com.b2international.snowowl.snomed.reasoner.server.classification.EquivalentConceptMerger.java

private void switchInboundRelationships(final Concept conceptToKeep, final Collection<Concept> conceptsToRemove,
        final Concept conceptToRemove, Multimap<String, Relationship> inboundRelationshipMap,
        Multimap<Concept, Concept> equivalentConcepts) {
    for (final Relationship relationshipToRemove : newArrayList(
            inboundRelationshipMap.get(conceptToRemove.getId()))) {
        boolean found = false;
        for (final Relationship replacementInboundRelationship : Sets
                .newHashSet(inboundRelationshipMap.get(conceptToKeep.getId()))) {
            if (isEquivalentInboundRelationship(equivalentConcepts, relationshipToRemove,
                    replacementInboundRelationship)) {
                found = true;
                break;
            }
        }
        if (!found) {
            if (!conceptsToRemove.contains(relationshipToRemove.getSource())) {
                final String namespace = SnomedIdentifiers.create(relationshipToRemove.getId()).getNamespace();
                final Relationship newRelationship = editingContext.buildDefaultRelationship(
                        relationshipToRemove.getSource(), relationshipToRemove.getType(), conceptToKeep,
                        relationshipToRemove.getCharacteristicType(), relationshipToRemove.getModule(),
                        namespace);

                inboundRelationshipMap.put(newRelationship.getDestination().getId(), newRelationship);
                switchRelationship(newRelationship, relationshipToRemove);
            }
        }
        if (inboundRelationshipMap.containsValue(relationshipToRemove)) {
            inboundRelationshipMap.remove(relationshipToRemove.getDestination().getId(), relationshipToRemove);
        }
        SnomedModelExtensions.removeOrDeactivate(relationshipToRemove);
    }
}

From source file:org.apache.beam.sdk.options.ProxyInvocationHandler.java

/**
 * Construct a mapping from an option name to its {@link PipelineOptions} interface(s)
 * declarations. An option may be declared in multiple interfaces. If it is overridden in a
 * type hierarchy, only the overriding interface will be included.
 */
private Multimap<String, PipelineOptionSpec> buildOptionNameToSpecMap(Set<PipelineOptionSpec> props) {

    Multimap<String, PipelineOptionSpec> optionsMap = HashMultimap.create();
    for (PipelineOptionSpec prop : props) {
        optionsMap.put(prop.getName(), prop);
    }

    // Filter out overridden options
    for (Map.Entry<String, Collection<PipelineOptionSpec>> entry : optionsMap.asMap().entrySet()) {

        /* Compare all interfaces for an option pairwise (iface1, iface2) to look for type
         hierarchies. If one is the base-class of the other, remove it from the output and continue
         iterating.
                
         This is an N^2 operation per-option, but the number of interfaces defining an option
         should always be small (usually 1). */
        List<PipelineOptionSpec> specs = Lists.newArrayList(entry.getValue());
        if (specs.size() < 2) {
            // Only one known implementing interface, no need to check for inheritance
            continue;
        }

        for (int i = 0; i < specs.size() - 1; i++) {
            Class<?> iface1 = specs.get(i).getDefiningInterface();
            for (int j = i + 1; j < specs.size(); j++) {
                Class<?> iface2 = specs.get(j).getDefiningInterface();

                if (iface1.isAssignableFrom(iface2)) {
                    optionsMap.remove(entry.getKey(), specs.get(i));
                    specs.remove(i);

                    // Removed element at current "i" index. Set iterators to re-evaluate
                    // new "i" element in outer loop.
                    i--;
                    j = specs.size();
                } else if (iface2.isAssignableFrom(iface1)) {
                    optionsMap.remove(entry.getKey(), specs.get(j));
                    specs.remove(j);

                    // Removed element at current "j" index. Set iterator to re-evaluate
                    // new "j" element in inner-loop.
                    j--;
                }
            }
        }
    }

    return optionsMap;
}

From source file:accumulo.balancer.GroupBalancer.java

private void balanceExtraMultiple(Map<TServerInstance, TserverGroupInfo> tservers, int maxExtraGroups,
        Moves moves, Multimap<String, TserverGroupInfo> extraMultiple, boolean alwaysAdd) {

    ArrayList<Pair<String, TserverGroupInfo>> serversToRemove = new ArrayList<>();
    for (TserverGroupInfo destTgi : tservers.values()) {
        Map<String, Integer> extras = destTgi.getExtras();
        if (alwaysAdd || extras.size() < maxExtraGroups) {
            serversToRemove.clear();
            for (String group : extraMultiple.keySet()) {
                if (!extras.containsKey(group)) {
                    Collection<TserverGroupInfo> sources = extraMultiple.get(group);
                    Iterator<TserverGroupInfo> iter = sources.iterator();
                    TserverGroupInfo srcTgi = iter.next();

                    int num = srcTgi.getExtras().get(group);

                    moves.move(group, 1, srcTgi, destTgi);

                    if (num == 2) {
                        serversToRemove.add(new Pair<String, TserverGroupInfo>(group, srcTgi));
                    }

                    if (destTgi.getExtras().size() >= maxExtraGroups || moves.size() >= getMaxMigrations()) {
                        break;
                    }
                }
            }

            for (Pair<String, TserverGroupInfo> pair : serversToRemove) {
                extraMultiple.remove(pair.getFirst(), pair.getSecond());
            }

            if (extraMultiple.size() == 0 || moves.size() >= getMaxMigrations()) {
                break;
            }
        }
    }
}

From source file:it.sayservice.platform.smartplanner.cache.CacheManager.java

private Map<CacheEntryStatus, Collection<String>> buildCache(String router, String agencyId, boolean tripsIds,
        boolean csv) throws Exception {
    List<String> updated = new ArrayList<String>();
    List<String> written = new ArrayList<String>();

    Multimap<CacheEntryStatus, String> result = ArrayListMultimap.create();

    ObjectMapper mapper = new ObjectMapper();
    mapper.configure(Feature.FAIL_ON_UNKNOWN_PROPERTIES, false);

    String d0 = System.getenv("OTP_HOME") + System.getProperty("file.separator") + router
            + System.getProperty("file.separator") + Constants.CACHE_DIR + System.getProperty("file.separator")
            + destinationDirectory;
    String d = d0 + System.getProperty("file.separator") + agencyId;
    String d2 = System.getenv("OTP_HOME") + System.getProperty("file.separator") + router
            + System.getProperty("file.separator") + Constants.CACHE_DIR + System.getProperty("file.separator")
            + "csv" + System.getProperty("file.separator") + agencyId;

    File dir = new File(d);
    if (!dir.exists()) {
        dir.mkdir();
    }
    if (csv) {
        File dir2 = new File(d2);
        if (!dir2.exists()) {
            dir2.mkdir();
        }
    }

    Map<String, WeekdayFilter> weekdayFilter = handler.readAgencyWeekDay(router, agencyId);
    Map<String, WeekdayException> weekdayException = handler.readAgencyWeekDayExceptions(router, agencyId);

    Multimap<String, String> daysMap = ArrayListMultimap.create();
    DateFormat df = new SimpleDateFormat("yyyyMMdd");

    for (String eq : weekdayFilter.keySet()) {
        WeekdayFilter filter = weekdayFilter.get(eq);
        String from = filter.getFromDate();
        String to = filter.getToDate();

        Calendar fromDate = new GregorianCalendar();
        Calendar toDate = new GregorianCalendar();

        fromDate.setTime(df.parse(from));
        toDate.setTime(df.parse(to));
        Calendar date = new GregorianCalendar();
        date.setTime(fromDate.getTime());
        String prevDay = null;
        while (df.format(date.getTime()).compareTo(to) <= 0) {
            String day = df.format(date.getTime());

            boolean sameDay = day.equals(prevDay);

            if (!sameDay) {
                int dotw = convertDayOfTheWeek(date.get(Calendar.DAY_OF_WEEK));
                if (filter.getDays()[dotw]) {
                    daysMap.put(day, eq);
                }
            }
            prevDay = day;
            date.setTime(new Date(date.getTime().getTime() + (RecurrentUtil.DAY)));
        }

    }

    for (String key : weekdayException.keySet()) {
        WeekdayException ex = weekdayException.get(key);
        for (String toAdd : ex.getAdded()) {
            daysMap.put(toAdd, key);
        }
        for (String toRemove : ex.getRemoved()) {
            daysMap.remove(toRemove, key);
        }
    }

    List<Route> allRoutes = handler.getRoutes(router);

    for (Route route : allRoutes) {

        String id = null;
        if (route.getId().getAgency().equals(agencyId)) {
            id = route.getId().getId();
        } else {
            continue;
        }

        CalendarBuildResult buildResult = buildCalendarFile(router, agencyId, id, daysMap, written, result, d);

        for (String key : buildResult.getCalendar().keySet()) {

            if (buildResult.getEmpty().contains(key)) {
                continue;
            }

            List<String> days = (List<String>) buildResult.getCalendar().get(key);
            Collections.sort(days);
            String randomDay = (String) days.get(0);
            Calendar randomDate = new GregorianCalendar();
            randomDate.setTime(df.parse(randomDay));
            long from = randomDate.getTimeInMillis();
            long to = from + RecurrentUtil.DAY - 1000 * 60;

            String res2 = manager.getTransitSchedule(router, agencyId, id, from, to,
                    TransitScheduleResults.TIMES, tripsIds);

            TransitTimeTable ttt = mapper.readValue(res2, TransitTimeTable.class);
            CompressedTransitTimeTable cttt = new CompressedTransitTimeTable(ttt);

            List<String> routesIds = Lists.newArrayList();
            for (String tid : cttt.getTripIds()) {
                routesIds.add(route.getId().getId());
            }
            cttt.setRoutesIds(routesIds);

            String res3 = mapper.writeValueAsString(cttt);

            String fn = d + "/" + id + "_" + key + ".js";
            boolean toWrite = ttt.getTimes().get(0).size() != 0;
            boolean toIndex = !compare(fn, toWrite ? res3 : "");
            FileWriter fw = new FileWriter(fn);
            if (toWrite) {
                fw.write(res3);

                if (csv) {
                    String fn2 = d2 + "/" + id + "_" + key + ".csv";
                    FileWriter fw2 = new FileWriter(fn2);
                    fw2.write(ttt.toCSV());
                    fw2.close();
                }

                result.put(CacheEntryStatus.WRITTEN, id + "_" + key);
                if (toIndex) {
                    result.put(CacheEntryStatus.ADDED, id + "_" + key);
                }
            }
            written.add(id + "_" + key + ".js");
            fw.close();
        }

    }

    File[] old = new File(d).listFiles();
    for (File f : old) {
        if (!written.contains(f.getName())) {
            result.put(CacheEntryStatus.REMOVED, f.getName().replace(".js", ""));
            break;
        }
    }

    return result.asMap();
}

From source file:it.sayservice.platform.smartplanner.cache.CacheManager.java

private Map<CacheEntryStatus, Collection<String>> oldBuildCache(String router, String agencyId,
        boolean tripsIds, boolean csv) throws Exception {
    List<String> updated = new ArrayList<String>();
    List<String> written = new ArrayList<String>();

    Multimap<CacheEntryStatus, String> result = ArrayListMultimap.create();

    ObjectMapper mapper = new ObjectMapper();

    String d = System.getenv("OTP_HOME") + System.getProperty("file.separator") + Constants.CACHE_DIR
            + System.getProperty("file.separator") + destinationDirectory + System.getProperty("file.separator")
            + agencyId;
    String d2 = System.getenv("OTP_HOME") + System.getProperty("file.separator") + Constants.CACHE_DIR
            + System.getProperty("file.separator") + "csv" + System.getProperty("file.separator") + agencyId;

    File dir = new File(d);
    if (!dir.exists()) {
        dir.mkdir();
    }
    if (csv) {
        File dir2 = new File(d2);
        if (!dir2.exists()) {
            dir2.mkdir();
        }
    }

    Map<String, WeekdayFilter> weekdayFilter = handler.readAgencyWeekDay(router, agencyId);
    Map<String, WeekdayException> weekdayException = handler.readAgencyWeekDayExceptions(router, agencyId);

    Multimap<String, String> daysMap = ArrayListMultimap.create();
    DateFormat df = new SimpleDateFormat("yyyyMMdd");

    for (String eq : weekdayFilter.keySet()) {
        WeekdayFilter filter = weekdayFilter.get(eq);
        String from = filter.getFromDate();
        String to = filter.getToDate();

        Calendar fromDate = new GregorianCalendar();
        Calendar toDate = new GregorianCalendar();

        fromDate.setTime(df.parse(from));
        toDate.setTime(df.parse(to));
        Calendar date = new GregorianCalendar();
        date.setTime(fromDate.getTime());
        String prevDay = null;
        while (df.format(date.getTime()).compareTo(to) <= 0) {
            String day = df.format(date.getTime());

            boolean sameDay = day.equals(prevDay);

            if (!sameDay) {
                int dotw = convertDayOfTheWeek(date.get(Calendar.DAY_OF_WEEK));
                if (filter.getDays()[dotw]) {
                    daysMap.put(day, eq);
                }
            }
            prevDay = day;
            date.setTime(new Date(date.getTime().getTime() + (RecurrentUtil.DAY)));
        }

    }

    for (String key : weekdayException.keySet()) {
        WeekdayException ex = weekdayException.get(key);
        for (String toAdd : ex.getAdded()) {
            daysMap.put(toAdd, key);
        }
        for (String toRemove : ex.getRemoved()) {
            daysMap.remove(toRemove, key);
        }
    }

    Multimap<String, String> reversedDaysMap = ArrayListMultimap.create();
    for (String day : daysMap.keySet()) {
        String dayKey = getEqString(daysMap.get(day).toString(), agencyId);
        reversedDaysMap.put(dayKey, day);
    }

    SortedSet<String> calendarData = new TreeSet<String>();
    for (String key : reversedDaysMap.keySet()) {
        for (String day : reversedDaysMap.get(key)) {
            calendarData.add("\"" + day + "\":\"" + key + "\"");
        }
    }

    StringBuilder sb = new StringBuilder();
    sb.append("{\n");
    for (String line : calendarData) {
        sb.append(line + ",\n");
    }
    sb.replace(sb.length() - 2, sb.length() - 1, "");
    sb.append("};");
    String c = sb.toString();

    String fn = d + "/calendar.js";
    boolean toIndex = !compare(fn, c);
    written.add("calendar.js");
    result.put(CacheEntryStatus.WRITTEN, "calendar");
    if (toIndex) {
        result.put(CacheEntryStatus.ADDED, "calendar");
    }

    FileWriter fw = new FileWriter(fn);
    fw.write(c);
    fw.close();

    for (String key : reversedDaysMap.keySet()) {
        String randomDay = (String) ((List) reversedDaysMap.get(key)).get(0);

        Calendar randomDate = new GregorianCalendar();

        randomDate.setTime(df.parse(randomDay));

        long from = randomDate.getTimeInMillis();
        long to = from + RecurrentUtil.DAY - 1000 * 60;

        List<Route> allRoutes = handler.getRoutes(router);

        for (Route route : allRoutes) {

            String id = null;
            if (route.getId().getAgency().equals(agencyId)) {
                id = route.getId().getId();
            } else {
                continue;
            }
            String res2 = manager.getTransitSchedule(router, agencyId, id, from, to,
                    TransitScheduleResults.TIMES, tripsIds);

            TransitTimeTable ttt = mapper.readValue(res2, TransitTimeTable.class);
            CompressedTransitTimeTable cttt = new CompressedTransitTimeTable(ttt);

            String res3 = mapper.writeValueAsString(cttt);

            fn = d + "/" + id + "_" + key + ".js";
            boolean toWrite = ttt.getTimes().get(0).size() != 0;
            toIndex = !compare(fn, toWrite ? res3 : "");
            fw = new FileWriter(fn);
            if (toWrite) {
                fw.write(res3);
                result.put(CacheEntryStatus.WRITTEN, id + "_" + key);
                if (toIndex) {
                    result.put(CacheEntryStatus.ADDED, id + "_" + key);
                }
            }
            written.add(id + "_" + key + ".js");
            fw.close();
        }

    }

    File[] old = new File(d).listFiles();
    for (File f : old) {
        if (!written.contains(f.getName())) {
            result.put(CacheEntryStatus.REMOVED, f.getName().replace(".js", ""));
            break;
        }
    }

    return result.asMap();
}

From source file:com.cloudant.sync.datastore.BasicDatastore.java

/**
 * Removes revisions present in the datastore from the input map.
 *
 * @param revisions a multimap from document id to set of revisions. The
 *                  map is modified in place for performance reasons.
 */
void revsDiffBatch(Multimap<String, String> revisions) {

    final String sql = String.format(
            "SELECT docs.docid, revs.revid FROM docs, revs "
                    + "WHERE docs.doc_id = revs.doc_id AND docs.docid IN (%s) AND revs.revid IN (%s) "
                    + "ORDER BY docs.docid",
            SQLDatabaseUtils.makePlaceholders(revisions.keySet().size()),
            SQLDatabaseUtils.makePlaceholders(revisions.size()));

    String[] args = new String[revisions.keySet().size() + revisions.size()];
    String[] keys = revisions.keySet().toArray(new String[revisions.keySet().size()]);
    String[] values = revisions.values().toArray(new String[revisions.size()]);
    System.arraycopy(keys, 0, args, 0, revisions.keySet().size());
    System.arraycopy(values, 0, args, revisions.keySet().size(), revisions.size());

    Cursor cursor = null;
    try {
        cursor = this.sqlDb.rawQuery(sql, args);
        while (cursor.moveToNext()) {
            String docId = cursor.getString(0);
            String revId = cursor.getString(1);
            revisions.remove(docId, revId);
        }
    } catch (SQLException e) {
        e.printStackTrace();
    } finally {
        DatabaseUtils.closeCursorQuietly(cursor);
    }
}

From source file:org.sosy_lab.cpachecker.util.predicates.AssignmentToPathAllocator.java

/**
 * We need the variableEnvoirment and functionEnvoirment for their SSA indices.
 */
private void createAssignments(Model pModel, Collection<AssignableTerm> terms, Set<Assignment> termSet,
        Map<String, Assignment> variableEnvoirment, Map<LeftHandSide, Object> pVariables,
        Multimap<String, Assignment> functionEnvoirment, Map<String, Map<Address, Object>> memory) {

    for (AssignableTerm term : terms) {

        Assignment assignment = new Assignment(term, pModel.get(term));

        if (term instanceof Variable) {

            Variable variable = (Variable) term;
            String name = variable.getName();

            if (variableEnvoirment.containsKey(name)) {
                Variable oldVariable = (Variable) variableEnvoirment.get(name).getTerm();
                int oldIndex = oldVariable.getSSAIndex();
                int newIndex = variable.getSSAIndex();
                if (oldIndex < newIndex) {

                    //update variableEnvoirment for subsequent calculation
                    variableEnvoirment.remove(name);
                    variableEnvoirment.put(name, assignment);

                    LeftHandSide oldlhs = createLeftHandSide(oldVariable);
                    LeftHandSide lhs = createLeftHandSide(variable);
                    pVariables.remove(oldlhs);
                    pVariables.put(lhs, assignment.getValue());
                }
            } else {
                //update variableEnvoirment for subsequent calculation
                variableEnvoirment.put(name, assignment);

                LeftHandSide lhs = createLeftHandSide(variable);
                pVariables.put(lhs, assignment.getValue());
            }

        } else if (term instanceof Function) {

            Function function = (Function) term;
            String name = getName(function);

            if (functionEnvoirment.containsKey(name)) {

                boolean replaced = false;

                Set<Assignment> assignments = new HashSet<>(functionEnvoirment.get(name));

                for (Assignment oldAssignment : assignments) {
                    Function oldFunction = (Function) oldAssignment.getTerm();

                    if (isLessSSA(oldFunction, function)) {

                        //update functionEnvoirment for subsequent calculation
                        functionEnvoirment.remove(name, oldAssignment);
                        functionEnvoirment.put(name, assignment);
                        replaced = true;
                        removeHeapValue(memory, assignment);
                        addHeapValue(memory, assignment);

                    }
                }

                if (!replaced) {
                    functionEnvoirment.put(name, assignment);
                    addHeapValue(memory, assignment);
                }
            } else {
                functionEnvoirment.put(name, assignment);
                addHeapValue(memory, assignment);
            }
        }
        termSet.add(assignment);
    }
}

From source file:org.eclipse.xtext.xbase.typesystem.override.ResolvedFeatures.java

/**
 * When the inherited operations are computed for Java 8, we have to check for conflicting default interface method implementations.
 */
private boolean handleOverridesAndConflicts(JvmOperation operation,
        Multimap<String, AbstractResolvedOperation> processedOperations) {
    String simpleName = operation.getSimpleName();
    if (!processedOperations.containsKey(simpleName)) {
        return true;
    }
    List<AbstractResolvedOperation> conflictingOperations = null;
    for (AbstractResolvedOperation candidate : processedOperations.get(simpleName)) {
        OverrideTester overrideTester = candidate.getOverrideTester();
        IOverrideCheckResult checkResult = overrideTester.isSubsignature(candidate, operation, false);
        if (checkResult.getDetails().contains(OverrideCheckDetails.DEFAULT_IMPL_CONFLICT)) {
            // The current operation conflicts with the candidate
            if (conflictingOperations == null)
                conflictingOperations = Lists.newLinkedList();
            conflictingOperations.add(candidate);
        } else if (checkResult.isOverridingOrImplementing()) {
            return false;
        }
    }
    if (conflictingOperations != null) {
        if (conflictingOperations.size() == 1
                && conflictingOperations.get(0) instanceof ConflictingDefaultOperation) {
            // The current operation contributes to the already existing conflict
            ConflictingDefaultOperation conflictingDefaultOperation = (ConflictingDefaultOperation) conflictingOperations
                    .get(0);
            boolean isOverridden = false;
            for (IResolvedOperation conflictingOp : conflictingDefaultOperation.getConflictingOperations()) {
                if (conflictingOp.getResolvedDeclarator().isSubtypeOf(operation.getDeclaringType())) {
                    isOverridden = true;
                    break;
                }
            }
            if (!isOverridden)
                conflictingDefaultOperation.getConflictingOperations().add(createResolvedOperation(operation));
            return false;
        }
        // A new conflict of default implementations was found
        if (operation.isAbstract()) {
            ConflictingDefaultOperation resolvedOperation = createConflictingOperation(
                    conflictingOperations.get(0).getDeclaration());
            resolvedOperation.getConflictingOperations().add(createResolvedOperation(operation));
            for (AbstractResolvedOperation conflictingOp : conflictingOperations) {
                processedOperations.remove(simpleName, conflictingOp);
                if (conflictingOp.getDeclaration() != resolvedOperation.getDeclaration()) {
                    resolvedOperation.getConflictingOperations().add(conflictingOp);
                }
            }
            processedOperations.put(simpleName, resolvedOperation);
        } else {
            ConflictingDefaultOperation resolvedOperation = createConflictingOperation(operation);
            for (AbstractResolvedOperation conflictingOp : conflictingOperations) {
                processedOperations.remove(simpleName, conflictingOp);
                resolvedOperation.getConflictingOperations().add(conflictingOp);
            }
            processedOperations.put(simpleName, resolvedOperation);
        }
        return false;
    }
    return true;
}