Example usage for com.google.common.collect Sets.difference

Introduction

This page lists usage examples for com.google.common.collect.Sets.difference, collected from open-source projects.

Prototype

public static <E> SetView<E> difference(final Set<E> set1, final Set<?> set2) 

Document

Returns an unmodifiable view of the difference of two sets. The returned set contains all elements that are contained by set1 and not contained by set2. set2 may also contain elements not present in set1; these are simply ignored. The iteration order of the returned set matches that of set1.
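
A minimal, self-contained sketch of these view semantics (the class name and sample values are illustrative, not taken from the projects below):

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

import com.google.common.collect.Sets;

public class SetsDifferenceDemo {

    public static void main(String[] args) {
        Set<String> first = new HashSet<>(Arrays.asList("a", "b"));
        Set<String> second = new HashSet<>(Arrays.asList("b", "c"));

        // View of the elements in 'first' that are not in 'second'; prints [a]
        Sets.SetView<String> difference = Sets.difference(first, second);
        System.out.println(difference);

        // The view is live: removing "b" from 'second' makes "b" appear in the difference.
        second.remove("b");
        System.out.println(difference); // prints [a, b] (iteration order follows 'first')

        // Materialize the view when a stable snapshot is needed.
        Set<String> snapshot = difference.immutableCopy();
        System.out.println(snapshot);
    }
}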

Usage

From source file: org.kamanja.pmml.testtool.PmmlTestTool.java

@Override
public void execute() throws Exception {
    MetricRegistry metricRegistry = new MetricRegistry();

    ConsoleReporter reporter = ConsoleReporter.forRegistry(metricRegistry).convertRatesTo(TimeUnit.SECONDS)
            .convertDurationsTo(TimeUnit.MILLISECONDS).build();

    CsvUtil.Table inputTable = readTable(this._dataset, this.separator);

    Function<String, String> parseFunction = new Function<String, String>() {

        @Override
        public String apply(String string) {

            if (("").equals(string) || ("N/A").equals(string) || ("NA").equals(string)) {
                return null;
            }

            // Remove leading and trailing quotation marks
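            // (called twice below so that doubly-quoted values are also unwrapped)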
            string = stripQuotes(string, '\"');
            string = stripQuotes(string, '\"');

            // Standardize European-style decimal marks (',') to US-style decimal marks ('.')
            if (string.indexOf(',') > -1) {
                String usString = string.replace(',', '.');

                try {
                    Double.parseDouble(usString);

                    string = usString;
                } catch (NumberFormatException nfe) {
                    // Ignored
                }
            }

            return string;
        }

        private String stripQuotes(String string, char quoteChar) {

            if (string.length() > 1
                    && ((string.charAt(0) == quoteChar) && (string.charAt(string.length() - 1) == quoteChar))) {
                return string.substring(1, string.length() - 1);
            }

            return string;
        }
    };

    List<? extends Map<FieldName, ?>> inputRecords = BatchUtil.parseRecords(inputTable, parseFunction);

    PMML pmml = readPMML(this._pmmlSrc);
    // 1320, 1313 Changes begin
    if (pmml.getHeader().getApplication().getName().contains("SAS")) {
        Visitor visitor = new org.jpmml.sas.visitors.ExpressionCorrector();
        visitor.applyTo(pmml);
    }

    // 1320, 1313 Changes end

    ModelEvaluatorFactory modelEvaluatorFactory = ModelEvaluatorFactory.newInstance();

    Evaluator evaluator = modelEvaluatorFactory.newModelManager(pmml);

    // Perform self-testing
    evaluator.verify();

    List<FieldName> activeFields = evaluator.getActiveFields();
    List<FieldName> groupFields = evaluator.getGroupFields();

    if (inputRecords.size() > 0) {
        Map<FieldName, ?> inputRecord = inputRecords.get(0);

        Sets.SetView<FieldName> missingActiveFields = Sets.difference(new LinkedHashSet<>(activeFields),
                inputRecord.keySet());
        if (missingActiveFields.size() > 0) {
            throw new IllegalArgumentException("Missing active field(s): " + missingActiveFields.toString());
        }

        Sets.SetView<FieldName> missingGroupFields = Sets.difference(new LinkedHashSet<>(groupFields),
                inputRecord.keySet());
        if (missingGroupFields.size() > 0) {
            throw new IllegalArgumentException("Missing group field(s): " + missingGroupFields.toString());
        }
    }

    if (groupFields.size() == 1) {
        FieldName groupField = groupFields.get(0);

        inputRecords = org.jpmml.evaluator.EvaluatorUtil.groupRows(groupField, inputRecords);
    } else if (groupFields.size() > 1) {
        throw new EvaluationException();
    }

    List<Map<FieldName, ?>> outputRecords = new ArrayList<>();

    Timer timer = new Timer(new SlidingWindowReservoir(this._loop));

    metricRegistry.register("main", timer);

    int epoch = 0;

    do {
        Timer.Context context = timer.time();

        try {
            for (Map<FieldName, ?> inputRecord : inputRecords) {
                Map<FieldName, FieldValue> arguments = new LinkedHashMap<>();

                for (FieldName activeField : activeFields) {
                    FieldValue activeValue = org.jpmml.evaluator.EvaluatorUtil.prepare(evaluator, activeField,
                            inputRecord.get(activeField));

                    arguments.put(activeField, activeValue);
                }

                Map<FieldName, ?> result = evaluator.evaluate(arguments);

                outputRecords.add(result);
            }
        } finally {
            context.close();
        }

        epoch++;
    } while (epoch < this._loop);

    List<FieldName> targetFields = evaluator.getTargetFields();
    List<FieldName> outputFields = evaluator.getOutputFields();

    Function<Object, String> formatFunction = new Function<Object, String>() {

        @Override
        public String apply(Object object) {
            object = org.jpmml.evaluator.EvaluatorUtil.decode(object);

            if (object == null) {
                return "N/A";
            }

            return object.toString();
        }
    };

    CsvUtil.Table outputTable = new CsvUtil.Table();
    outputTable.setSeparator(inputTable.getSeparator());
    outputTable.addAll(BatchUtil.formatRecords(outputRecords,
            Lists.newArrayList(Iterables.concat(targetFields, outputFields)), formatFunction));

    if (!_omitInputs) {
        if (inputTable.size() == outputTable.size()) {
            /** insert the inputs in front of any target and output fields */
            for (int i = 0; i < inputTable.size(); i++) {
                List<String> inputRow = inputTable.get(i);
                List<String> outputRow = outputTable.get(i);

                outputRow.addAll(0, inputRow);
            }
        }
    }

    writeTable(outputTable, this.outputPath);

    if (this._loop > 1) {
        reporter.report();
    }

    reporter.close();
}

From source file: co.cask.hydrator.plugin.source.KafkaStreamingSource.java

@Override
public JavaDStream<StructuredRecord> getStream(StreamingContext context) throws Exception {
    context.registerLineage(conf.referenceName);

    Map<String, String> kafkaParams = new HashMap<>();
    kafkaParams.put("metadata.broker.list", conf.getBrokers());

    List<SimpleConsumer> consumers = new ArrayList<>();
    for (Map.Entry<String, Integer> brokerEntry : conf.getBrokerMap().entrySet()) {
        consumers.add(new SimpleConsumer(brokerEntry.getKey(), brokerEntry.getValue(), 20 * 1000, 128 * 1024,
                "partitionLookup"));
    }

    try {
        Map<TopicAndPartition, Long> offsets = conf.getInitialPartitionOffsets(getPartitions(consumers));
        // KafkaUtils doesn't understand the sentinel offsets -1 (latest) and -2 (smallest),
        // so we have to replace them with the actual latest and smallest offsets.
        Map<TopicAndPartition, PartitionOffsetRequestInfo> offsetsToRequest = new HashMap<>();
        for (Map.Entry<TopicAndPartition, Long> entry : offsets.entrySet()) {
            TopicAndPartition topicAndPartition = entry.getKey();
            Long offset = entry.getValue();
            if (offset == OffsetRequest.EarliestTime() || offset == OffsetRequest.LatestTime()) {
                offsetsToRequest.put(topicAndPartition, new PartitionOffsetRequestInfo(offset, 1));
            }
        }

        kafka.javaapi.OffsetRequest offsetRequest = new kafka.javaapi.OffsetRequest(offsetsToRequest,
                OffsetRequest.CurrentVersion(), "offsetLookup");
        Set<TopicAndPartition> offsetsFound = new HashSet<>();
        for (SimpleConsumer consumer : consumers) {
            OffsetResponse response = consumer.getOffsetsBefore(offsetRequest);
            for (TopicAndPartition topicAndPartition : offsetsToRequest.keySet()) {
                String topic = topicAndPartition.topic();
                int partition = topicAndPartition.partition();
                if (response.errorCode(topic, partition) == 0) {
                    offsets.put(topicAndPartition, response.offsets(topic, partition)[0]);
                    offsetsFound.add(topicAndPartition);
                }
            }
        }

        Set<TopicAndPartition> missingOffsets = Sets.difference(offsetsToRequest.keySet(), offsetsFound);
        if (!missingOffsets.isEmpty()) {
            throw new IllegalStateException(String.format(
                    "Could not find offsets for %s. Please check all brokers were included in the broker list.",
                    missingOffsets));
        }
        LOG.info("Using initial offsets {}", offsets);

        return KafkaUtils.createDirectStream(context.getSparkStreamingContext(), byte[].class, byte[].class,
                DefaultDecoder.class, DefaultDecoder.class, MessageAndMetadata.class, kafkaParams, offsets,
                new Function<MessageAndMetadata<byte[], byte[]>, MessageAndMetadata>() {
                    @Override
                    public MessageAndMetadata call(MessageAndMetadata<byte[], byte[]> in) throws Exception {
                        return in;
                    }
                }).transform(new RecordTransform(conf));
    } finally {
        for (SimpleConsumer consumer : consumers) {
            try {
                consumer.close();
            } catch (Exception e) {
                LOG.warn("Error closing Kafka consumer {}.", e);
            }
        }
    }
}
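
In this example, Sets.difference(offsetsToRequest.keySet(), offsetsFound) isolates the partitions whose sentinel offsets no broker could resolve into a concrete offset, so the failure can be reported up front instead of surfacing later inside the Spark job.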

From source file: com.google.devtools.build.lib.runtime.BuildEventStreamer.java

/**
 * Clear all events that are still announced; events not naturally closed by the expected event
 * normally only occur if the build is aborted.
 */
private void clearAnnouncedEvents() {
    if (announcedEvents != null) {
        // create a copy of the identifiers to clear, as the post method
        // will change the set of already announced events.
        Set<BuildEventId> ids;
        synchronized (this) {
            ids = Sets.difference(announcedEvents, postedEvents);
        }
        for (BuildEventId id : ids) {
            post(new AbortedEvent(id, abortReason, ""));
        }
    }
}
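
Note that despite the in-code comment, Sets.difference returns a lazy view rather than an independent copy: synchronizing its creation alone does not snapshot the contents, so the subsequent iteration reflects any changes post makes to the backing sets. Callers that need a true copy would materialize the view first, for example with ImmutableSet.copyOf.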

From source file: org.apache.cassandra.cql.CreateColumnFamilyStatement.java

/** Perform validation of parsed params */
private void validate() throws InvalidRequestException {
    // Column family name
    if (!name.matches("\\w+"))
        throw new InvalidRequestException(String.format("\"%s\" is not a valid column family name", name));

    // Catch the case where someone passed a kwarg that is not recognized.
    for (String bogus : Sets.difference(properties.keySet(), Sets.union(keywords, obsoleteKeywords)))
        throw new InvalidRequestException(bogus + " is not a valid keyword argument for CREATE COLUMNFAMILY");
    for (String obsolete : Sets.intersection(properties.keySet(), obsoleteKeywords))
        logger.warn("Ignoring obsolete property {}", obsolete);

    // Validate min/max compaction thresholds
    Integer minCompaction = getPropertyInt(KW_MINCOMPACTIONTHRESHOLD, null);
    Integer maxCompaction = getPropertyInt(KW_MAXCOMPACTIONTHRESHOLD, null);

    if ((minCompaction != null) && (maxCompaction != null)) // Both min and max are set
    {
        if ((minCompaction > maxCompaction) && (maxCompaction != 0))
            throw new InvalidRequestException(String.format("%s cannot be larger than %s",
                    KW_MINCOMPACTIONTHRESHOLD, KW_MAXCOMPACTIONTHRESHOLD));
    } else if (minCompaction != null) // Only the min threshold is set
    {
        if (minCompaction > CFMetaData.DEFAULT_MAX_COMPACTION_THRESHOLD)
            throw new InvalidRequestException(
                    String.format("%s cannot be larger than %s, (default %s)", KW_MINCOMPACTIONTHRESHOLD,
                            KW_MAXCOMPACTIONTHRESHOLD, CFMetaData.DEFAULT_MAX_COMPACTION_THRESHOLD));
    } else if (maxCompaction != null) // Only the max threshold is set
    {
        if ((maxCompaction < CFMetaData.DEFAULT_MIN_COMPACTION_THRESHOLD) && (maxCompaction != 0))
            throw new InvalidRequestException(
                    String.format("%s cannot be smaller than %s, (default %s)", KW_MAXCOMPACTIONTHRESHOLD,
                            KW_MINCOMPACTIONTHRESHOLD, CFMetaData.DEFAULT_MIN_COMPACTION_THRESHOLD));
    }

    // Ensure that exactly one key has been specified.
    if (keyValidator.size() < 1)
        throw new InvalidRequestException("You must specify a PRIMARY KEY");
    else if (keyValidator.size() > 1)
        throw new InvalidRequestException("You may only specify one PRIMARY KEY");

    AbstractType<?> comparator;

    try {
        comparator = getComparator();
    } catch (ConfigurationException e) {
        throw new InvalidRequestException(e.toString());
    }

    for (Map.Entry<Term, String> column : columns.entrySet()) {
        ByteBuffer name = column.getKey().getByteBuffer(comparator);

        if (keyAlias != null && keyAlias.equals(name))
            throw new InvalidRequestException("Invalid column name: " + column.getKey().getText()
                    + ", because it equals to the key_alias.");

    }
}
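
The keyword checks at the top illustrate a common validation idiom: Sets.difference(properties.keySet(), Sets.union(keywords, obsoleteKeywords)) yields exactly the unrecognized property names, while Sets.intersection(properties.keySet(), obsoleteKeywords) picks out the ones that are recognized but obsolete.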

From source file: org.apache.james.dlp.eventsourcing.aggregates.DLPDomainConfiguration.java

private Optional<Event> generateAddedRulesEvent(Set<DLPConfigurationItem> existingRules,
        Set<DLPConfigurationItem> updateRulesSet, EventId nextEventId) {
    Set<DLPConfigurationItem> addedRules = Sets.difference(updateRulesSet, existingRules);
    if (!addedRules.isEmpty()) {
        return Optional.of(new ConfigurationItemsAdded(aggregateId, nextEventId, addedRules));
    }
    return Optional.empty();
}

From source file: mvm.rya.indexing.accumulo.entity.AccumuloDocIdIndexer.java

@Override
public CloseableIteration<BindingSet, QueryEvaluationException> queryDocIndex(StarQuery query,
        Collection<BindingSet> constraints) throws TableNotFoundException, QueryEvaluationException {

    final StarQuery starQ = query;
    final Iterator<BindingSet> bs = constraints.iterator();
    final Iterator<BindingSet> bs2 = constraints.iterator();
    final Set<String> unCommonVarNames;
    final Set<String> commonVarNames;
    if (bs2.hasNext()) {
        BindingSet currBs = bs2.next();
        commonVarNames = StarQuery.getCommonVars(query, currBs);
        unCommonVarNames = Sets.difference(currBs.getBindingNames(), commonVarNames);
    } else {
        commonVarNames = Sets.newHashSet();
        unCommonVarNames = Sets.newHashSet();
    }

    if (commonVarNames.size() == 1 && !query.commonVarConstant()
            && commonVarNames.contains(query.getCommonVarName())) {

        final HashMultimap<String, BindingSet> map = HashMultimap.create();
        final String commonVar = starQ.getCommonVarName();
        final Iterator<Entry<Key, Value>> intersections;
        final BatchScanner scan;
        Set<Range> ranges = Sets.newHashSet();

        while (bs.hasNext()) {

            BindingSet currentBs = bs.next();

            if (currentBs.getBinding(commonVar) == null) {
                continue;
            }

            String row = currentBs.getBinding(commonVar).getValue().stringValue();
            ranges.add(new Range(row));
            map.put(row, currentBs);

        }
        scan = runQuery(starQ, ranges);
        intersections = scan.iterator();

        return new CloseableIteration<BindingSet, QueryEvaluationException>() {

            private QueryBindingSet currentSolutionBs = null;
            private boolean hasNextCalled = false;
            private boolean isEmpty = false;
            private Iterator<BindingSet> inputSet = (new ArrayList<BindingSet>()).iterator();
            private BindingSet currentBs;
            private Key key;

            @Override
            public boolean hasNext() throws QueryEvaluationException {
                if (!hasNextCalled && !isEmpty) {
                    while (inputSet.hasNext() || intersections.hasNext()) {
                        if (!inputSet.hasNext()) {
                            key = intersections.next().getKey();
                            inputSet = map.get(key.getRow().toString()).iterator();
                        }
                        currentBs = inputSet.next();
                        currentSolutionBs = deserializeKey(key, starQ, currentBs, unCommonVarNames);

                        if (currentSolutionBs.size() == unCommonVarNames.size() + starQ.getUnCommonVars().size()
                                + 1) {
                            hasNextCalled = true;
                            return true;
                        }

                    }

                    isEmpty = true;
                    return false;

                } else if (isEmpty) {
                    return false;
                } else {
                    return true;
                }

            }

            @Override
            public BindingSet next() throws QueryEvaluationException {

                if (hasNextCalled) {
                    hasNextCalled = false;
                } else if (isEmpty) {
                    throw new NoSuchElementException();
                } else {
                    if (this.hasNext()) {
                        hasNextCalled = false;
                    } else {
                        throw new NoSuchElementException();
                    }
                }

                return currentSolutionBs;
            }

            @Override
            public void remove() throws QueryEvaluationException {
                throw new UnsupportedOperationException();
            }

            @Override
            public void close() throws QueryEvaluationException {
                scan.close();
            }

        };

    } else {

        return new CloseableIteration<BindingSet, QueryEvaluationException>() {

            @Override
            public void remove() throws QueryEvaluationException {
                throw new UnsupportedOperationException();
            }

            private Iterator<Entry<Key, Value>> intersections = null;
            private QueryBindingSet currentSolutionBs = null;
            private boolean hasNextCalled = false;
            private boolean isEmpty = false;
            private boolean init = false;
            private BindingSet currentBs;
            private StarQuery sq = new StarQuery(starQ);
            private Set<Range> emptyRangeSet = Sets.newHashSet();
            private BatchScanner scan;

            @Override
            public BindingSet next() throws QueryEvaluationException {
                if (hasNextCalled) {
                    hasNextCalled = false;
                } else if (isEmpty) {
                    throw new NoSuchElementException();
                } else {
                    if (this.hasNext()) {
                        hasNextCalled = false;
                    } else {
                        throw new NoSuchElementException();
                    }
                }
                return currentSolutionBs;
            }

            @Override
            public boolean hasNext() throws QueryEvaluationException {

                if (!init) {
                    if (intersections == null && bs.hasNext()) {
                        currentBs = bs.next();
                        sq = StarQuery.getConstrainedStarQuery(sq, currentBs);
                        scan = runQuery(sq, emptyRangeSet);
                        intersections = scan.iterator();
                        // binding set empty
                    } else if (intersections == null && !bs.hasNext()) {
                        currentBs = new QueryBindingSet();
                        scan = runQuery(starQ, emptyRangeSet);
                        intersections = scan.iterator();
                    }

                    init = true;
                }

                if (!hasNextCalled && !isEmpty) {
                    while (intersections.hasNext() || bs.hasNext()) {
                        if (!intersections.hasNext()) {
                            scan.close();
                            currentBs = bs.next();
                            sq = StarQuery.getConstrainedStarQuery(sq, currentBs);
                            scan = runQuery(sq, emptyRangeSet);
                            intersections = scan.iterator();
                        }
                        if (intersections.hasNext()) {
                            currentSolutionBs = deserializeKey(intersections.next().getKey(), sq, currentBs,
                                    unCommonVarNames);
                        } else {
                            continue;
                        }

                        if (sq.commonVarConstant() && currentSolutionBs.size() == unCommonVarNames.size()
                                + sq.getUnCommonVars().size()) {
                            hasNextCalled = true;
                            return true;
                        } else if (currentSolutionBs.size() == unCommonVarNames.size()
                                + sq.getUnCommonVars().size() + 1) {
                            hasNextCalled = true;
                            return true;
                        }
                    }

                    isEmpty = true;
                    return false;

                } else if (isEmpty) {
                    return false;
                } else {
                    return true;
                }
            }

            @Override
            public void close() throws QueryEvaluationException {
                scan.close();
            }
        };
    }
}

From source file: com.google.devtools.build.lib.skyframe.GraphBackedRecursivePackageProvider.java

@Override
public Map<PackageIdentifier, Package> bulkGetPackages(EventHandler eventHandler,
        Iterable<PackageIdentifier> pkgIds) throws NoSuchPackageException, InterruptedException {
    Set<SkyKey> pkgKeys = ImmutableSet.copyOf(PackageValue.keys(pkgIds));

    ImmutableMap.Builder<PackageIdentifier, Package> pkgResults = ImmutableMap.builder();
    Map<SkyKey, SkyValue> packages = graph.getSuccessfulValues(pkgKeys);
    for (Map.Entry<SkyKey, SkyValue> pkgEntry : packages.entrySet()) {
        PackageIdentifier pkgId = (PackageIdentifier) pkgEntry.getKey().argument();
        PackageValue pkgValue = (PackageValue) pkgEntry.getValue();
        pkgResults.put(pkgId, Preconditions.checkNotNull(pkgValue.getPackage(), pkgId));
    }

    SetView<SkyKey> unknownKeys = Sets.difference(pkgKeys, packages.keySet());
    if (!Iterables.isEmpty(unknownKeys)) {
        LOGGER.warning("Unable to find " + unknownKeys + " in the batch lookup of " + pkgKeys
                + ". Successfully looked up " + packages.keySet());
    }
    for (Map.Entry<SkyKey, Exception> missingOrExceptionEntry : graph.getMissingAndExceptions(unknownKeys)
            .entrySet()) {
        PackageIdentifier pkgIdentifier = (PackageIdentifier) missingOrExceptionEntry.getKey().argument();
        Exception exception = missingOrExceptionEntry.getValue();
        if (exception == null) {
            // If the package key does not exist in the graph, then it must not correspond to any
            // package, because the SkyQuery environment has already loaded the universe.
            throw new BuildFileNotFoundException(pkgIdentifier, "Package not found");
        }
        Throwables.propagateIfInstanceOf(exception, NoSuchPackageException.class);
        Throwables.propagate(exception);
    }
    return pkgResults.build();
}

From source file: com.thinkbiganalytics.metadata.rest.model.nifi.NiFiFlowCacheSync.java

public Set<String> getStreamingFeedsUpdatedSinceLastSync(Set<String> streamingFeeds) {
    com.google.common.collect.Sets.SetView<String> diff = Sets.difference(streamingFeeds,
            snapshot.getAllStreamingFeeds());
    return diff.copyInto(new HashSet<>());
}
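
SetView.copyInto copies the current contents of the lazy view into the supplied set and returns that set, so the caller receives a mutable HashSet that is detached from later changes to streamingFeeds or the snapshot; SetView.immutableCopy is an alternative when an ImmutableSet will do.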

From source file: org.eclipse.sw360.portal.tags.DisplayReleaseChanges.java

private void renderReleaseIdToRelationship(StringBuilder display, User user) {

    if (ensureSomethingTodoAndNoNull(Release._Fields.RELEASE_ID_TO_RELATIONSHIP)) {

        Set<String> changedReleaseIds = Sets.intersection(additions.getReleaseIdToRelationship().keySet(),
                deletions.getReleaseIdToRelationship().keySet());
        Set<String> releaseIdsInDb = nullToEmptyMap(actual.getReleaseIdToRelationship()).keySet();
        //keep only releases that are still in the database
        changedReleaseIds = Sets.intersection(changedReleaseIds, releaseIdsInDb);

        Set<String> removedReleaseIds = Sets.difference(deletions.getReleaseIdToRelationship().keySet(),
                changedReleaseIds);
        removedReleaseIds = Sets.intersection(removedReleaseIds, releaseIdsInDb);

        Set<String> addedReleaseIds = Sets.difference(additions.getReleaseIdToRelationship().keySet(),
                changedReleaseIds);

        display.append("<h3> Changes in linked releases </h3>");
        LinkedReleaseRenderer renderer = new LinkedReleaseRenderer(display, tableClasses, idPrefix, user);
        renderer.renderReleaseLinkList(display, deletions.getReleaseIdToRelationship(), removedReleaseIds,
                "Removed Release Links");
        renderer.renderReleaseLinkList(display, additions.getReleaseIdToRelationship(), addedReleaseIds,
                "Added Release Links");
        renderer.renderReleaseLinkListCompare(display, actual.getReleaseIdToRelationship(),
                deletions.getReleaseIdToRelationship(), additions.getReleaseIdToRelationship(),
                changedReleaseIds);
    }
}

From source file: org.opendaylight.ovsdb.hwvtepsouthbound.reconciliation.configuration.GlobalConfigOperationalChangeGetter.java

static Set<String> getLogicalSwitchesToBeRemoved(Node configNode, Node opNode) {
    Set<String> opSwitchNames = new HashSet<>();
    Set<String> cfgSwitchNames = new HashSet<>();
    List<LogicalSwitches> cfgLogicalSwitches = Lists.newArrayList();

    List<LogicalSwitches> opLogicalSwitches = opNode.getAugmentation(HwvtepGlobalAugmentation.class)
            .getLogicalSwitches();
    if (configNode != null) {
        cfgLogicalSwitches = configNode.getAugmentation(HwvtepGlobalAugmentation.class).getLogicalSwitches();
    }
    if (opLogicalSwitches != null) {
        for (LogicalSwitches ls : opLogicalSwitches) {
            opSwitchNames.add(ls.getHwvtepNodeName().getValue());
        }
    }
    if (cfgLogicalSwitches != null) {
        for (LogicalSwitches ls : cfgLogicalSwitches) {
            cfgSwitchNames.add(ls.getHwvtepNodeName().getValue());
        }
    }
    final Set<String> removedSwitchNames = Sets.difference(opSwitchNames, cfgSwitchNames);
    return removedSwitchNames;
}