Example usage for com.google.common.collect MapDifference entriesOnlyOnRight

List of usage examples for com.google.common.collect MapDifference entriesOnlyOnRight

Introduction

On this page you can find example usages of `com.google.common.collect.MapDifference#entriesOnlyOnRight`.

Prototype

Map<K, V> entriesOnlyOnRight();

Source Link

Document

Returns an unmodifiable map containing the entries from the right map whose keys are not present in the left map.

Usage

From source file:org.wso2.carbon.governance.comparator.wsdl.WSDLImportsComparator.java

/**
 * Compares the import maps of the base and changed WSDLs and records any
 * additions, removals, or modifications as a comparison section.
 *
 * @param base       imports of the original WSDL, keyed by namespace
 * @param changed    imports of the modified WSDL, keyed by namespace
 * @param comparison the comparison to which a section is added when differences exist
 */
protected void compareImports(Map<String, Vector<Import>> base, Map<String, Vector<Import>> changed,
        DefaultComparison comparison) {
    MapDifference<String, Vector<Import>> difference = Maps.difference(base, changed);

    // Nothing to report when both sides hold identical imports.
    if (difference.areEqual()) {
        return;
    }

    DefaultComparison.DefaultSection section = null;

    // Imports present only on the changed side are additions.
    Map<String, Vector<Import>> added = difference.entriesOnlyOnRight();
    if (section == null && !added.isEmpty()) {
        section = comparison.newSection();
    }
    processAdditions(comparison, section, added);

    // Imports present only on the base side are removals.
    Map<String, Vector<Import>> removed = difference.entriesOnlyOnLeft();
    if (section == null && !removed.isEmpty()) {
        section = comparison.newSection();
    }
    processRemovals(comparison, section, removed);

    // Imports present on both sides but with differing values are changes.
    Map<String, MapDifference.ValueDifference<Vector<Import>>> modified = difference.entriesDiffering();
    if (section == null && !modified.isEmpty()) {
        section = comparison.newSection();
    }
    processChanges(comparison, section, modified);

    // Attach the section only when at least one difference was recorded.
    if (section != null) {
        comparison.addSection(ComparatorConstants.WSDL_IMPORTS, section);
    }

}

From source file:org.esco.grouperui.web.tag.renderer.EscoHtmlTableRenderer.java

/**
 * Allow to output a log at the end of the process. It will compare the
 * requested parameters and the obtained parameters.
 * /* w w  w. j a v a2  s  . c  o  m*/
 * @param theGroupDb
 *            the parameter source.
 */
private void verifyAndLogParameter(final ParameterGroup theGroupDb) {
    // The obtained parameters
    Map<String, Parameter> reqParameter = (Map<String, Parameter>) FacesContext.getCurrentInstance()
            .getExternalContext().getRequestMap().get(EscoHtmlTableRenderer.PARAMETER);

    // The requested parameters.
    Map<String, Parameter> groupParam = new HashMap<String, Parameter>();
    for (Parameter param : theGroupDb.getParameters()) {
        groupParam.put(param.getKey(), param);
    }

    if (reqParameter != null) {
        // The difference between the two map.
        MapDifference<String, Parameter> mapDiffs = Maps.difference(reqParameter, groupParam);

        this.logDifferences(mapDiffs.entriesOnlyOnLeft(), mapDiffs.entriesOnlyOnRight());
    }
}

From source file:org.opendaylight.sxp.route.core.RouteReactorImpl.java

/**
 * Processes all newly added {@link RoutingDefinition}s: creates a new interface and
 * virtual ip-address for each, and adds the resulting operational definitions into
 * the provided {@link List}.
 *
 * @param routingDifference         contains configuration changes; only
 *                                  {@code entriesOnlyOnRight()} (newly added routes) is consumed here
 * @param outcomingRouteDefinitions where result will be stored
 */
@VisibleForTesting
void processAdded(final MapDifference<IpAddress, RoutingDefinition> routingDifference,
        final List<RoutingDefinition> outcomingRouteDefinitions) {
    routingDifference.entriesOnlyOnRight().forEach((vIface, routingDef) -> {
        final boolean readyToAdd;
        // clean old unexpected state if any: a route may already exist for this
        // virtual interface even though it was reported as newly added
        final Routing existingRouting = routingServiceMap.get(vIface);
        if (existingRouting != null) {
            LOG.info("Found unexpected route -> closing it: {}", existingRouting);
            // stop SXP nodes bound to this virtual ip before tearing the route down
            findSxpNodesOnVirtualIp(vIface).forEach(SxpNode::shutdown);
            final boolean removalSucceded = existingRouting.removeRouteForCurrentService();
            if (!removalSucceded) {
                LOG.warn("Route cannot be closed (cleaning before A): {}", existingRouting);
                // report the stale route as failed and restart the nodes we shut down
                RoutingDefinition oldDefinition = RouteUtil.extractRoutingDefinition(existingRouting);
                outcomingRouteDefinitions.add(RouteUtil.createOperationalRouteDefinition(oldDefinition, false,
                        "route can not be closed (by cleaning before add)"));
                findSxpNodesOnVirtualIp(vIface).forEach(SxpNode::start);
                readyToAdd = false;
            } else {
                readyToAdd = true;
            }
        } else {
            readyToAdd = true;
        }

        if (readyToAdd) {
            // create and register the new routing service, then bring the route up
            final Routing routeService = routingServiceFactory.instantiateRoutingService(routingDef);
            routingServiceMap.put(vIface, routeService);
            final boolean succeeded = routeService.addRouteForCurrentService();
            if (succeeded) {
                routeService.updateArpTableForCurrentService();
                findSxpNodesOnVirtualIp(vIface).forEach(SxpNode::start);
                outcomingRouteDefinitions
                        .add(RouteUtil.createOperationalRouteDefinition(routingDef, true, "added"));
            } else {
                LOG.warn("Route can not be created (by add): {}", routeService);
                outcomingRouteDefinitions.add(RouteUtil.createOperationalRouteDefinition(routingDef, false,
                        "route can not be created (by add)"));
            }
        }
    });
}

From source file:com.example.getstarted.util.DatastoreSessionFilter.java

/**
 * Mirrors servlet-session attributes into Datastore after each request, creating a
 * session cookie for first-time visitors.
 */
@Override
public void doFilter(ServletRequest servletReq, ServletResponse servletResp, FilterChain chain)
        throws IOException, ServletException {
    HttpServletRequest req = (HttpServletRequest) servletReq;
    HttpServletResponse resp = (HttpServletResponse) servletResp;

    // Check if the session cookie is there; if not, make a session cookie using a
    // unique identifier.
    String sessionId = getCookieValue(req, "bookshelfSessionId");
    if (sessionId.equals("")) {
        // BUG FIX: the generated id must replace the local sessionId as well —
        // previously only the cookie received it, so setSessionVariables /
        // deleteSessionVariables below ran against the empty string for
        // first-time visitors instead of the id stored in the cookie.
        sessionId = new BigInteger(130, new SecureRandom()).toString(32);
        Cookie session = new Cookie("bookshelfSessionId", sessionId);
        session.setPath("/");
        resp.addCookie(session);
    }

    Map<String, String> datastoreMap = loadSessionVariables(req); // session variables for request

    chain.doFilter(servletReq, servletResp); // Allow the servlet to process request and response

    HttpSession session = req.getSession(); // Create session map
    Map<String, String> sessionMap = new HashMap<>();
    Enumeration<String> attrNames = session.getAttributeNames();
    while (attrNames.hasMoreElements()) {
        String attrName = attrNames.nextElement();
        sessionMap.put(attrName, (String) session.getAttribute(attrName));
    }

    // Create a diff between the new session variables and the existing session variables
    // to minimize datastore access
    MapDifference<String, String> diff = Maps.difference(sessionMap, datastoreMap);
    Map<String, String> setMap = diff.entriesOnlyOnLeft();      // new/changed on the servlet side
    Map<String, String> deleteMap = diff.entriesOnlyOnRight();  // stale entries only in Datastore

    // Apply the diff
    setSessionVariables(sessionId, setMap);
    deleteSessionVariables(sessionId, FluentIterable.from(deleteMap.keySet()).toArray(String.class));
}

From source file:org.sonarsource.sonarlint.core.container.connected.update.check.PluginsUpdateChecker.java

/**
 * Compares the analyzer plugins installed on the server with those in local storage
 * and appends an entry to the changelog for every plugin added, removed, or updated.
 *
 * @param result     accumulator for changelog entries
 * @param pluginList analyzers currently advertised by the server
 */
public void checkForUpdates(DefaultStorageUpdateCheckResult result, List<SonarAnalyzer> pluginList) {
    PluginReferences serverReferences = pluginReferenceDownloader.fetchPlugins(pluginList);
    PluginReferences storageReferences = storageManager.readPluginReferencesFromStorage();
    // Index both sides by plugin key, mapping to the plugin hash.
    Map<String, String> serverHashesByKey = serverReferences.getReferenceList().stream()
            .collect(Collectors.toMap(PluginReference::getKey, PluginReference::getHash));
    Map<String, String> storageHashesByKey = storageReferences.getReferenceList().stream()
            .collect(Collectors.toMap(PluginReference::getKey, PluginReference::getHash));
    MapDifference<String, String> pluginDiff = Maps.difference(storageHashesByKey, serverHashesByKey);
    if (pluginDiff.areEqual()) {
        return;
    }
    // Only in storage -> removed from the server since the last sync.
    for (String key : pluginDiff.entriesOnlyOnLeft().keySet()) {
        result.appendToChangelog(String.format("Plugin '%s' removed", key));
    }
    // Only on the server -> newly added.
    for (String key : pluginDiff.entriesOnlyOnRight().keySet()) {
        result.appendToChangelog("Plugin '" + key + "' added");
    }
    // Present on both sides with different hashes -> updated.
    for (String key : pluginDiff.entriesDiffering().keySet()) {
        result.appendToChangelog("Plugin '" + key + "' updated");
    }
}

From source file:com.thinkbiganalytics.metadata.rest.model.nifi.NiFiFlowCacheSync.java

/**
 * Returns the connection data for every connection that was added or renamed since
 * the cached snapshot was taken.
 *
 * @param latestConnectionIdToNameMap latest connection id -> name mapping
 * @param latestConnectionDataMap     latest connection id -> connection data mapping
 * @return connection data keyed by connection id for added/changed connections;
 *         values may be null when {@code latestConnectionDataMap} has no entry for an id
 */
public Map<String, NiFiFlowCacheConnectionData> getConnectionIdToConnectionUpdatedSinceLastSync(
        Map<String, String> latestConnectionIdToNameMap,
        Map<String, NiFiFlowCacheConnectionData> latestConnectionDataMap) {
    // Left = cached snapshot, right = latest state.
    MapDifference<String, String> diff = Maps.difference(snapshot.getConnectionIdToConnectionName(),
            latestConnectionIdToNameMap);
    Map<String, NiFiFlowCacheConnectionData> differences = new HashMap<>();

    // Guava's MapDifference views are never null (they may be empty), so the
    // previous null checks were dead code and have been removed.
    // Connections added since the snapshot:
    for (String connectionId : diff.entriesOnlyOnRight().keySet()) {
        differences.put(connectionId, latestConnectionDataMap.get(connectionId));
    }

    // Connections whose name changed since the snapshot:
    for (String connectionId : diff.entriesDiffering().keySet()) {
        differences.put(connectionId, latestConnectionDataMap.get(connectionId));
    }

    return differences;
}

From source file:org.obm.push.mail.EmailChangesComputerImpl.java

/**
 * Computes the additions, deletions, and changes between two email snapshots,
 * comparing entries with {@code EmailEquivalence}.
 */
@Override
public EmailChanges computeChanges(Iterable<Email> before, Iterable<Email> actual) {
    // Key both snapshots by email id and diff them with the custom equivalence.
    MapDifference<Long, Email> difference = Maps.difference(iterableToMap(before), iterableToMap(actual),
            new EmailEquivalence());

    // Only-left = deleted, only-right = added, differing = changed (keep right-hand value).
    return EmailChanges.builder()
            .deletions(Sets.newHashSet(difference.entriesOnlyOnLeft().values()))
            .changes(Sets.newHashSet(rightValueDifferences(difference.entriesDiffering().values())))
            .additions(Sets.newHashSet(difference.entriesOnlyOnRight().values()))
            .build();
}

From source file:org.apache.cassandra.schema.LegacySchemaTables.java

/**
 * Merges keyspace schema mutations: registers created keyspaces, updates altered
 * ones, and returns the names of dropped keyspaces for the caller to handle.
 * Keys are keyspace-name partition keys; values are the schema rows before/after.
 */
private static Set<String> mergeKeyspaces(Map<DecoratedKey, ColumnFamily> before,
        Map<DecoratedKey, ColumnFamily> after) {
    List<Row> created = new ArrayList<>();
    List<String> altered = new ArrayList<>();
    Set<String> dropped = new HashSet<>();

    /*
     * - we don't care about entriesOnlyOnLeft() or entriesInCommon(), because only the changes are of interest to us
     * - of all entriesOnlyOnRight(), we only care about ones that have live columns; it's possible to have a ColumnFamily
     *   there that only has the top-level deletion, if:
     *      a) a pushed DROP KEYSPACE change for a keyspace hadn't ever made it to this node in the first place
     *      b) a pulled dropped keyspace that got dropped before it could find a way to this node
     * - of entriesDiffering(), we don't care about the scenario where both pre and post-values have zero live columns:
     *   that means that a keyspace had been recreated and dropped, and the recreated keyspace had never found a way
     *   to this node
     */
    MapDifference<DecoratedKey, ColumnFamily> diff = Maps.difference(before, after);

    // Entries only on the right are brand-new keyspaces (when they carry live columns).
    for (Map.Entry<DecoratedKey, ColumnFamily> entry : diff.entriesOnlyOnRight().entrySet())
        if (entry.getValue().hasColumns())
            created.add(new Row(entry.getKey(), entry.getValue()));

    // Entries differing may represent an alter, a drop, or a (re)create,
    // depending on which side still has live columns.
    for (Map.Entry<DecoratedKey, MapDifference.ValueDifference<ColumnFamily>> entry : diff.entriesDiffering()
            .entrySet()) {
        String keyspaceName = AsciiType.instance.compose(entry.getKey().getKey());

        ColumnFamily pre = entry.getValue().leftValue();
        ColumnFamily post = entry.getValue().rightValue();

        if (pre.hasColumns() && post.hasColumns())
            altered.add(keyspaceName);
        else if (pre.hasColumns())
            dropped.add(keyspaceName);
        else if (post.hasColumns()) // a (re)created keyspace
            created.add(new Row(entry.getKey(), post));
    }

    // Apply creations and alterations to the live Schema; drops are returned to the caller.
    for (Row row : created)
        Schema.instance.addKeyspace(createKeyspaceFromSchemaPartition(row));
    for (String name : altered)
        Schema.instance.updateKeyspace(name);
    return dropped;
}

From source file:org.apache.cassandra.schema.LegacySchemaTables.java

/**
 * Merges table schema mutations: computes created, altered, and dropped tables from
 * the before/after schema partitions and applies the changes to the live Schema.
 * Keys are keyspace-name partition keys; values are the table-schema rows.
 */
private static void mergeTables(Map<DecoratedKey, ColumnFamily> before, Map<DecoratedKey, ColumnFamily> after) {
    List<CFMetaData> created = new ArrayList<>();
    List<CFMetaData> altered = new ArrayList<>();
    List<CFMetaData> dropped = new ArrayList<>();

    MapDifference<DecoratedKey, ColumnFamily> diff = Maps.difference(before, after);

    // Keyspaces only on the right are new: every table in them is created
    // (only when the partition carries live columns; see mergeKeyspaces rationale).
    for (Map.Entry<DecoratedKey, ColumnFamily> entry : diff.entriesOnlyOnRight().entrySet())
        if (entry.getValue().hasColumns())
            created.addAll(createTablesFromTablesPartition(new Row(entry.getKey(), entry.getValue())).values());

    for (Map.Entry<DecoratedKey, MapDifference.ValueDifference<ColumnFamily>> entry : diff.entriesDiffering()
            .entrySet()) {
        String keyspaceName = AsciiType.instance.compose(entry.getKey().getKey());

        ColumnFamily pre = entry.getValue().leftValue();
        ColumnFamily post = entry.getValue().rightValue();

        if (pre.hasColumns() && post.hasColumns()) {
            // Keyspace survived: diff its current table metadata against the
            // post-merge partition to find per-table drops/creates/alters.
            MapDifference<String, CFMetaData> delta = Maps.difference(
                    Schema.instance.getKSMetaData(keyspaceName).cfMetaData(),
                    createTablesFromTablesPartition(new Row(entry.getKey(), post)));

            dropped.addAll(delta.entriesOnlyOnLeft().values());
            created.addAll(delta.entriesOnlyOnRight().values());
            // For altered tables, keep the right-hand (new) metadata.
            Iterables.addAll(altered, Iterables.transform(delta.entriesDiffering().values(),
                    new Function<MapDifference.ValueDifference<CFMetaData>, CFMetaData>() {
                        public CFMetaData apply(MapDifference.ValueDifference<CFMetaData> pair) {
                            return pair.rightValue();
                        }
                    }));
        } else if (pre.hasColumns()) {
            // Keyspace dropped: all of its tables go away.
            dropped.addAll(Schema.instance.getKSMetaData(keyspaceName).cfMetaData().values());
        } else if (post.hasColumns()) {
            // Keyspace (re)created: all of its tables are new.
            created.addAll(createTablesFromTablesPartition(new Row(entry.getKey(), post)).values());
        }
    }

    // Apply the computed changes to the live Schema.
    for (CFMetaData cfm : created)
        Schema.instance.addTable(cfm);
    for (CFMetaData cfm : altered)
        Schema.instance.updateTable(cfm.ksName, cfm.cfName);
    for (CFMetaData cfm : dropped)
        Schema.instance.dropTable(cfm.ksName, cfm.cfName);
}

From source file:org.nuxeo.tools.esync.checker.TypeCardinalityChecker.java

/**
 * Checks that Elasticsearch and the database agree on the cardinality of every
 * document type, reporting missing, spurious, and diverging types.
 */
@Override
void check() {
    Map<String, Long> esTypes = es.getTypeCardinality();
    Map<String, Long> dbTypes = db.getTypeCardinality();
    // Left = database (expected), right = Elasticsearch (actual).
    MapDifference<String, Long> difference = Maps.difference(dbTypes, esTypes);
    if (difference.areEqual()) {
        postMessage("Found same types cardinality");
        return;
    }
    postMessage("Difference found in types cardinality.");
    // Types known to the database but absent from ES.
    for (String type : difference.entriesOnlyOnLeft().keySet()) {
        postError(String.format("Missing type on ES: %s, expected: %d", type, dbTypes.get(type)));
    }
    // Types present in ES but unknown to the database.
    for (String type : difference.entriesOnlyOnRight().keySet()) {
        postError(String.format("Spurious type in ES: %s, actual: %d", type, esTypes.get(type)));
    }
    // Types present on both sides with diverging counts.
    for (String type : difference.entriesDiffering().keySet()) {
        long esCount = esTypes.containsKey(type) ? esTypes.get(type) : 0;
        long dbCount = dbTypes.containsKey(type) ? dbTypes.get(type) : 0;
        postError(String.format("Document type %s (including versions), expected: %d, actual: %d, diff: %d",
                type, dbCount, esCount, dbCount - esCount));
        post(new DiffTypeEvent(type, "diff"));
    }
}