Example usage for com.google.common.collect MapDifference entriesOnlyOnLeft

Introduction

On this page you can find example usages of com.google.common.collect.MapDifference#entriesOnlyOnLeft().

Prototype

Map<K, V> entriesOnlyOnLeft();

Document

Returns an unmodifiable map containing the entries from the left map whose keys are not present in the right map.
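
Before the real-world examples below, here is a minimal, self-contained sketch (the maps and the class name are hypothetical, purely for illustration) of how entriesOnlyOnLeft() relates to the other views returned by Maps.difference:

import com.google.common.collect.MapDifference;
import com.google.common.collect.Maps;

import java.util.Map;

public class EntriesOnlyOnLeftExample {
    public static void main(String[] args) {
        Map<String, Integer> left = Map.of("a", 1, "b", 2, "c", 3);
        Map<String, Integer> right = Map.of("b", 2, "c", 30, "d", 4);

        MapDifference<String, Integer> diff = Maps.difference(left, right);

        // entries whose keys exist only in the left map: {a=1}
        System.out.println(diff.entriesOnlyOnLeft());
        // entries whose keys exist only in the right map: {d=4}
        System.out.println(diff.entriesOnlyOnRight());
        // keys present in both maps with differing values: {c=(3, 30)}
        System.out.println(diff.entriesDiffering());
    }
}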

Usage

From source file:additionalpipes.inventory.components.PropertyMap.java

@Override
public void writeData(DataOutputStream data) throws IOException {
    MapDifference<Property, Property> diff = Maps.difference(prevValue, value);

    Map<Property, ValueDifference<Property>> changed = diff.entriesDiffering();
    Map<Property, Property> added = diff.entriesOnlyOnRight();
    Map<Property, Property> removed = diff.entriesOnlyOnLeft();
    data.writeInt(changed.size() + added.size() + removed.size());

    for (Entry<Property, ValueDifference<Property>> e : changed.entrySet()) {
        data.writeBoolean(false);
        Property.writePacket(e.getKey(), data);
        Property.writePacket(e.getValue().rightValue(), data);
    }
    for (Entry<Property, Property> e : added.entrySet()) {
        data.writeBoolean(false);
        Property.writePacket(e.getKey(), data);
        Property.writePacket(e.getValue(), data);
    }
    for (Entry<Property, Property> e : removed.entrySet()) {
        data.writeBoolean(true);
        Property.writePacket(e.getKey(), data);
    }

    prevValue = ImmutableMap.copyOf(value);
}

From source file:org.apache.cassandra.schema.SchemaKeyspace.java

public static synchronized void mergeSchema(Collection<Mutation> mutations) {
    // only compare the keyspaces affected by this set of schema mutations
    Set<String> affectedKeyspaces = mutations.stream().map(m -> UTF8Type.instance.compose(m.key().getKey()))
            .collect(Collectors.toSet());

    // fetch the current state of schema for the affected keyspaces only
    Keyspaces before = Schema.instance.getKeyspaces(affectedKeyspaces);

    // apply the schema mutations and flush
    mutations.forEach(Mutation::apply);
    if (FLUSH_SCHEMA_TABLES)
        flush();

    // fetch the new state of schema from schema tables (not applied to Schema.instance yet)
    Keyspaces after = fetchKeyspacesOnly(affectedKeyspaces);

    // deal with the diff
    MapDifference<String, KeyspaceMetadata> keyspacesDiff = before.diff(after);

    // dropped keyspaces
    for (KeyspaceMetadata keyspace : keyspacesDiff.entriesOnlyOnLeft().values()) {
        keyspace.functions.udas().forEach(Schema.instance::dropAggregate);
        keyspace.functions.udfs().forEach(Schema.instance::dropFunction);
        keyspace.views.forEach(v -> Schema.instance.dropView(v.ksName, v.viewName));
        keyspace.tables.forEach(t -> Schema.instance.dropTable(t.ksName, t.cfName));
        keyspace.types.forEach(Schema.instance::dropType);
        Schema.instance.dropKeyspace(keyspace.name);
    }

    // new keyspaces
    for (KeyspaceMetadata keyspace : keyspacesDiff.entriesOnlyOnRight().values()) {
        Schema.instance.addKeyspace(KeyspaceMetadata.create(keyspace.name, keyspace.params));
        keyspace.types.forEach(Schema.instance::addType);
        keyspace.tables.forEach(Schema.instance::addTable);
        keyspace.views.forEach(Schema.instance::addView);
        keyspace.functions.udfs().forEach(Schema.instance::addFunction);
        keyspace.functions.udas().forEach(Schema.instance::addAggregate);
    }

    // updated keyspaces
    for (Map.Entry<String, MapDifference.ValueDifference<KeyspaceMetadata>> diff : keyspacesDiff
            .entriesDiffering().entrySet())
        updateKeyspace(diff.getKey(), diff.getValue().leftValue(), diff.getValue().rightValue());
}

From source file:org.opendaylight.controller.filtervalve.cors.jaxb.Filter.java

public synchronized void initialize(String fileName, Optional<Filter> maybeTemplate) {
    checkState(initialized == false, "Already initialized");
    logger.trace("Initializing filter {} : {}", filterName, filterClass);
    for (InitParam initParam : initParams) {
        initParam.inititialize();
    }
    if (maybeTemplate.isPresent()) {
        // merge non-conflicting init params
        Filter template = maybeTemplate.get();
        checkArgument(template.isTemplate);
        Map<String, InitParam> templateParams = template.getInitParamsMap();
        Map<String, InitParam> currentParams = getInitParamsMap();
        // add values of template that are not present in current
        MapDifference<String, InitParam> difference = Maps.difference(templateParams, currentParams);
        for (Entry<String, InitParam> templateUnique : difference.entriesOnlyOnLeft().entrySet()) {
            initParams.add(templateUnique.getValue());
        }
        // merge filterClass
        if (filterClass == null) {
            filterClass = template.filterClass;
        } else if (Objects.equals(filterClass, template.filterClass) == false) {
            logger.error(
                    "Conflict detected in filter-class of {} defined in {}, template class {}, child class {}",
                    filterName, fileName, template.filterClass, filterClass);
            throw new IllegalStateException("Conflict detected in template/filter filter-class definitions,"
                    + " filter name: " + filterName + " in file " + fileName);
        }
    }
    initParams = Collections.unmodifiableList(new ArrayList<>(initParams));
    Class<?> clazz;
    try {
        clazz = Class.forName(filterClass);
    } catch (Exception e) {
        throw new IllegalStateException(
                "Cannot instantiate class defined in filter " + filterName + " in file " + fileName, e);
    }
    try {
        actualFilter = (javax.servlet.Filter) clazz.newInstance();
    } catch (Exception e) {
        throw new IllegalStateException(
                "Cannot instantiate class defined in filter " + filterName + " in file " + fileName, e);
    }
    logger.trace("Initializing {} with following init-params:{}", filterName, getInitParams());
    try {
        actualFilter.init(this);
    } catch (Exception e) {
        throw new IllegalStateException("Cannot initialize filter " + filterName + " in file " + fileName, e);
    }
    initialized = true;
}

From source file:org.guvnor.ala.build.maven.util.RepositoryVisitor.java

public RepositoryVisitor(final Path projectPath, final String _projectRoot, final boolean cleanTempDir) {
    this.root = makeTempRootDirectory(_projectRoot, cleanTempDir);

    try {
        if (_projectRoot != null && !_projectRoot.equals("")) {
            loadIndex(root.getAbsolutePath());
        }
        visitPaths(root, Files.newDirectoryStream(projectPath));
        if (oldIdentityHash != null) {
            MapDifference<String, String> difference = Maps.difference(oldIdentityHash, identityHash);
            Map<String, String> deletedFiles = difference.entriesOnlyOnLeft();
            for (String path : deletedFiles.keySet()) {
                boolean deleted = new File(
                        root.getAbsolutePath().replace(projectPath.toString(), "") + "/" + path).delete();
                System.out.println("Deleted: " + root.getAbsolutePath().replace(projectPath.toString(), "")
                        + "/" + path + " -> " + deleted);
            }
        }
        storeIndex(root.getAbsolutePath());
    } catch (IOException | NoSuchAlgorithmException ex) {
        throw new RuntimeException(ex);
    }
}

From source file:com.example.getstarted.util.DatastoreSessionFilter.java

@Override
public void doFilter(ServletRequest servletReq, ServletResponse servletResp, FilterChain chain)
        throws IOException, ServletException {
    HttpServletRequest req = (HttpServletRequest) servletReq;
    HttpServletResponse resp = (HttpServletResponse) servletResp;

    // Check whether the session cookie is present; if not, create a session cookie
    // using a unique identifier.
    String sessionId = getCookieValue(req, "bookshelfSessionId");
    if (sessionId.equals("")) {
        String sessionNum = new BigInteger(130, new SecureRandom()).toString(32);
        Cookie session = new Cookie("bookshelfSessionId", sessionNum);
        session.setPath("/");
        resp.addCookie(session);
    }

    Map<String, String> datastoreMap = loadSessionVariables(req); // session variables for request

    chain.doFilter(servletReq, servletResp); // Allow the servlet to process request and response

    HttpSession session = req.getSession(); // Create session map
    Map<String, String> sessionMap = new HashMap<>();
    Enumeration<String> attrNames = session.getAttributeNames();
    while (attrNames.hasMoreElements()) {
        String attrName = attrNames.nextElement();
        sessionMap.put(attrName, (String) session.getAttribute(attrName));
    }

    // Create a diff between the new session variables and the existing session variables
    // to minimize datastore access
    MapDifference<String, String> diff = Maps.difference(sessionMap, datastoreMap);
    Map<String, String> setMap = diff.entriesOnlyOnLeft();
    Map<String, String> deleteMap = diff.entriesOnlyOnRight();

    // Apply the diff
    setSessionVariables(sessionId, setMap);
    deleteSessionVariables(sessionId, FluentIterable.from(deleteMap.keySet()).toArray(String.class));
}

From source file:org.jfrog.hudson.AbstractBuildInfoDeployer.java

protected Build createBuildInfo(String buildAgentName, String buildAgentVersion, BuildType buildType) {
    String buildinfoName = build.getParent().getFullName();
    if (buildAgentName.compareTo("Generic") == 0) {
        buildinfoName = Util.replaceMacro(((ArtifactoryGenericConfigurator) configurator).getBuildInfoName(),
                env);
    }
    BuildInfoBuilder builder = new BuildInfoBuilder(ExtractorUtils.sanitizeBuildName(buildinfoName))
            .number(build.getNumber() + "").type(buildType)
            .buildAgent(new BuildAgent(buildAgentName, buildAgentVersion))
            .agent(new Agent("hudson", build.getHudsonVersion()));
    String buildUrl = ActionableHelper.getBuildUrl(build);
    if (StringUtils.isNotBlank(buildUrl)) {
        builder.url(buildUrl);
    }

    Calendar startedTimestamp = build.getTimestamp();
    builder.startedDate(startedTimestamp.getTime());

    long duration = System.currentTimeMillis() - startedTimestamp.getTimeInMillis();
    builder.durationMillis(duration);

    String artifactoryPrincipal = configurator.getArtifactoryServer().getResolvingCredentials().getUsername();
    if (StringUtils.isBlank(artifactoryPrincipal)) {
        artifactoryPrincipal = "";
    }
    builder.artifactoryPrincipal(artifactoryPrincipal);

    String userCause = ActionableHelper.getUserCausePrincipal(build);
    if (userCause != null) {
        builder.principal(userCause);
    }

    Cause.UpstreamCause parent = ActionableHelper.getUpstreamCause(build);
    if (parent != null) {
        String parentProject = ExtractorUtils.sanitizeBuildName(parent.getUpstreamProject());
        int parentNumber = parent.getUpstreamBuild();
        builder.parentName(parentProject);
        builder.parentNumber(parentNumber + "");
        if (StringUtils.isBlank(userCause)) {
            builder.principal("auto");
        }
    }

    gatherSysPropInfo(builder);
    addBuildInfoVariables(builder);

    String revision = ExtractorUtils.getVcsRevision(env);
    if (StringUtils.isNotBlank(revision)) {
        builder.vcsRevision(revision);
    }
    if (configurator.isIncludeEnvVars()) {
        for (Map.Entry<String, String> entry : env.entrySet()) {
            builder.addProperty(BuildInfoProperties.BUILD_INFO_ENVIRONMENT_PREFIX + entry.getKey(),
                    entry.getValue());
        }
    } else {
        MapDifference<String, String> difference = Maps.difference(env, System.getenv());
        Map<String, String> filteredEnvVars = difference.entriesOnlyOnLeft();
        for (Map.Entry<String, String> entry : filteredEnvVars.entrySet()) {
            builder.addProperty(BuildInfoProperties.BUILD_INFO_ENVIRONMENT_PREFIX + entry.getKey(),
                    entry.getValue());
        }
    }

    LicenseControl licenseControl = new LicenseControl(configurator.isRunChecks());
    if (configurator.isRunChecks()) {
        if (StringUtils.isNotBlank(configurator.getViolationRecipients())) {
            licenseControl.setLicenseViolationsRecipientsList(configurator.getViolationRecipients());
        }
        if (StringUtils.isNotBlank(configurator.getScopes())) {
            licenseControl.setScopesList(configurator.getScopes());
        }
    }
    licenseControl.setIncludePublishedArtifacts(configurator.isIncludePublishArtifacts());
    licenseControl.setAutoDiscover(configurator.isLicenseAutoDiscovery());
    builder.licenseControl(licenseControl);
    BuildRetention buildRetention = new BuildRetention(configurator.isDiscardBuildArtifacts());
    if (configurator.isDiscardOldBuilds()) {
        buildRetention = BuildRetentionFactory.createBuildRetention(build,
                configurator.isDiscardBuildArtifacts());
    }
    builder.buildRetention(buildRetention);

    if ((Hudson.getInstance().getPlugin("jira") != null) && configurator.isEnableIssueTrackerIntegration()) {
        new IssuesTrackerHelper(build, listener, configurator.isAggregateBuildIssues(),
                configurator.getAggregationBuildStatus()).setIssueTrackerInfo(builder);
    }

    // add staging status if it is a release build
    ReleaseAction release = ActionableHelper.getLatestAction(build, ReleaseAction.class);
    if (release != null) {
        String stagingRepoKey = release.getStagingRepositoryKey();
        if (StringUtils.isBlank(stagingRepoKey)) {
            stagingRepoKey = configurator.getRepositoryKey();
        }
        builder.addStatus(new PromotionStatusBuilder(Promotion.STAGED).timestampDate(startedTimestamp.getTime())
                .comment(release.getStagingComment()).repository(stagingRepoKey).ciUser(userCause)
                .user(artifactoryPrincipal).build());
    }

    Build buildInfo = builder.build();
    // for backwards compatibility with Artifactory 2.2.3
    if (parent != null) {
        buildInfo.setParentBuildId(parent.getUpstreamProject());
    }

    return buildInfo;
}

From source file:org.jfrog.hudson.BuildInfoDeployer.java

private Build gatherBuildInfo(MavenModuleSetBuild build) throws IOException, InterruptedException {
    BuildInfoBuilder infoBuilder = new BuildInfoBuilder(build.getParent().getDisplayName())
            .number(build.getNumber() + "")
            .buildAgent(new BuildAgent("Maven", build.getParent().getMaven().getName()))
            .agent(new Agent("hudson", build.getHudsonVersion())).type(BuildType.MAVEN);

    if (Hudson.getInstance().getRootUrl() != null) {
        infoBuilder.url(Hudson.getInstance().getRootUrl() + build.getUrl());
    }

    Calendar startedTimestamp = build.getTimestamp();
    infoBuilder.startedDate(startedTimestamp.getTime());

    long duration = System.currentTimeMillis() - startedTimestamp.getTimeInMillis();
    infoBuilder.durationMillis(duration);

    ArtifactoryServer server = publisher.getArtifactoryServer();
    infoBuilder.artifactoryPrincipal(server.getUserName());

    CauseAction action = ActionableHelper.getLatestAction(build, CauseAction.class);
    if (action != null) {
        for (Cause cause : action.getCauses()) {
            if (cause instanceof Cause.UserCause) {
                infoBuilder.principal(((Cause.UserCause) cause).getUserName());
            }
        }
    }

    Cause.UpstreamCause parent = ActionableHelper.getUpstreamCause(build);
    if (parent != null) {
        String parentProject = parent.getUpstreamProject();
        int buildNumber = parent.getUpstreamBuild();
        infoBuilder.parentName(parentProject);
        infoBuilder.parentNumber(buildNumber + "");
    }

    gatherModuleAndDependencyInfo(infoBuilder, build);
    gatherSysPropInfo(infoBuilder);
    addBuildInfoVariables(infoBuilder);
    EnvVars envVars = build.getEnvironment(listener);
    String revision = envVars.get("SVN_REVISION");
    if (StringUtils.isNotBlank(revision)) {
        infoBuilder.vcsRevision(revision);
    }
    if (publisher.isIncludeEnvVars()) {
        for (Map.Entry<String, String> entry : envVars.entrySet()) {
            infoBuilder.addProperty(BuildInfoProperties.BUILD_INFO_ENVIRONMENT_PREFIX + entry.getKey(),
                    entry.getValue());
        }
    } else {
        MapDifference<String, String> difference = Maps.difference(envVars, System.getenv());
        Map<String, String> filteredEnvVars = difference.entriesOnlyOnLeft();
        for (Map.Entry<String, String> entry : filteredEnvVars.entrySet()) {
            infoBuilder.addProperty(BuildInfoProperties.BUILD_INFO_ENVIRONMENT_PREFIX + entry.getKey(),
                    entry.getValue());
        }
    }
    Build buildInfo = infoBuilder.build();
    // for backwards compatibility with Artifactory 2.2.3
    if (parent != null) {
        buildInfo.setParentBuildId(parent.getUpstreamProject());
    }
    return buildInfo;
}

From source file:org.locationtech.geogig.remotes.pack.DiffRemoteRefsOp.java

@Override
protected List<RefDiff> _call() {
    checkState(remote != null, "no remote provided");
    // list of refs/remotes/<remote>/<refname> or refs/heads according to formatAsRemoteRefs
    Map<String, Ref> remotes;
    Map<String, Ref> locals;
    {
        // current live remote refs in the remote's local namespace (e.g. refs/heads/<branch>)
        Iterable<Ref> remoteRefs = getRemoteRefs();
        if (formatAsRemoteRefs) {
            // format refs returned by the remote in its local namespaces to our repository's
            // remotes namespace
            remoteRefs = command(MapRef.class)//
                    .setRemote(remote.getInfo())//
                    .convertToRemote()//
                    .addAll(remoteRefs)//
                    .call();
        }
        // current local copy of the remote refs (e.g. refs/remotes/<remote>/<branch>)
        List<Ref> remoteLocalRefs = Lists.newArrayList(getRemoteLocalRefs());
        if (!formatAsRemoteRefs) {
            // format local repository copies of the remote refs to the remote's local namespace
            remoteLocalRefs = command(MapRef.class)//
                    .setRemote(remote.getInfo())//
                    .convertToLocal()//
                    .addAll(remoteLocalRefs)//
                    .call();
        }
        if (this.getTags) {
            Map<String, RevTag> tags = Maps.uniqueIndex(command(TagListOp.class).call(), (t) -> t.getName());
            for (Ref rf : remoteRefs) {
                if (rf.getName().startsWith(Ref.TAGS_PREFIX) && tags.containsKey(rf.localName())) {
                    RevTag tag = tags.get(rf.localName());
                    remoteLocalRefs.add(new Ref(Ref.TAGS_PREFIX + tag.getName(), tag.getId()));
                }
            }
        }
        remotes = Maps.uniqueIndex(remoteRefs, (r) -> r.getName());
        locals = Maps.uniqueIndex(remoteLocalRefs, (r) -> r.getName());
    }
    final boolean mapped = remote.getInfo().getMapped();
    if (mapped) {
        // for a mapped remote, we are only interested in the branch we are mapped to
        final String mappedBranch = remote.getInfo().getMappedBranch();
        checkNotNull(mappedBranch);
        final String mappedBranchName = Ref.localName(mappedBranch);
        remotes = Maps.filterKeys(remotes, (name) -> Ref.localName(name).equals(mappedBranchName));
        locals = Maps.filterKeys(locals, (name) -> Ref.localName(name).equals(mappedBranchName));
    }
    MapDifference<String, Ref> difference = Maps.difference(remotes, locals);

    // refs existing on the remote and not on the local repo
    Collection<Ref> newRemoteRefs = difference.entriesOnlyOnLeft().values();

    // remote refs existing on the local repo and not existing on the remote anymore
    Collection<Ref> removedRemoteRefs = difference.entriesOnlyOnRight().values();

    // refs existing both in local and remote with different objectIds
    Collection<ValueDifference<Ref>> changes = difference.entriesDiffering().values();

    List<RefDiff> diffs = new ArrayList<>();
    newRemoteRefs.forEach((r) -> diffs.add(RefDiff.added(r)));
    removedRemoteRefs.forEach((r) -> diffs.add(RefDiff.removed(r)));
    // v.leftValue() == new (remote copy), v.rightValue() == old (local copy)
    changes.forEach((v) -> diffs.add(RefDiff.updated(v.rightValue(), v.leftValue())));

    return diffs;
}

From source file:org.esco.grouperui.web.tag.renderer.EscoHtmlTableRenderer.java

/**
 * Outputs a log entry at the end of the process, comparing the requested
 * parameters with the obtained parameters.
 *
 * @param theGroupDb
 *            the parameter source.
 */
private void verifyAndLogParameter(final ParameterGroup theGroupDb) {
    // The obtained parameters
    Map<String, Parameter> reqParameter = (Map<String, Parameter>) FacesContext.getCurrentInstance()
            .getExternalContext().getRequestMap().get(EscoHtmlTableRenderer.PARAMETER);

    // The requested parameters.
    Map<String, Parameter> groupParam = new HashMap<String, Parameter>();
    for (Parameter param : theGroupDb.getParameters()) {
        groupParam.put(param.getKey(), param);
    }

    if (reqParameter != null) {
        // The difference between the two map.
        MapDifference<String, Parameter> mapDiffs = Maps.difference(reqParameter, groupParam);

        this.logDifferences(mapDiffs.entriesOnlyOnLeft(), mapDiffs.entriesOnlyOnRight());
    }
}

From source file:org.opendaylight.sxp.route.core.RouteReactorImpl.java

/**
 * Removes routes for deleted {@link RoutingDefinition}s; if a {@link Routing}
 * could not be removed, it is added to the provided {@link List}.
 *
 * @param routingDifference         contains configuration changes
 * @param outcomingRouteDefinitions where result will be stored
 */
@VisibleForTesting
void processDeleted(final MapDifference<IpAddress, RoutingDefinition> routingDifference,
        final List<RoutingDefinition> outcomingRouteDefinitions) {
    routingDifference.entriesOnlyOnLeft().forEach((vIpAddress, routingDef) -> {
        final Routing routingService = routingServiceMap.remove(vIpAddress);
        if (routingService != null) {
            findSxpNodesOnVirtualIp(vIpAddress).forEach(SxpNode::shutdown);
            final boolean succeeded = routingService.removeRouteForCurrentService();
            if (!succeeded) {
                LOG.warn("Route cannot be closed (D): {}", routingService);
                outcomingRouteDefinitions.add(RouteUtil.createOperationalRouteDefinition(routingDef, false,
                        "route can not be closed (by remove)"));
            }
        }
    });
}