Example usage for com.google.common.collect Maps filterKeys

Introduction

On this page you can find example usages of com.google.common.collect.Maps.filterKeys, taken from open-source projects.

Prototype

@CheckReturnValue
public static <K, V> BiMap<K, V> filterKeys(BiMap<K, V> unfiltered, final Predicate<? super K> keyPredicate) 

Document

Returns a bimap containing the mappings in unfiltered whose keys satisfy a predicate.
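
Note that the prototype shown above is the BiMap overload; Maps.filterKeys also has overloads for Map, SortedMap, and NavigableMap, and the examples below all resolve to the plain Map variant. Every overload returns a live view of the unfiltered map rather than a copy, and putting a key that fails the predicate into the view throws IllegalArgumentException. A minimal, self-contained sketch (assuming only Guava on the classpath and Java 8+, so a lambda can serve as the com.google.common.base.Predicate):

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;

import java.util.Map;

public class FilterKeysExample {
    public static void main(String[] args) {
        Map<String, Integer> scores = ImmutableMap.of("alice", 10, "bob", 7, "admin", 0);

        // Keep only the entries whose key is not "admin".
        // The result is a live view of the original map, not a copy.
        Map<String, Integer> users = Maps.filterKeys(scores, key -> !key.equals("admin"));

        System.out.println(users); // {alice=10, bob=7}
    }
}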

Usage

From source file:co.cask.tigon.internal.app.runtime.distributed.AbstractProgramTwillRunnable.java

/**
 * Creates program arguments. It includes all configurations from the specification, excluding hConf and cConf.
 */
private Arguments createProgramArguments(TwillContext context, Map<String, String> configs) {
    Map<String, String> args = ImmutableMap.<String, String>builder()
            .put(ProgramOptionConstants.INSTANCE_ID, Integer.toString(context.getInstanceId()))
            .put(ProgramOptionConstants.INSTANCES, Integer.toString(context.getInstanceCount()))
            .put(ProgramOptionConstants.RUN_ID, context.getApplicationRunId().getId())
            .putAll(Maps.filterKeys(configs, Predicates.not(Predicates.in(ImmutableSet.of("hConf", "cConf")))))
            .build();

    return new BasicArguments(args);
}
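
This first example excludes a fixed set of keys with Predicates.not(Predicates.in(...)). On Java 8 and later the same exclusion can be written with a lambda; a hedged sketch of that equivalent (ConfigFilter, withoutReservedKeys, and the sample keys are illustrative names, not part of the original code):

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;

import java.util.Map;
import java.util.Set;

class ConfigFilter {
    /** Returns a live view of {@code configs} without the reserved hConf/cConf entries. */
    static Map<String, String> withoutReservedKeys(Map<String, String> configs) {
        Set<String> excluded = ImmutableSet.of("hConf", "cConf");
        // Lambda equivalent of Predicates.not(Predicates.in(excluded)).
        return Maps.filterKeys(configs, key -> !excluded.contains(key));
    }

    public static void main(String[] args) {
        System.out.println(withoutReservedKeys(ImmutableMap.of("hConf", "...", "flow.id", "f1")));
        // {flow.id=f1}
    }
}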

From source file:com.android.builder.internal.packaging.IncrementalPackager.java

/**
 * Updates native libraries in the archive.
 *
 * @param files the resources to update
 * @throws IOException failed to update the archive
 */
public void updateNativeLibraries(@NonNull ImmutableMap<RelativeFile, FileStatus> files) throws IOException {
    updateFiles(PackagedFileUpdates.fromIncrementalRelativeFileSet(
            Maps.filterKeys(files, Predicates.compose(mAbiPredicate, RelativeFile.EXTRACT_PATH))));
}
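
Here the key predicate is built with Predicates.compose: an ABI predicate is applied to the path extracted from each RelativeFile key. A simplified, self-contained sketch of that compose-on-the-key pattern, using plain strings and hypothetical names in place of the Android builder types:

import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;

import java.util.Map;

class ComposedKeyFilterSketch {
    public static void main(String[] args) {
        Map<String, Long> sizesByPath = ImmutableMap.of(
                "lib/arm64-v8a/libfoo.so", 1024L,
                "assets/readme.txt", 64L);

        // Predicate over a value derived from the key (here: the file extension)...
        Predicate<String> isNativeLib = ext -> ext.equals("so");
        // ...composed with the function that derives it from the key.
        Function<String, String> extension = path -> path.substring(path.lastIndexOf('.') + 1);

        // Mirrors Predicates.compose(mAbiPredicate, RelativeFile.EXTRACT_PATH) above.
        Map<String, Long> nativeLibs =
                Maps.filterKeys(sizesByPath, Predicates.compose(isNativeLib, extension));

        System.out.println(nativeLibs); // {lib/arm64-v8a/libfoo.so=1024}
    }
}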

From source file:grakn.core.graql.executor.WriteExecutor.java

ConceptMap write(ConceptMap preExisting) {
    concepts.putAll(preExisting.map());

    // time to execute writers for properties
    int executeWritersSpanId = ServerTracing.startScopedChildSpan("WriteExecutor.write execute writers");

    for (Writer writer : sortedWriters()) {
        writer.execute(this);
    }

    ServerTracing.closeScopedChildSpan(executeWritersSpanId);
    // time to delete concepts marked for deletion

    int deleteConceptsSpanId = ServerTracing.startScopedChildSpan("WriteExecutor.write delete concepts");

    for (Concept concept : conceptsToDelete) {
        concept.delete();
    }

    ServerTracing.closeScopedChildSpan(deleteConceptsSpanId);

    // time to build concepts

    int buildConceptsSpanId = ServerTracing
            .startScopedChildSpan("WriteExecutor.write build concepts for answer");

    conceptBuilders.forEach((var, builder) -> buildConcept(var, builder));

    ServerTracing.closeScopedChildSpan(buildConceptsSpanId);

    ImmutableMap.Builder<Variable, Concept> allConcepts = ImmutableMap.<Variable, Concept>builder()
            .putAll(concepts);

    // Make sure to include all equivalent vars in the result
    for (Variable var : equivalentVars.getNodes()) {
        allConcepts.put(var, concepts.get(equivalentVars.componentOf(var)));
    }

    Map<Variable, Concept> namedConcepts = Maps.filterKeys(allConcepts.build(), Variable::isReturned);
    return new ConceptMap(namedConcepts);
}

From source file:ai.grakn.graql.internal.query.QueryOperationExecutor.java

private Answer insertAll(Answer results) {
    concepts.putAll(results.map());

    sortProperties().forEach(property -> property.executor(executionType).execute(this));

    conceptBuilders.forEach((var, builder) -> concepts.put(var, builder.build()));

    ImmutableMap.Builder<Var, Concept> allConcepts = ImmutableMap.<Var, Concept>builder().putAll(concepts);

    // Make sure to include all equivalent vars in the result
    for (Var var : equivalentVars.getNodes()) {
        allConcepts.put(var, concepts.get(equivalentVars.componentOf(var)));
    }

    Map<Var, Concept> namedConcepts = Maps.filterKeys(allConcepts.build(), Var::isUserDefinedName);
    return new QueryAnswer(namedConcepts);
}
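
Both Grakn examples filter the assembled answer map with a method reference so that only user-named variables end up in the result. A minimal sketch of that pattern (the Var class below is a stand-in invented for illustration, not the Grakn type); note the explicit copy, since filterKeys on its own only returns a view:

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;

import java.util.Map;

class MethodReferencePredicateSketch {
    /** Stand-in for a query variable; isReturned() marks user-named variables. */
    static final class Var {
        final String name;
        final boolean returned;
        Var(String name, boolean returned) { this.name = name; this.returned = returned; }
        boolean isReturned() { return returned; }
        @Override public String toString() { return "$" + name; }
    }

    public static void main(String[] args) {
        Map<Var, String> allConcepts = ImmutableMap.of(
                new Var("x", true), "person",
                new Var("tmp0", false), "relation");

        // Method reference as the key predicate; copy the view so the result
        // is independent of later changes to the underlying map.
        Map<Var, String> named =
                ImmutableMap.copyOf(Maps.filterKeys(allConcepts, Var::isReturned));

        System.out.println(named); // {$x=person}
    }
}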

From source file:edu.udo.scaffoldhunter.gui.dialogs.ConnectionDialog.java

private void setConnection(ConnectionData connection) {
    if (connection == null) {
        ((CardLayout) profilePanel.getLayout()).show(profilePanel, NO_CONNECTION);
        nameText.setEnabled(false);
        typeDBCombo.setEnabled(false);
        return;
    } else {
        ((CardLayout) profilePanel.getLayout()).show(profilePanel, connection.getDbType().name());
        nameText.setEnabled(true);
        typeDBCombo.setEnabled(true);
    }
    connectionList.setSelectedValue(connection, true);
    nameText.setText(connection.getConnectionName());
    typeDBCombo.setSelectedItem(connection.getDbType());
    ProfilePanel panel = profilePanels.get(connection.getDbType());
    panel.setData(connection);
    Predicate<ConnectionType> other = Predicates.not(Predicates.equalTo(connection.getDbType()));
    for (ProfilePanel p : Maps.filterKeys(profilePanels, other).values()) {
        p.clearData();
    }
    validateListener.validate();
    if (dataManager.getConnections().contains(connection)) {
        needsSaving = null;
    } else {
        needsSaving = connection;
    }
}

From source file:org.locationtech.geogig.geotools.geopkg.InterchangeFormat.java

/**
 * Imports the features from the geopackage based on the existing audit table onto the current
 * branch. If the head commit of the current branch is different from the commit that the
 * features were exported from, the features will be merged into the current branch. The calling
 * function should anticipate the possibility of merge conflicts.
 *
 * @param commitMessage commit message for the imported features
 * @param authorName author name to use for the commit
 * @param authorEmail author email to use for the commit
 * @param tableNames a list of tables to import from the geopackage; if none are specified, all
 *        tables will be imported
 * @return the commit with the imported features, or the merge commit if it was not a
 *         fast-forward merge
 */
public GeopkgImportResult importAuditLog(@Nullable String commitMessage, @Nullable String authorName,
        @Nullable String authorEmail, @Nullable String... tableNames) {

    final Set<String> importTables = tableNames == null ? ImmutableSet.of() : Sets.newHashSet(tableNames);

    List<AuditReport> reports = new ArrayList<>();
    GeoPackage geopackage;
    try {
        geopackage = new GeoPackage(geopackageDbFile);
    } catch (IOException e) {
        throw Throwables.propagate(e);
    }
    final DataSource dataSource = geopackage.getDataSource();

    RevCommit importCommit = null;
    GeopkgImportResult importResult = null;

    try (Connection connection = dataSource.getConnection();
            GeopkgGeogigMetadata metadata = new GeopkgGeogigMetadata(connection)) {

        final Map<String, AuditTable> tables = Maps.filterKeys(
                Maps.uniqueIndex(metadata.getAuditTables(), t -> t.getTableName()),
                k -> importTables.isEmpty() || importTables.contains(k));

        checkState(tables.size() > 0, "No table to import.");
        Iterator<AuditTable> iter = tables.values().iterator();
        ObjectId commitId = iter.next().getCommitId();
        while (iter.hasNext()) {
            checkState(commitId.equals(iter.next().getCommitId()),
                    "Unable to simultaneously import tables with different source commit ids.");
        }

        RevCommit commit = context.objectDatabase().getCommit(commitId);
        RevTree baseTree = context.objectDatabase().getTree(commit.getTreeId());
        RevTreeBuilder newTreeBuilder = CanonicalTreeBuilder.create(context.objectDatabase(), baseTree);

        Map<String, String> fidMappings = null;
        for (AuditTable t : tables.values()) {
            fidMappings = metadata.getFidMappings(t.getTableName());
            AuditReport report = importAuditLog(geopackage, t, baseTree, newTreeBuilder, fidMappings);
            reports.add(report);
        }

        RevTree newTree = newTreeBuilder.build();
        context.objectDatabase().put(newTree);

        if (authorName == null) {
            authorName = context.command(ConfigGet.class).setName("user.name").call().orNull();
        }
        if (authorEmail == null) {
            authorEmail = context.command(ConfigGet.class).setName("user.email").call().orNull();
        }

        CommitBuilder builder = new CommitBuilder();
        long timestamp = context.platform().currentTimeMillis();

        builder.setParentIds(Arrays.asList(commitId));
        builder.setTreeId(newTree.getId());
        builder.setCommitterTimestamp(timestamp);
        builder.setCommitter(authorName);
        builder.setCommitterEmail(authorEmail);
        builder.setAuthorTimestamp(timestamp);
        builder.setAuthor(authorName);
        builder.setAuthorEmail(authorEmail);
        if (commitMessage != null) {
            builder.setMessage(commitMessage);
        } else {
            builder.setMessage("Imported features from geopackage.");
        }

        importCommit = builder.build();
        importResult = new GeopkgImportResult(importCommit);
        for (AuditReport auditReport : reports) {
            if (auditReport.newMappings != null) {
                importResult.newMappings.put(auditReport.table.getFeatureTreePath(), auditReport.newMappings);
            }
        }

        context.objectDatabase().put(importCommit);

        MergeOp merge = context.command(MergeOp.class).setAuthor(authorName, authorEmail)
                .addCommit(importCommit.getId());

        if (commitMessage != null) {
            merge.setMessage("Merge: " + commitMessage);
        }

        MergeReport report = merge.call();
        RevCommit newCommit = report.getMergeCommit();
        importResult.newCommit = newCommit;

    } catch (MergeConflictsException e) {
        throw new GeopkgMergeConflictsException(e, importResult);
    } catch (Exception e) {
        throw Throwables.propagate(e);
    } finally {
        geopackage.close();
    }
    return importResult;
}
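
The filterKeys call in this example selects audit tables by name: Maps.uniqueIndex builds a name-keyed map, and the predicate treats an empty request set as "import everything". A stripped-down sketch of that select-by-name idiom (the table names and class name are made up for illustration):

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;

import java.util.List;
import java.util.Map;
import java.util.Set;

class SelectByNameSketch {
    public static void main(String[] args) {
        List<String> tables = ImmutableList.of("roads", "buildings", "rivers");
        Set<String> requested = ImmutableSet.of("roads");

        // Index the elements by name (here each element is its own name),
        // then keep the requested ones; an empty request keeps everything.
        Map<String, String> byName = Maps.uniqueIndex(tables, name -> name);
        Map<String, String> selected = Maps.filterKeys(byName,
                name -> requested.isEmpty() || requested.contains(name));

        System.out.println(selected); // {roads=roads}
    }
}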

From source file:org.jfrog.hudson.release.promotion.UnifiedPromoteBuildAction.java

/**
 * Form submission calls this method.
 */
@SuppressWarnings({ "UnusedDeclaration" })
public void doSubmit(StaplerRequest req, StaplerResponse resp) throws IOException, ServletException {
    getACL().checkPermission(getPermission());

    bindParameters(req);
    // current user is bound to the thread and will be lost in the perform method
    User user = User.current();
    String ciUser = (user == null) ? "anonymous" : user.getId();

    JSONObject formData = req.getSubmittedForm();
    if (formData.has("promotionPlugin")) {
        JSONObject pluginSettings = formData.getJSONObject("promotionPlugin");
        if (pluginSettings.has("pluginName")) {
            String pluginName = pluginSettings.getString("pluginName");
            if (!UserPluginInfo.NO_PLUGIN_KEY.equals(pluginName)) {
                PluginSettings settings = new PluginSettings();
                Map<String, String> paramMap = Maps.newHashMap();
                settings.setPluginName(pluginName);
                Map<String, Object> filteredPluginSettings = Maps.filterKeys(pluginSettings,
                        new Predicate<String>() {
                            public boolean apply(String input) {
                                return StringUtils.isNotBlank(input) && !"pluginName".equals(input);
                            }
                        });
                for (Map.Entry<String, Object> settingsEntry : filteredPluginSettings.entrySet()) {
                    String key = settingsEntry.getKey();
                    paramMap.put(key, pluginSettings.getString(key));
                }
                paramMap.put("ciUser", ciUser);
                if (!paramMap.isEmpty()) {
                    settings.setParamMap(paramMap);
                }
                setPromotionPlugin(settings);
            }
        }
    }

    final BuildInfoAwareConfigurator configurator = getCurrentConfigurator();
    ArtifactoryServer server = configurator.getArtifactoryServer();

    new PromoteWorkerThread(server,
            CredentialManager.getPreferredDeployer((DeployerOverrider) configurator, server), ciUser).start();

    resp.sendRedirect(".");
}

From source file:org.jfrog.teamcity.agent.util.ArtifactoryClientConfigurationBuilder.java

private static void gatherBuildInfoParams(Map<String, String> allParamMap,
        ArtifactoryClientConfiguration.PublisherHandler configuration, final String propPrefix,
        final String... propTypes) {
    Map<String, String> filteredProperties = Maps.filterKeys(allParamMap, new Predicate<String>() {
        public boolean apply(String key) {
            if (StringUtils.isNotBlank(key)) {
                if (key.startsWith(propPrefix)) {
                    return true;
                }
                for (String propType : propTypes) {
                    if (key.startsWith(propType + propPrefix)) {
                        return true;
                    }
                }
            }
            return false;
        }
    });
    filteredProperties = Maps.filterValues(filteredProperties, new Predicate<String>() {
        public boolean apply(String value) {
            return StringUtils.isNotBlank(value);
        }
    });

    for (Map.Entry<String, String> entryToAdd : filteredProperties.entrySet()) {
        String key = entryToAdd.getKey();
        for (String propType : propTypes) {
            key = StringUtils.remove(key, propType);
        }
        key = StringUtils.remove(key, propPrefix);
        configuration.addMatrixParam(key, entryToAdd.getValue());
    }
}
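
This example chains Maps.filterKeys and Maps.filterValues to keep only the non-blank properties under a given prefix before stripping the prefix. A simplified sketch of that chain (prefix stripping omitted, and a plain isEmpty check stands in for StringUtils.isNotBlank):

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;

import java.util.Map;

class PrefixedPropertiesSketch {
    /** Returns a view of the non-empty entries of {@code all} whose key starts with {@code prefix}. */
    static Map<String, String> select(Map<String, String> all, String prefix) {
        Map<String, String> byKey = Maps.filterKeys(all, key -> key != null && key.startsWith(prefix));
        // Both calls return views, so chaining them simply stacks the filters.
        return Maps.filterValues(byKey, value -> value != null && !value.isEmpty());
    }

    public static void main(String[] args) {
        Map<String, String> params = ImmutableMap.of(
                "artifactory.prop.color", "red",
                "artifactory.prop.empty", "",
                "other.key", "x");
        System.out.println(select(params, "artifactory.prop.")); // {artifactory.prop.color=red}
    }
}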

From source file:com.isotrol.impe3.web20.impl.MembersServiceImpl.java

private MemberEntity fill(MemberEntity entity, MemberDTO dto) {
    final Calendar date = Calendar.getInstance();
    date.setTime(dto.getDate());
    entity.setDate(date);
    entity.setDisplayName(dto.getDisplayName());
    entity.setEmail(dto.getEmail());
    entity.setMemberCode(dto.getCode());
    entity.setName(dto.getName());
    entity.setBlocked(dto.isBlocked());

    // final Set<FavoriteEntity> favorites = entity.getFavorites();
    // TODO no se contemplan en el dto.

    final Set<String> profiles = entity.getProfiles();
    profiles.clear();
    final Set<String> dtopf = dto.getProfiles();
    if (dtopf != null) {
        profiles.addAll(Sets.filter(dtopf, notNull()));
    }
    final Map<String, String> properties = entity.getProperties();
    properties.clear();
    final Map<String, String> dtopr = dto.getProperties();
    if (dtopr != null) {
        properties.putAll(Maps.filterKeys(Maps.filterValues(dtopr, notNull()), notNull()));
    }

    return entity;
}
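
Here filterValues and filterKeys are nested with Predicates.notNull() so that entries with a null key or a null value are dropped before being copied into the entity properties. A tiny self-contained sketch of that null-stripping combination:

import static com.google.common.base.Predicates.notNull;

import com.google.common.collect.Maps;

import java.util.HashMap;
import java.util.Map;

class DropNullEntriesSketch {
    public static void main(String[] args) {
        Map<String, String> dirty = new HashMap<>();
        dirty.put("name", "Ada");
        dirty.put("nickname", null);   // null value
        dirty.put(null, "orphan");     // null key

        // Drop entries with a null value, then entries with a null key;
        // both calls are lazy views, so no intermediate copy is made.
        Map<String, String> clean =
                Maps.filterKeys(Maps.filterValues(dirty, notNull()), notNull());

        System.out.println(clean); // {name=Ada}
    }
}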

From source file:org.openqa.selenium.firefox.FirefoxDriver.java

/**
 * Drops capabilities that we shouldn't send over the wire.
 *
 * Used for capabilities which aren't BeanToJson-convertible, and are only used by the local
 * launcher.
 */
private static Capabilities dropCapabilities(Capabilities capabilities, String... keysToRemove) {
    if (capabilities == null) {
        return new DesiredCapabilities();
    }
    final Set<String> toRemove = Sets.newHashSet(keysToRemove);
    DesiredCapabilities caps = new DesiredCapabilities(
            Maps.filterKeys(capabilities.asMap(), new Predicate<String>() {
                public boolean apply(String key) {
                    return !toRemove.contains(key);
                }
            }));

    // Ensure that the proxy is in a state fit to be sent to the extension
    Proxy proxy = Proxy.extractFrom(capabilities);
    if (proxy != null) {
        caps.setCapability(PROXY, new BeanToJsonConverter().convert(proxy));
    }

    return caps;
}
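
The anonymous Predicate here predates Java 8; the same blacklist filter can also be written with the named predicates seen in the first example on this page. A hedged sketch (DropKeysSketch and drop are illustrative names, not Selenium API):

import com.google.common.base.Predicates;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;

import java.util.Map;
import java.util.Set;

class DropKeysSketch {
    /** Returns a view of {@code caps} without the given keys. */
    static Map<String, Object> drop(Map<String, Object> caps, String... keysToRemove) {
        Set<String> toRemove = Sets.newHashSet(keysToRemove);
        // Named-predicate equivalent of the anonymous Predicate above.
        return Maps.filterKeys(caps, Predicates.not(Predicates.in(toRemove)));
    }

    public static void main(String[] args) {
        Map<String, Object> caps = ImmutableMap.of("browserName", "firefox", "firefox_profile", new Object());
        System.out.println(drop(caps, "firefox_profile").keySet()); // [browserName]
    }
}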