Example usage for com.google.common.collect Collections2 transform

List of usage examples for com.google.common.collect Collections2 transform

Introduction

On this page you can find example usage of com.google.common.collect Collections2 transform.

Prototype

public static <F, T> Collection<T> transform(Collection<F> fromCollection, Function<? super F, T> function) 

Document

Returns a collection that applies function to each element of fromCollection.
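
Before the source-file examples, here is a minimal, self-contained sketch (not taken from any of the files below; the class and variable names are illustrative) showing the basic call pattern. Note that the returned collection is a lazy view over the source collection: the function is applied each time an element is read.

import java.util.Arrays;
import java.util.Collection;
import java.util.List;

import com.google.common.base.Function;
import com.google.common.collect.Collections2;

public class Collections2TransformExample {

    public static void main(String[] args) {
        List<String> names = Arrays.asList("alpha", "beta", "gamma");

        // The result is a view; apply() runs lazily on each read.
        Collection<Integer> lengths = Collections2.transform(names, new Function<String, Integer>() {
            @Override
            public Integer apply(String input) {
                return input.length();
            }
        });

        System.out.println(lengths); // [5, 4, 5]
    }
}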

Usage

From source file:de.flapdoodle.logparser.buildsupport.ExtractMarkdownFromCode.java

private static Collection<String> resolveResources(String sourceFileName, Collection<String> lines)
        throws IOException {
    List<String> ret = Lists.newArrayList();
    String includeTag = "// @resource";

    for (String line : lines) {
        int includeDefIdx = line.indexOf(includeTag);
        if (includeDefIdx != -1) {
            String fileName = line.substring(includeDefIdx + includeTag.length()).trim();
            String basePath = sourceFileName.substring(0, sourceFileName.lastIndexOf('/') + 1);
            basePath = basePath.replaceAll("src/test/java", "src/test/resources");
            String includeFilename = basePath + fileName;

            System.out.println("Resource: " + includeFilename);

            List<String> includedLines = Files.readLines(new File(includeFilename), Charsets.UTF_8);
            Collection<String> shiftedLines = Collections2.transform(includedLines,
                    new Function<String, String>() {

                        @Override
                        public String apply(String input) {
                            return "\t\t" + input;
                        }
                    });
            ret.addAll(shiftedLines);
        } else {
            ret.add(line);
        }
    }
    return ret;
}

From source file:org.opendaylight.netconf.impl.osgi.NetconfSessionMonitoringService.java

synchronized Sessions getSessions() {
    final Collection<Session> managementSessions = Collections2.transform(sessions,
            NetconfManagementSession::toManagementSession);
    return new SessionsBuilder().setSession(ImmutableList.copyOf(managementSessions)).build();
}

From source file:com.ning.billing.invoice.api.invoice.DefaultInvoicePaymentApi.java

@Override
public List<InvoicePayment> getInvoicePayments(final UUID paymentId, final TenantContext context) {
    return ImmutableList.<InvoicePayment>copyOf(Collections2.transform(
            dao.getInvoicePayments(paymentId, internalCallContextFactory.createInternalTenantContext(context)),
            new Function<InvoicePaymentModelDao, InvoicePayment>() {
                @Override
                public InvoicePayment apply(final InvoicePaymentModelDao input) {
                    return new DefaultInvoicePayment(input);
                }
            }));
}

From source file:com.dangdang.ddframe.job.cloud.state.ready.ReadyService.java

/**
 * Get all eligible job contexts.
 *
 * @param ineligibleJobContexts job contexts that are not eligible for execution
 * @return eligible job contexts
 */
public Collection<JobContext> getAllEligibleJobContexts(final Collection<JobContext> ineligibleJobContexts) {
    if (!regCenter.isExisted(ReadyNode.ROOT)) {
        return Collections.emptyList();
    }
    Collection<String> ineligibleJobNames = Collections2.transform(ineligibleJobContexts,
            new Function<JobContext, String>() {

                @Override
                public String apply(final JobContext input) {
                    return input.getJobConfig().getJobName();
                }
            });
    List<String> uniqueNames = regCenter.getChildrenKeys(ReadyNode.ROOT);
    List<JobContext> result = new ArrayList<>(uniqueNames.size());
    Set<String> assignedJobNames = new HashSet<>(uniqueNames.size(), 1);
    for (String each : uniqueNames) {
        String jobName = UniqueJob.from(each).getJobName();
        if (assignedJobNames.contains(jobName) || ineligibleJobNames.contains(jobName)) {
            continue;
        }
        Optional<CloudJobConfiguration> jobConfig = configService.load(jobName);
        if (!jobConfig.isPresent()) {
            regCenter.remove(ReadyNode.getReadyJobNodePath(each));
            continue;
        }
        if (runningService.isJobRunning(jobName)
                && jobConfig.get().getTypeConfig().getCoreConfig().isMisfire()) {
            misfiredService.add(jobName);
            continue;
        }
        result.add(JobContext.from(jobConfig.get(), ExecutionType.READY));
        assignedJobNames.add(jobName);
    }
    return result;
}

From source file:de.cosmocode.palava.workqueue.ConfigurableDelayQueue.java

@Override
public <T> T[] toArray(T[] a) {
    return Collections2.transform(queue, extractor).toArray(a);
}

From source file:br.com.objectos.comuns.relational.jdbc.JdbcConfigure.java

private Collection<String> getStrings(Class<?> keyClass) {
    Collection<Element> els = sql.getElements(keyClass);
    return Collections2.transform(els, Functions.toStringFunction());
}

From source file:edu.umn.msi.tropix.persistence.service.impl.FolderServiceImpl.java

private Collection<TropixObjectContext<VirtualFolder>> buildSharedFolderContexts(
        final Collection<VirtualFolder> virtualFolders, final String gridId) {
    final Multimap<String, String> objectsRoles = getTropixObjectDao().getRoles(gridId,
            ModelUtils.getIds(virtualFolders));
    return Collections2.transform(Collections2.filter(virtualFolders, new Predicate<TropixObject>() {

        public boolean apply(final TropixObject input) {
            return objectsRoles.containsKey(input.getId())
                    && ModelPredicates.isValidObjectPredicate().apply(input);
        }

    }), new Function<VirtualFolder, TropixObjectContext<VirtualFolder>>() {

        public TropixObjectContext<VirtualFolder> apply(final VirtualFolder input) {
            final Collection<String> objectRoles = objectsRoles.get(input.getId());
            final TropixObjectUserAuthorities context = new TropixObjectUserAuthorities(
                    objectRoles.contains("write"), objectRoles.contains("write"));
            return new TropixObjectContext<VirtualFolder>(context, input);
        }

    });

}

From source file:org.opendaylight.controller.md.statistics.manager.FlowCapableTracker.java

@Override
public synchronized void onDataChanged(final DataChangeEvent<InstanceIdentifier<?>, DataObject> change) {
    logger.debug("Tracker at root {} processing notification", root);

    /*
     * First process all the identifiers which were removed, trying to figure out
     * whether they constitute removal of FlowCapableNode.
     */
    final Collection<NodeKey> removedNodes = Collections2
            .filter(Collections2.transform(Sets.filter(change.getRemovedOperationalData(), filterIdentifiers),
                    new Function<InstanceIdentifier<?>, NodeKey>() {
                        @Override
                        public NodeKey apply(final InstanceIdentifier<?> input) {
                            final NodeKey key = input.firstKeyOf(Node.class, NodeKey.class);
                            if (key == null) {
                                // FIXME: do we have a backup plan?
                                logger.info("Failed to extract node key from {}", input);
                            }
                            return key;
                        }
                    }), Predicates.notNull());
    stats.stopNodeHandlers(removedNodes);

    final Collection<NodeKey> addedNodes = Collections2.filter(
            Collections2.transform(Sets.filter(change.getCreatedOperationalData().keySet(), filterIdentifiers),
                    new Function<InstanceIdentifier<?>, NodeKey>() {
                        @Override
                        public NodeKey apply(final InstanceIdentifier<?> input) {
                            final NodeKey key = input.firstKeyOf(Node.class, NodeKey.class);
                            if (key == null) {
                                // FIXME: do we have a backup plan?
                                logger.info("Failed to extract node key from {}", input);
                            }
                            return key;
                        }
                    }),
            Predicates.notNull());
    stats.startNodeHandlers(addedNodes);

    logger.debug("Tracker at root {} finished processing notification", root);
}

From source file:com.addthis.bundle.core.BundleMapView.java

@Override
public Collection<ValueObject> values() {
    return Collections2.transform(keySet(), bundle::getValue);
}

From source file:org.jetbrains.jet.lang.types.BoundsSubstitutor.java

@NotNull
private static List<TypeParameterDescriptor> getTypeParametersFromUpperBounds(
        @NotNull TypeParameterDescriptor current, @NotNull final List<TypeParameterDescriptor> typeParameters) {
    return DFS.dfs(current.getUpperBounds(), new DFS.Neighbors<JetType>() {
        @NotNull
        @Override
        public Iterable<JetType> getNeighbors(JetType current) {
            return Collections2.transform(current.getArguments(), PROJECTIONS_TO_TYPES);
        }
    }, new DFS.NodeHandlerWithListResult<JetType, TypeParameterDescriptor>() {
        @Override
        public boolean beforeChildren(JetType current) {
            ClassifierDescriptor declarationDescriptor = current.getConstructor().getDeclarationDescriptor();
            // typeParameters is a List, but it contains very few elements, so it's fine to call contains() on it
            //noinspection SuspiciousMethodCalls
            if (typeParameters.contains(declarationDescriptor)) {
                result.add((TypeParameterDescriptor) declarationDescriptor);
            }

            return true;
        }
    });
}