Example usage for com.google.common.collect Iterables skip

List of usage examples for com.google.common.collect Iterables skip

Introduction

On this page you can find example usages of com.google.common.collect Iterables#skip.

Prototype

public static <T> Iterable<T> skip(final Iterable<T> iterable, final int numberToSkip) 

Source Link

Document

Returns a view of iterable that skips its first numberToSkip elements.

Usage

From source file:org.obm.imap.archive.services.SharedMailboxesProcessor.java

/**
 * Builds a predicate that accepts a mailbox unless its sub-path (everything
 * after the first IMAP folder separator) starts with one of the given
 * folder names. Top-level mailboxes (no separator) are always accepted.
 */
private Predicate<ListInfo> filterFolders(final String... folders) {
    return new Predicate<ListInfo>() {

        @Override
        public boolean apply(ListInfo listInfo) {
            String mailboxName = listInfo.getName();
            if (!mailboxName.contains(String.valueOf(MailboxPaths.IMAP_FOLDER_SEPARATOR))) {
                // No separator: top-level mailbox, nothing to filter on.
                return true;
            }
            // Drop the first path segment, then rebuild the remainder.
            Iterable<String> segments = Splitter.on(MailboxPaths.IMAP_FOLDER_SEPARATOR).split(mailboxName);
            String remainder = Joiner.on(MailboxPaths.IMAP_FOLDER_SEPARATOR)
                    .join(Iterables.skip(segments, 1));
            for (String excluded : folders) {
                if (remainder.startsWith(excluded + MailboxPaths.IMAP_FOLDER_SEPARATOR)) {
                    return false;
                }
            }
            return true;
        }
    };
}

From source file:com.slimgears.slimprefs.apt.ClassBindingGenerator.java

/**
 * Emits the generated binding class body: a public singleton {@code INSTANCE}
 * field, a private constructor, and a {@code bind} method composing one
 * {@code CompositePreferenceBinding} out of all collected bindings.
 *
 * @param builder    type builder receiving the generated members
 * @param type       the annotated type being processed (unused here)
 * @param interfaces extra interfaces of the generated type (unused here)
 */
@Override
protected void build(TypeSpec.Builder builder, TypeElement type, TypeElement... interfaces) {
    builder.addField(FieldSpec
            .builder(ParameterizedTypeName.get(ClassName.get(ClassBinding.class), targetTypeName), "INSTANCE")
            .addModifiers(Modifier.STATIC, Modifier.PUBLIC, Modifier.FINAL)
            .initializer("new $T()", getTypeName()).build());
    builder.addMethod(MethodSpec.constructorBuilder().addModifiers(Modifier.PRIVATE).build());
    MethodSpec.Builder bindMethodBuilder = MethodSpec.methodBuilder("bind").returns(PreferenceBinding.class)
            .addParameter(PreferenceProvider.class, "provider")
            .addParameter(targetTypeName, "target", Modifier.FINAL).addAnnotation(Override.class)
            .addModifiers(Modifier.PUBLIC).addCode("return $T.create(\n", CompositePreferenceBinding.class);

    // Emit all bindings comma-separated in a single pass. The previous
    // Iterables.limit(bindings, size - 1) / Iterables.skip(bindings, size - 1)
    // pair threw IllegalArgumentException (negative count) whenever
    // `bindings` was empty; this loop degrades gracefully to no arguments.
    boolean first = true;
    for (BindingDescriptor binding : bindings) {
        if (!first) {
            bindMethodBuilder.addCode(",\n");
        }
        bindMethodBuilder.addCode(binding.build());
        first = false;
    }

    bindMethodBuilder.addCode(");\n");
    builder.addMethod(bindMethodBuilder.build());
}

From source file:nextmethod.web.razor.parser.JavaCodeParserStatements.java

/**
 * Parses a {@code using} declaration: either a plain package/type name or an
 * alias assignment ({@code using X = Some.Type}), then attaches an
 * {@code AddImportCodeGenerator} to the current span. A trailing semicolon
 * is consumed if present.
 */
protected void usingDeclaration() {
    // Set block type to directive
    getContext().getCurrentBlock().setType(BlockType.Directive);

    // Parse a type name
    doAssert(JavaSymbolType.Identifier);
    packageOrTypeName();
    // Look ahead over whitespace to see whether an '=' (alias form) follows.
    final Iterable<JavaSymbol> ws = readWhile(isSpacingToken(true, true));
    if (at(JavaSymbolType.Assign)) {
        // Alias form: commit the lookahead whitespace, then consume '='.
        accept(ws);
        doAssert(JavaSymbolType.Assign);
        acceptAndMoveNext();

        acceptWhile(isSpacingToken(true, true));

        // One more package or type name (the aliased target)
        packageOrTypeName();
    } else {
        // Not an alias: undo the lookahead so the whitespace is re-read later.
        putCurrentBack();
        putBack(ws);
    }

    final SpanBuilder span = getSpan();
    span.getEditHandler().setAcceptedCharacters(AcceptedCharacters.AnyExceptNewLine);
    // Skip the first symbol of the span content when building the import.
    // NOTE(review): presumably that first symbol is the `using` keyword
    // itself — confirm against SymbolExtensions.getContent.
    span.setCodeGenerator(new AddImportCodeGenerator(SymbolExtensions.getContent(span, input1 -> {
        if (input1 == null)
            return null;
        return Iterables.skip(input1, 1);
    }).toString(), SyntaxConstants.Java.UsingKeywordLength));

    // Optional ";"
    if (ensureCurrent()) {
        optional(JavaSymbolType.Semicolon);
    }
}

From source file:com.facebook.buck.cxx.Depfiles.java

/**
 * Reads a Makefile-style dependency file, normalizes the header paths it
 * mentions, and writes the result to {@code destDepFile}.
 *
 * <p>Depfile entries may be symlink-tree paths (headers found via search
 * paths) or source-relative paths; both are resolved through
 * {@code headerPathNormalizer}. Headers that cannot be normalized are
 * reported according to {@code headerVerification}.
 *
 * @return 0 on success, 1 if an untracked header is found while the
 *         verification mode is {@code ERROR}
 */
public static int parseAndWriteBuckCompatibleDepfile(ExecutionContext context, ProjectFilesystem filesystem,
        HeaderPathNormalizer headerPathNormalizer, HeaderVerification headerVerification, Path sourceDepFile,
        Path destDepFile, Path inputPath, Path outputPath) throws IOException {
    Logger.get(Depfiles.class).debug("Processing dependency file %s as Makefile", sourceDepFile);
    ImmutableMap<String, Object> perfEventParams = ImmutableMap.of("input", inputPath, "output", outputPath);
    try (InputStream input = filesystem.newFileInputStream(sourceDepFile);
            BufferedReader reader = new BufferedReader(new InputStreamReader(input));
            OutputStream output = filesystem.newFileOutputStream(destDepFile);
            BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(output));
            SimplePerfEvent.Scope perfEvent = SimplePerfEvent.scope(context.getBuckEventBus(),
                    PerfEventId.of("depfile-parse"), perfEventParams)) {
        ImmutableList<String> prereqs = Depfiles.parseDepfile(reader).getPrereqs();

        // Files injected via command-line flags (e.g. `-fsanitize-blacklist=<file>`)
        // come first in the prereq list, followed by the input source; the real
        // headers start right after the input source entry.
        //
        // TODO(#11303454): This means we're not including the content of these special files into the
        // rule key.  The correct way to handle this is likely to support macros in preprocessor/
        // compiler flags at which point we can use the entries for these files in the depfile to
        // verify that the user properly references these files via the macros.
        int inputIndex = prereqs.indexOf(inputPath.toString());
        Preconditions.checkState(inputIndex != -1, "Could not find input source (%s) in dep file prereqs (%s)",
                inputPath, prereqs);

        for (String rawHeader : Iterables.skip(prereqs, inputIndex + 1)) {
            Path header = Paths.get(rawHeader).normalize();
            Optional<Path> absolutePath = headerPathNormalizer.getAbsolutePathForUnnormalizedPath(header);
            if (absolutePath.isPresent()) {
                // Known header: emit its normalized absolute path.
                Preconditions.checkState(absolutePath.get().isAbsolute());
                writer.write(absolutePath.get().toString());
                writer.newLine();
                continue;
            }
            // Unknown header: report it unless verification ignores or
            // whitelists it.
            boolean tolerated = headerVerification.getMode() == HeaderVerification.Mode.IGNORE
                    || headerVerification.isWhitelisted(header.toString());
            if (!tolerated) {
                context.getBuckEventBus()
                        .post(ConsoleEvent.create(
                                headerVerification.getMode() == HeaderVerification.Mode.ERROR ? Level.SEVERE
                                        : Level.WARNING,
                                "%s: included an untracked header \"%s\"", inputPath, header));
                if (headerVerification.getMode() == HeaderVerification.Mode.ERROR) {
                    return 1;
                }
            }
        }
    }
    return 0;
}

From source file:edu.udo.scaffoldhunter.model.util.Subsets.java

/**
 * Calculates the lowest common ancestor of all given subsets.
 * 
 * @param subsets
 *            the given subsets; must contain at least one subset.
 * 
 * @return the lowest common ancestor of all the given subsets.
 * @throws IllegalArgumentException
 *             if the subsets do not share a common ancestor (i.e. they are
 *             not part of the same subset tree).
 */
public static Subset getLowestCommonAncestor(Iterable<Subset> subsets) {
    // Seed with the ancestor chain of the first subset, then intersect it
    // with every other subset's chain.
    List<Subset> ancestors = Lists.newArrayList(getAncestors(Iterables.get(subsets, 0)));

    for (Subset s : Iterables.skip(subsets, 1)) {
        ancestors.retainAll(getAncestors(s));
    }

    if (ancestors.isEmpty()) {
        throw new IllegalArgumentException(
                "The subsets do not have a common ancestor. " + "They are not in a common subset tree.");
    }

    // getAncestors presumably yields ancestors nearest-first, making index 0
    // the lowest common one — TODO confirm its ordering contract.
    return ancestors.get(0);
}

From source file:com.twitter.aurora.scheduler.http.SchedulerzJob.java

/**
 * Returns one page of {@code iterable}: drops the first {@code offset}
 * elements and keeps at most {@code PAGE_SIZE} of the remainder, snapshotted
 * into an immutable list.
 */
private static <T> Iterable<T> offsetAndLimit(Iterable<T> iterable, int offset) {
    Iterable<T> afterOffset = Iterables.skip(iterable, offset);
    Iterable<T> page = Iterables.limit(afterOffset, PAGE_SIZE);
    return ImmutableList.copyOf(page);
}

From source file:com.google.gerrit.server.git.GroupCollector.java

/**
 * Assigns change-group identifiers to {@code c} based on how many of its
 * parents are "interesting" (new in this push), recording the result in
 * {@code groups} and, for merges of multiple new parents, aliasing the
 * extra parents' groups via {@code groupAliases}.
 */
public void visit(RevCommit c) {
    checkState(!done, "visit() called after getGroups()");
    Set<RevCommit> interestingParents = getInterestingParents(c);

    if (interestingParents.size() == 0) {
        // All parents are uninteresting: treat this commit as the root of a new
        // group of related changes.
        groups.put(c, c.name());
        return;
    } else if (interestingParents.size() == 1) {
        // Only one parent is new in this push. If it is the only parent, just use
        // that parent's group. If there are multiple parents, perhaps this commit
        // is a merge of a side branch. This commit belongs in that parent's group
        // in that case.
        groups.putAll(c, groups.get(interestingParents.iterator().next()));
        return;
    }

    // Multiple parents, merging at least two branches containing new commits in
    // this push.
    Set<String> thisCommitGroups = new TreeSet<>();
    Set<String> parentGroupsNewInThisPush = Sets.newLinkedHashSetWithExpectedSize(interestingParents.size());
    for (RevCommit p : interestingParents) {
        Collection<String> parentGroups = groups.get(p);
        if (parentGroups.isEmpty()) {
            // Parents must be visited before children; a group-less parent means
            // the traversal order was violated.
            throw new IllegalStateException(
                    String.format("no group assigned to parent %s of commit %s", p.name(), c.name()));
        }

        for (String parentGroup : parentGroups) {
            if (isGroupFromExistingPatchSet(p, parentGroup)) {
                // This parent's group is from an existing patch set, i.e. the parent
                // not new in this push. Use this group for the commit.
                thisCommitGroups.add(parentGroup);
            } else {
                // This parent's group is new in this push.
                parentGroupsNewInThisPush.add(parentGroup);
            }
        }
    }

    Iterable<String> toAlias;
    if (thisCommitGroups.isEmpty()) {
        // All parent groups were new in this push. Pick the first one and alias
        // other parents' groups to this first parent.
        String firstParentGroup = parentGroupsNewInThisPush.iterator().next();
        thisCommitGroups = ImmutableSet.of(firstParentGroup);
        toAlias = Iterables.skip(parentGroupsNewInThisPush, 1);
    } else {
        // For each parent group that was new in this push, alias it to the actual
        // computed group(s) for this commit.
        toAlias = parentGroupsNewInThisPush;
    }
    groups.putAll(c, thisCommitGroups);
    for (String pg : toAlias) {
        groupAliases.putAll(pg, thisCommitGroups);
    }
}

From source file:org.apache.jackrabbit.oak.remote.content.ContentRemoteTree.java

/**
 * Returns this tree's children with the configured pagination (start offset
 * and maximum count) and node filters applied, in that order.
 */
private Iterable<Tree> getFilteredChildren() {
    Iterable<Tree> children = tree.getChildren();

    // Pagination: skip the first N children if a positive start is configured.
    if (filters.getChildrenStart() > 0) {
        children = Iterables.skip(children, filters.getChildrenStart());
    }

    // A non-negative count caps how many children remain after the offset.
    if (filters.getChildrenCount() >= 0) {
        children = Iterables.limit(children, filters.getChildrenCount());
    }

    return Iterables.filter(children, getNodeFilters());
}

From source file:org.apache.hadoop.yarn.webapp.hamlet.HamletImpl.java

/**
 * Parse selector into id and classes
 * @param selector in the form of (#id)?(.class)*
 * @return a two-element array [id, "space-separated classes"].
 *         Either element could be null.
 * @throws WebAppException when both are null or syntax error.
 */
public static String[] parseSelector(String selector) {
    String[] result = new String[] { null, null };
    Iterable<String> rs = SS.split(selector);
    Iterator<String> it = rs.iterator();
    if (it.hasNext()) {
        // NOTE(review): charAt(0) assumes SS never yields an empty token
        // (i.e. the splitter omits empty strings) — confirm its configuration.
        String maybeId = it.next();
        if (maybeId.charAt(0) == '#') {
            result[S_ID] = maybeId.substring(1);
            if (it.hasNext()) {
                // Remaining tokens are class names; skip the id token.
                result[S_CLASS] = SJ.join(Iterables.skip(rs, 1));
            }
        } else {
            // No id prefix: every token is a class name.
            result[S_CLASS] = SJ.join(rs);
        }
        return result;
    }
    throw new WebAppException("Error parsing selector: " + selector);
}

From source file:org.knime.core.node.defaultnodesettings.DialogComponentRapidMinerProject.java

/**
 * Generates the input repository locations with the KNIME inputs.
 *
 * @param process
 *            The {@link Process} where the special locations will be used.
 * @param hasTableSpec
 *            The content with {@link DataTableSpec}s.
 * @return The locations for the KNIME input sources.
 */
public static List<String> generateLocations(final Process process, final HasTableSpecAndRowId hasTableSpec) {
    final ArrayList<String> locations = Lists.newArrayList();
    // One synthetic KNIME repository location per input table, indexed from 1.
    for (final Entry<DataTableSpec, Integer> indexedSpec : Zip
            .zipWithIndexList(hasTableSpec.getFilteredTableSpecs(), 1)) {
        locations.add("//" + KnimeRepository.KNIME + "/"
                + KnimeRepository.KnimeIOObjectEntry.KNIME_TABLE + indexedSpec.getValue());
    }
    // Keep any additional locations already configured on the process context
    // beyond the ones generated above.
    locations.addAll(Lists.newArrayList(
            Iterables.skip(process.getContext().getInputRepositoryLocations(), locations.size())));
    return locations;
}