Example usage for com.google.common.collect Sets newLinkedHashSet

List of usage examples for com.google.common.collect Sets newLinkedHashSet

Introduction

On this page you can find example usages for com.google.common.collect Sets newLinkedHashSet.

Prototype

public static <E> LinkedHashSet<E> newLinkedHashSet(Iterable<? extends E> elements) 

Source Link

Document

Creates a mutable LinkedHashSet instance containing the given elements in order.

Usage

From source file:com.github.ferstl.maven.pomenforcers.priority.CompoundPriorityOrdering.java

// Snapshots the ordering criteria into an insertion-ordered, duplicate-free set,
// so the caller's iterable can be mutated afterwards without affecting this instance.
private CompoundPriorityOrdering(Iterable<F> artifactElements) {
    this.orderBy = Sets.newLinkedHashSet(artifactElements);
    // Multimap that preserves both key insertion order and value insertion order.
    this.priorityMap = LinkedHashMultimap.create();
}

From source file:org.jclouds.tmrk.enterprisecloud.domain.Actions.java

// Defensive copy: snapshot the given actions into an insertion-ordered set so later
// mutation of the caller's set cannot affect this instance.
private Actions(Set<Action> actions) {
    this.actions = Sets.newLinkedHashSet(actions);
}

From source file:org.prebake.service.bake.Finisher.java

/**
 * Moves a product's build outputs from the working directory into the client tree,
 * archiving files that were previously present but are no longer produced.
 *
 * @param productName the product being finished; used only for logging.
 * @param workingDir the temporary directory the product was built in.
 * @param workingDirInputs files under {@code workingDir} considered as candidate outputs.
 * @param toCopyBack globs selecting which files count as product outputs.
 * @return the destination paths under the client root, one per output file, in output order.
 * @throws IOException if directories cannot be created or files cannot be moved.
 */
ImmutableList<Path> moveToRepo(BoundName productName, Path workingDir, final Set<Path> workingDirInputs,
        ImmutableGlobSet toCopyBack) throws IOException {
    // TODO: HIGH: respect the ignorable pattern for outPaths.

    // Compute the list of files under the working directory that match a
    // product's output globs.
    ImmutableList<Path> outPaths = WorkingDir.matching(workingDir, workingDirInputs, toCopyBack);
    // Compute the set of files that are already in the client directory.
    ImmutableList<Path> existingPaths = Baker.sortedFilesMatching(files, toCopyBack);

    // newPaths = outputs that do not yet exist in the client tree.
    Set<Path> newPaths = Sets.newLinkedHashSet(outPaths);
    newPaths.removeAll(existingPaths);

    // obsoletedPaths = client files the product used to produce but no longer does.
    Set<Path> obsoletedPaths = Sets.newLinkedHashSet(existingPaths);
    obsoletedPaths.removeAll(outPaths);

    logger.log(Level.FINE, "{0} produced {1} file(s) : {2} new, {3} obsolete.",
            new Object[] { productName, outPaths.size(), newPaths.size(), obsoletedPaths.size() });

    final Path clientRoot = files.getVersionRoot();

    // Create directories for the new paths
    for (Path p : newPaths) {
        Baker.mkdirs(clientRoot.resolve(p).getParent(), umask);
    }

    // Move the obsoleted files into the archive.
    if (!obsoletedPaths.isEmpty()) {
        Path archiveDir = clientRoot.resolve(FileNames.DIR).resolve(FileNames.ARCHIVE);
        for (Path p : obsoletedPaths) {
            Path obsoleteDest = archiveDir.resolve(p);
            logger.log(Level.FINE, "Archived {0}", obsoleteDest);
            Baker.mkdirs(obsoleteDest.getParent(), umask);
            Path clientFile = clientRoot.resolve(p);
            try {
                clientFile.moveTo(obsoleteDest, StandardCopyOption.REPLACE_EXISTING);
            } catch (IOException ex) {
                // Junk can accumulate under the archive dir.
                // Specifically, a directory could be archived, and then all attempts
                // to archive a regular file of the same name would fail.
                // Best-effort: log the failure and delete the un-archivable file so the
                // client tree still ends up consistent with the product's outputs.
                LogRecord r = new LogRecord(Level.WARNING, "Failed to archive {0}");
                r.setParameters(new Object[] { obsoleteDest });
                r.setThrown(ex);
                logger.log(r);
                clientFile.deleteIfExists();
            }
        }
        logger.log(Level.INFO, "{0} obsolete file(s) can be found under {1}",
                new Object[] { obsoletedPaths.size(), archiveDir });
    }

    // Finally move every output from the working dir over its client-tree destination.
    ImmutableList.Builder<Path> outClientPaths = ImmutableList.builder();
    for (Path p : outPaths) {
        Path working = workingDir.resolve(p);
        Path client = clientRoot.resolve(p);
        working.moveTo(client, StandardCopyOption.REPLACE_EXISTING);
        outClientPaths.add(client);
    }

    return outClientPaths.build();
}

From source file:org.jclouds.tmrk.enterprisecloud.domain.network.Nics.java

// Defensive copy: snapshot the given NICs into an insertion-ordered set so later
// mutation of the caller's set cannot affect this instance.
private Nics(Set<VirtualNic> nics) {
    this.nics = Sets.newLinkedHashSet(nics);
}

From source file:org.jclouds.tmrk.enterprisecloud.domain.Links.java

// Defensive copy: snapshot the given links into an insertion-ordered set so later
// mutation of the caller's set cannot affect this instance.
private Links(Set<Link> links) {
    this.links = Sets.newLinkedHashSet(links);
}

From source file:org.jclouds.tmrk.enterprisecloud.domain.network.IpAddresses.java

// Defensive copy: snapshot the given addresses into an insertion-ordered set so later
// mutation of the caller's set cannot affect this instance.
private IpAddresses(Set<IpAddress> addresses) {
    this.addresses = Sets.newLinkedHashSet(addresses);
}

From source file:com.textocat.textokit.postagger.opennlp.FeatureExtractorsBasedContextGenerator.java

/**
 * Builds a context generator backed by the given feature extractors.
 * Defensive copies are taken of the extractor list (immutable) and the target
 * grammatical categories (insertion-ordered, de-duplicated).
 *
 * @param morphDict may be null; when null, no dictionary-based context generator is created.
 */
public FeatureExtractorsBasedContextGenerator(int prevTagsInHistory, List<FeatureExtractor1> featureExtractors,
        Iterable<String> targetGramCategories, MorphDictionary morphDict) {
    this.prevTagsInHistory = prevTagsInHistory;
    this.featureExtractors = ImmutableList.copyOf(featureExtractors);
    this.targetGramCategories = Sets.newLinkedHashSet(targetGramCategories);
    this.morphDict = morphDict;
    // The dictionary-based generator is optional: only built when a dictionary is supplied.
    if (this.morphDict != null) {
        dictContextGen = new DictionaryBasedContextGenerator(targetGramCategories, morphDict);
    }
}

From source file:org.eclipse.sirius.business.internal.query.DAnalysisesInternalQuery.java

/**
 * Recursively collects every analysis transitively referenced by {@code analysis}
 * into {@code analysisAndReferenced}. Analyses already collected or not attached to
 * a resource are skipped; the "already collected" check also terminates reference cycles.
 */
private void addAllReferencedAnalyses(final Collection<DAnalysis> analysisAndReferenced,
        final DAnalysis analysis) {
    // Iterate over a snapshot of the referenced analyses — presumably to decouple the
    // iteration from the underlying (EMF) list while recursing; confirm with callers.
    for (final DAnalysis referenced : Sets.newLinkedHashSet(analysis.getReferencedAnalysis())) {
        if (!analysisAndReferenced.contains(referenced) && referenced.eResource() != null) {
            analysisAndReferenced.add(referenced);
            addAllReferencedAnalyses(analysisAndReferenced, referenced);
        }
    }
}

From source file:org.richfaces.cdk.templatecompiler.el.types.ComplexType.java

/**
 * Returns the imports this complex type needs: those of the component type followed by
 * the imports required by each type argument, de-duplicated in first-seen order.
 */
@Override
public Iterable<JavaImport> getRequiredImports() {
    // Imports demanded by the generic type arguments, flattened into one sequence.
    Iterable<JavaImport> argumentImports = Iterables
            .concat(Iterables.transform(Arrays.asList(typeArguments), RequireImports.IMPORTS_TRANSFORM));
    // Component-type imports come first; the linked set drops duplicates while
    // preserving encounter order.
    return Sets.newLinkedHashSet(Iterables.concat(clearComponentType.getRequiredImports(), argumentImports));
}

From source file:de.marx_labs.utilities.common.searchtree.MapSearchTree.java

/**
 * Returns up to {@code size} of the values stored under the highest keys,
 * ordered oldest-first (ascending key order).
 *
 * <p>Fix: the original called {@code lastKey()} unconditionally, which throws
 * {@link java.util.NoSuchElementException} on an empty tree; an empty map (or a
 * non-positive {@code size}) now yields an empty set instead.
 *
 * @param size maximum number of elements to return; non-positive yields an empty set.
 * @return an insertion-ordered set of at most {@code size} values, oldest first.
 */
@Override
public Set<T> newest(int size) {
    Set<T> matches = new LinkedHashSet<T>();
    if (size <= 0 || isEmpty()) {
        return matches;
    }

    // Walk downward from the highest key, collecting values newest-to-oldest.
    Long key = lastKey();
    for (int i = 0; i < size; i++) {
        matches.add(get(key));
        key = lowerKey(key);

        if (key == null) {
            break;
        }
    }

    // Collected newest-first; reverse so the returned set iterates oldest-first.
    List<T> reverseList = Lists.reverse(Lists.newArrayList(matches));

    return Sets.newLinkedHashSet(reverseList);
}