Example usage for com.google.common.collect Sets newLinkedHashSet

List of usage examples for com.google.common.collect Sets newLinkedHashSet

Introduction

On this page you can find an example usage for com.google.common.collect Sets newLinkedHashSet.

Prototype

public static <E> LinkedHashSet<E> newLinkedHashSet() 

Source Link

Document

Creates a mutable, empty LinkedHashSet instance.

Usage

From source file:org.apache.druid.query.aggregation.datasketches.hll.HllSketchUnionPostAggregator.java

@Override
public Set<String> getDependentFields() {
    // Union of the dependent fields of every child post-aggregator.
    // LinkedHashSet keeps them in first-encounter order while de-duplicating.
    final Set<String> result = Sets.newLinkedHashSet();
    for (final PostAggregator postAggregator : fields) {
        result.addAll(postAggregator.getDependentFields());
    }
    return result;
}

From source file:org.eclipse.emf.compare.egit.internal.match.EGitMatchEngineFactory.java

/**
 * {@inheritDoc}/*w w w  . j  a  v a 2 s.  c o m*/
 */
@Override
public IMatchEngine getMatchEngine() {
    final UseIdentifiers useUdentifier = getUseIdentifierValue(getConfiguration());
    final Collection<IResourceMatchingStrategy> strategies = Sets.newLinkedHashSet();
    strategies.add(new LocationMatchingStrategy());
    return DefaultMatchEngine.create(useUdentifier,
            EMFCompareRCPPlugin.getDefault().getWeightProviderRegistry(), strategies);
}

From source file:org.sonar.server.startup.EnableProfiles.java

/** Collects the key of every configured language, preserving registration order. */
private Set<String> getLanguageKeys() {
    final Set<String> languageKeys = Sets.newLinkedHashSet();
    for (final Language language : languages) {
        languageKeys.add(language.getKey());
    }
    return languageKeys;
}

From source file:org.eclipse.sirius.diagram.ui.business.internal.migration.CollapseSupportOnRegionMigrationParticipant.java

/**
 * Migration hook: when the model was saved with a version older than
 * {@code MIGRATION_VERSION}, ensures every region-compartment GMF {@link Node}
 * carries a {@link DrawerStyle} so that collapse support is available on regions.
 * Nodes that already own a drawer style are left untouched.
 */
@SuppressWarnings("unchecked")
@Override
protected void postLoad(DAnalysis dAnalysis, Version loadedVersion) {
    if (loadedVersion.compareTo(MIGRATION_VERSION) < 0) {
        EClass drawerStyleClass = NotationPackage.eINSTANCE.getDrawerStyle();

        // Step 1: get all view to update
        // Walk every diagram of every owned view; for each associated GMF diagram,
        // collect the region-compartment nodes that have no DrawerStyle yet.
        // NOTE(review): IsRegionCompartmentPredicate presumably selects region
        // compartment views — confirm against its definition.
        final Collection<View> allViewsToUpdate = Sets.newLinkedHashSet();
        for (DView dView : dAnalysis.getOwnedViews()) {
            for (DDiagram dDiagram : Iterables.filter(dView.getOwnedRepresentations(), DDiagram.class)) {
                DiagramCreationUtil diagramCreationUtil = new DiagramCreationUtil(dDiagram);
                if (diagramCreationUtil.findAssociatedGMFDiagram()) {
                    Diagram gmfDiagram = diagramCreationUtil.getAssociatedGMFDiagram();
                    // Copy into a list first so the iteration is not affected by
                    // later model modifications.
                    for (Node node : Lists.newArrayList(
                            Iterators.filter(Iterators.filter(gmfDiagram.eAllContents(), Node.class),
                                    new IsRegionCompartmentPredicate()))) {
                        Style style = node.getStyle(drawerStyleClass);
                        if (style == null) {
                            allViewsToUpdate.add(node);
                        }
                    }
                }
            }
        }

        // Step 2: update views
        // Mutation is deferred until after the traversal above, so the EMF
        // content iterators are never invalidated mid-walk.
        for (View viewToUpdate : allViewsToUpdate) {
            DrawerStyle style = (DrawerStyle) drawerStyleClass.getEPackage().getEFactoryInstance()
                    .create(drawerStyleClass);
            style.setCollapsed(false); // regions start expanded after migration
            viewToUpdate.getStyles().add(style);
        }
    }
}

From source file:org.eclipse.elk.alg.layered.compaction.oned.CGroup.java

/**
 * Creates a {@link CGroup} holding the given {@link CNode}s.
 *
 * @param inputCNodes
 *            the {@link CNode}s to add to this group
 */
public CGroup(final CNode... inputCNodes) {
    cNodes = Sets.newLinkedHashSet();
    incomingConstraints = Sets.newHashSet();
    outDegree = 0;
    // The first node handed in becomes the group's reference node.
    for (final CNode node : inputCNodes) {
        if (reference == null) {
            reference = node;
        }
        addCNode(node);
    }
}

From source file:com.github.djabry.platform.service.security.DefaultPermissionMapper.java

@Override
public Set<Permission> mapPermissions(Role role) {
    // Look up the permissions registered for this role. Unknown roles yield an
    // empty — but still mutable — set rather than null.
    final Set<Permission> foundPermissions = m.get(role);
    if (foundPermissions == null) {
        return Sets.newLinkedHashSet();
    }
    // Defensive copy: callers may freely mutate the returned set.
    return Sets.newLinkedHashSet(foundPermissions);
}

From source file:org.gradle.api.internal.tasks.compile.incremental.classpath.ClasspathSnapshotFactory.java

private Set<CreateSnapshot> snapshotAll(final Iterable<File> entries) {
    final Set<CreateSnapshot> snapshotOperations = Sets.newLinkedHashSet();

    buildOperationExecutor.runAll(new Action<BuildOperationQueue<CreateSnapshot>>() {
        @Override/* www. j  ava2  s .  com*/
        public void execute(BuildOperationQueue<CreateSnapshot> buildOperationQueue) {
            for (File entry : entries) {
                CreateSnapshot operation = new CreateSnapshot(entry);
                snapshotOperations.add(operation);
                buildOperationQueue.add(operation);
            }
        }
    });
    return snapshotOperations;
}

From source file:com.textocat.textokit.commons.util.TrainDevTestCorpusSplitter.java

/**
 * Splits the corpus into train/dev/test partitions by file size and writes one
 * relative-path listing file per partition into {@code outputDir}.
 * <p>
 * The corpus is partitioned into {@code partitionsNum} roughly size-balanced
 * parts; the last two parts become the dev and test sets and the remainder is
 * merged into the training set.
 *
 * @throws Exception if partitioning or writing the listing files fails
 */
private void run() throws Exception {
    IOFileFilter corpusFileFilter;
    if (corpusFileSuffix == null) {
        // No suffix configured: accept every file in the corpus directory.
        corpusFileFilter = FileFilterUtils.trueFileFilter();
    } else {
        corpusFileFilter = FileFilterUtils.suffixFileFilter(corpusFileSuffix);
    }
    // null sub-dir filter means "do not recurse" for CorpusUtils.
    IOFileFilter corpusSubDirFilter = includeSubDirectores ? TrueFileFilter.INSTANCE : null;
    List<Set<File>> partitions = Lists.newArrayList(CorpusUtils.partitionCorpusByFileSize(corpusDir,
            corpusFileFilter, corpusSubDirFilter, partitionsNum));
    if (partitions.size() != partitionsNum) {
        throw new IllegalStateException();
    }
    // make dev partition from the last because it is a little bit smaller
    Set<File> devFiles = getAndRemove(partitions, partitions.size() - 1);
    Set<File> testFiles = getAndRemove(partitions, partitions.size() - 1);
    // Everything left over becomes the training set, in partition order.
    Set<File> trainFiles = Sets.newLinkedHashSet();
    for (Set<File> s : partitions) {
        trainFiles.addAll(s);
    }
    // write files
    File devPartFile = new File(outputDir, CorpusUtils.getDevPartitionFilename(0));
    FileUtils.writeLines(devPartFile, "utf-8", CorpusUtils.toRelativePaths(corpusDir, devFiles));
    File testPartFile = new File(outputDir, CorpusUtils.getTestPartitionFilename(0));
    FileUtils.writeLines(testPartFile, "utf-8", CorpusUtils.toRelativePaths(corpusDir, testFiles));
    File trainPartFile = new File(outputDir, CorpusUtils.getTrainPartitionFilename(0));
    FileUtils.writeLines(trainPartFile, "utf-8", CorpusUtils.toRelativePaths(corpusDir, trainFiles));
}

From source file:org.apache.nutch.api.impl.db.DbPageConverter.java

/**
 * Restricts a page's schema fields to those named in {@code queryFields}.
 * An empty or null query-field set means "no restriction": every schema
 * field is returned.
 */
private static Set<Field> filterFields(WebPage page, Set<String> queryFields) {
    final List<Field> pageFields = page.getSchema().getFields();
    if (CollectionUtils.isEmpty(queryFields)) {
        return Sets.newHashSet(pageFields);
    }

    final Set<Field> selected = Sets.newLinkedHashSet();
    for (final Field pageField : pageFields) {
        if (!queryFields.contains(pageField.name())) {
            continue;
        }
        selected.add(pageField);
    }
    return selected;
}

From source file:org.xpect.util.IssueVisualizer.java

/**
 * Renders the document with issue markers interleaved: issues that cannot be
 * mapped to a document position are listed first, then each document line is
 * followed (when it has issues) by a marker line and a legend.
 * <p>
 * Each issue on a line is assigned a bit id {@code 1 << index}; the marker
 * character under a document position is the hex sum of the bit ids of all
 * issues covering that position, so overlapping issues produce combined marks.
 * NOTE(review): with more than 4 overlapping issues the hex mark is wider than
 * one character and column alignment drifts — presumably acceptable here.
 *
 * @param documentString the text the issues refer to
 * @param issues the issues to visualize
 * @return the annotated document as a single string
 */
public String visualize(String documentString, Collection<Issue> issues) {
    Text document = new Text(documentString);
    @SuppressWarnings("unchecked")
    List<Issue>[] mapped = new List[document.length()];
    List<Issue> unmapped = Lists.newArrayList();
    // Bucket each issue under every character offset it covers; issues with
    // no usable offset/length go to "unmapped".
    for (Issue issue : issues) {
        if (issue.getOffset() != null && issue.getLength() != null && issue.getOffset() >= 0
                && issue.getOffset() < document.length() && issue.getLength() > 0) {
            // Clamp to the document end so an overlong issue cannot overrun the array.
            int max = Math.min(issue.getOffset() + issue.getLength(), document.length());
            for (int i = issue.getOffset(); i < max; i++) {
                if (mapped[i] == null)
                    mapped[i] = Lists.newArrayList(issue);
                else
                    mapped[i].add(issue);
            }
        } else
            unmapped.add(issue);
    }
    StringBuffer result = new StringBuffer();
    // Unmapped issues are printed first, one per line.
    for (Issue issue : unmapped) {
        if (result.length() > 0)
            result.append(document.getNL());
        result.append(issueToString(issue));
    }
    boolean first = true;
    int offset = 0; // running character offset of the current line's start
    for (String line : document.splitIntoLines()) {
        if (first)
            first = false;
        else
            result.append(document.getNL());
        result.append(line);
        boolean lineHasIssue = false;
        for (int i = offset; i < offset + line.length(); i++)
            if (mapped[i] != null)
                lineHasIssue = true;
        if (lineHasIssue) {
            result.append(document.getNL());
            // Insertion order fixes each issue's bit index for this line.
            Set<Issue> lineIssues = Sets.newLinkedHashSet();
            for (int i = offset; i < offset + line.length(); i++)
                if (mapped[i] != null) {
                    int mark = 0;
                    lineIssues.addAll(mapped[i]);
                    // Sum the bit ids of all issues covering this column.
                    for (Issue issue : mapped[i])
                        mark += 1 << Iterables.indexOf(lineIssues, Predicates.equalTo(issue));
                    result.append(Integer.toHexString(mark));
                } else
                    // Mirror tabs so the marker line stays column-aligned.
                    result.append(document.charAt(i) == '\t' ? "\t" : " ");
            // Legend: one "id: issue" entry per issue seen on this line.
            for (Issue issue : lineIssues) {
                int id = 1 << Iterables.indexOf(lineIssues, Predicates.equalTo(issue));
                result.append(document.getNL());
                result.append(id);
                result.append(": ");
                result.append(issueToString(issue));
            }
        }
        // Advance past the line plus its terminator ("Lenght" typo is in the
        // external Text API — cannot be fixed here).
        offset += line.length() + document.currentLineEndLenght(offset);
    }
    return result.toString();
}