Example usage for com.google.common.collect Table contains

List of usage examples for com.google.common.collect Table contains

Introduction

On this page you can find example usages of com.google.common.collect Table.contains.

Prototype

boolean contains(@Nullable Object rowKey, @Nullable Object columnKey);

Source Link

Document

Returns true if the table contains a mapping with the specified row and column keys.

Usage

From source file:umd.twittertools.download.DataForHua.java

@SuppressWarnings("static-access")
public static void main(String[] args) throws Exception {
    // Command-line definition: host/port of the thrift search service, TREC topic
    // and qrels files, plus optional tuning and authentication flags.
    Options options = new Options();

    options.addOption(OptionBuilder.withArgName("string").hasArg().withDescription("host").create(HOST_OPTION));
    options.addOption(OptionBuilder.withArgName("port").hasArg().withDescription("port").create(PORT_OPTION));
    options.addOption(OptionBuilder.withArgName("file").hasArg()
            .withDescription("file containing topics in TREC format").create(QUERIES_OPTION));
    options.addOption(
            OptionBuilder.withArgName("string").hasArg().withDescription("qrels file").create(QRELS_OPTION));
    options.addOption(OptionBuilder.withArgName("num").hasArg().withDescription("number of results to return")
            .create(NUM_RESULTS_OPTION));
    options.addOption(
            OptionBuilder.withArgName("string").hasArg().withDescription("group id").create(GROUP_OPTION));
    options.addOption(
            OptionBuilder.withArgName("string").hasArg().withDescription("access token").create(TOKEN_OPTION));
    options.addOption(
            OptionBuilder.withArgName("string").hasArg().withDescription("runtag").create(RUNTAG_OPTION));
    options.addOption(new Option(VERBOSE_OPTION, "print out complete document"));

    CommandLine cmdline = null;
    CommandLineParser parser = new GnuParser();
    try {
        cmdline = parser.parse(options, args);
    } catch (ParseException exp) {
        System.err.println("Error parsing command line: " + exp.getMessage());
        System.exit(-1);
    }

    // Host, port, topics file and qrels file are all mandatory.
    if (!cmdline.hasOption(HOST_OPTION) || !cmdline.hasOption(PORT_OPTION) || !cmdline.hasOption(QUERIES_OPTION)
            || !cmdline.hasOption(QRELS_OPTION)) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(DataForHua.class.getName(), options);
        System.exit(-1);
    }

    String queryFile = cmdline.getOptionValue(QUERIES_OPTION);
    if (!new File(queryFile).exists()) {
        System.err.println("Error: " + queryFile + " doesn't exist!");
        System.exit(-1);
    }

    // NOTE(review): runtag and verbose are parsed but not used anywhere in this
    // method's visible body — kept for CLI compatibility; confirm intended use.
    String runtag = cmdline.hasOption(RUNTAG_OPTION) ? cmdline.getOptionValue(RUNTAG_OPTION) : DEFAULT_RUNTAG;

    TrecTopicSet topicsFile = TrecTopicSet.fromFile(new File(queryFile));

    int numResults = 10000;
    try {
        if (cmdline.hasOption(NUM_RESULTS_OPTION)) {
            numResults = Integer.parseInt(cmdline.getOptionValue(NUM_RESULTS_OPTION));
        }
    } catch (NumberFormatException e) {
        System.err.println("Invalid " + NUM_RESULTS_OPTION + ": " + cmdline.getOptionValue(NUM_RESULTS_OPTION));
        System.exit(-1);
    }

    String group = cmdline.hasOption(GROUP_OPTION) ? cmdline.getOptionValue(GROUP_OPTION) : null;
    String token = cmdline.hasOption(TOKEN_OPTION) ? cmdline.getOptionValue(TOKEN_OPTION) : null;

    boolean verbose = cmdline.hasOption(VERBOSE_OPTION);

    PrintStream out = new PrintStream(System.out, true, "UTF-8");

    TrecSearchThriftClient client = new TrecSearchThriftClient(cmdline.getOptionValue(HOST_OPTION),
            Integer.parseInt(cmdline.getOptionValue(PORT_OPTION)), group, token);

    TweetAnalyzer tokenizer = new TweetAnalyzer(Version.LUCENE_43, false); // no stemming
    Joiner joiner = Joiner.on(' ');
    String qrelsFile = cmdline.getOptionValue(QRELS_OPTION);
    // Ground truth: (query id, tweet id) -> relevance label.
    Table<Integer, Long, Integer> groundTruth = RunTemporalModel.loadGroundTruth(qrelsFile);

    for (cc.twittertools.search.TrecTopic query : topicsFile) {
        List<TResult> results = client.search(query.getQuery(), query.getQueryTweetTime(), numResults);
        // The TREC official qrels don't have the "MB" prefix and trailing zeros, so we
        // perform this transformation so that trec_eval doesn't complain. Hoisted out of
        // the per-result loop because it is constant for a query.
        Integer qid = Integer.parseInt(query.getId().replaceFirst("^MB0*", ""));
        Set<Long> tweetIds = new HashSet<Long>();
        for (TResult result : results) {
            // Skip duplicate tweets returned for the same query.
            if (!tweetIds.contains(result.id)) {
                tweetIds.add(result.id);
                // Only emit results that are actually judged in the qrels.
                if (groundTruth.contains(qid, result.id)) {
                    String qtext = joiner.join(LuceneTokenizer
                            .tokenize(tokenizer.tokenStream("text", new StringReader(query.getQuery()))));
                    String tweetText = joiner.join(LuceneTokenizer
                            .tokenize(tokenizer.tokenStream("text", new StringReader(result.text))));
                    int label = groundTruth.get(qid, result.id);
                    out.println(String.format("%d@%d@%d@%f@%s@%s", qid, result.id, label, result.rsv, qtext,
                            tweetText));
                }
            }
        }
    }
    out.close();
}

From source file:org.eclipse.incquery.runtime.localsearch.operations.extend.nobase.IterateOverEDatatypeInstances.java

/**
 * Collects, into {@code contents}, every value of every attribute (across all
 * given model objects) whose attribute type equals the given {@code dataType}.
 * Matching attributes per EClass are cached on the local-search backend.
 */
public IterateOverEDatatypeInstances(int position, EDataType dataType, Collection<EObject> allModelContents,
        IQueryBackend backend) {
    super(position);
    this.dataType = dataType;

    LocalSearchBackend lsBackend = (LocalSearchBackend) backend;
    // Cache: (data type, EClass) -> attributes of that EClass typed by the data type.
    Table<EDataType, EClass, Set<EAttribute>> cache = lsBackend.geteAttributesByTypeForEClass();

    for (EObject eObject : allModelContents) {
        EClass eClass = eObject.eClass();
        if (!cache.contains(dataType, eClass)) {
            // BUG FIX: the original cached an *empty* set whenever any attribute
            // matched (never adding the attribute itself), and cached nothing when
            // no attribute matched — so cache.get(...) below could return null and
            // throw an NPE, and the collected contents stayed empty. Collect the
            // matching attributes and cache the (possibly empty) set unconditionally,
            // mirroring the corrected viatra implementation of this class.
            Set<EAttribute> matching = Sets.<EAttribute>newHashSet();
            for (EAttribute eAttribute : eClass.getEAllAttributes()) {
                if (eAttribute.getEType().equals(dataType)) {
                    matching.add(eAttribute);
                }
            }
            cache.put(dataType, eClass, matching);
        }
        for (EAttribute eAttribute : cache.get(dataType, eClass)) {
            if (eAttribute.isMany()) {
                contents.addAll((Collection<?>) eObject.eGet(eAttribute));
            } else {
                contents.add(eObject.eGet(eAttribute));
            }
        }
    }
}

From source file:org.eclipse.viatra.query.runtime.localsearch.operations.extend.nobase.IterateOverEDatatypeInstances.java

/**
 * Returns the attributes of {@code eclass} whose type equals {@code dataType},
 * memoizing the result per (data type, EClass) pair in a backend-level cache.
 */
protected Stream<EAttribute> doGetEAttributes(EClass eclass, ISearchContext context) {
    @SuppressWarnings({ "unchecked" })
    Table<EDataType, EClass, Set<EAttribute>> cache = context.accessBackendLevelCache(getClass(), Table.class,
            HashBasedTable::create);
    Set<EAttribute> matching = cache.get(dataType, eclass);
    if (matching == null) {
        // Cache miss: filter the class's attributes once and remember the result.
        matching = eclass.getEAllAttributes().stream()
                .filter(attribute -> Objects.equals(attribute.getEType(), dataType))
                .collect(Collectors.toSet());
        cache.put(dataType, eclass, matching);
    }
    return matching.stream();
}

From source file:org.caleydo.data.importer.tcga.FirehoseProvider.java

/**
 * Converts a MAF (mutation annotation format) file into a gene x sample 0/1
 * mutation matrix written next to the input as "P&lt;name&gt;". Returns the
 * cached output file if it already exists, or {@code null} on I/O failure.
 */
private static File parseMAF(File maf) {

    File out = new File(maf.getParentFile(), "P" + maf.getName());
    if (out.exists())
        return out;
    log.fine(maf.getAbsolutePath() + " parsing maf file");
    final String TAB = "\t";

    try (BufferedReader reader = Files.newBufferedReader(maf.toPath(), Charset.forName("UTF-8"))) {
        // Guard against an empty file (readLine() == null would NPE on split).
        String headerLine = reader.readLine();
        if (headerLine == null) {
            throw new IOException("empty maf file");
        }
        List<String> header = Arrays.asList(headerLine.split(TAB));
        int geneIndex = header.indexOf("Hugo_Symbol");
        int sampleIndex = header.indexOf("Tumor_Sample_Barcode");
        if (geneIndex < 0 || sampleIndex < 0) {
            throw new IOException("missing Hugo_Symbol or Tumor_Sample_Barcode column");
        }
        // gene x sample x mutated
        Table<String, String, Boolean> mutated = TreeBasedTable.create();
        String line = null;
        while ((line = reader.readLine()) != null) {
            String[] columns = line.split(TAB);
            mutated.put(columns[geneIndex], columns[sampleIndex], Boolean.TRUE);
        }

        // Write to a temp file first, then atomically replace the target, so a
        // partially written matrix is never mistaken for the cached result.
        File tmp = new File(out.getParentFile(), out.getName() + ".tmp");
        List<String> cols = new ArrayList<>(mutated.columnKeySet());
        Set<String> rows = mutated.rowKeySet();
        // try-with-resources: the original leaked the writer on any exception,
        // and used the platform default charset while the reader pinned UTF-8.
        try (PrintWriter w = new PrintWriter(tmp, "UTF-8")) {
            w.append("Hugo_Symbol");
            for (String sample : cols) {
                w.append(TAB).append(sample);
            }
            w.println();
            for (String gene : rows) {
                w.append(gene);
                for (String sample : cols) {
                    // '1' if this gene is mutated in this sample, '0' otherwise.
                    w.append(TAB).append(mutated.contains(gene, sample) ? '1' : '0');
                }
                w.println();
            }
        }
        Files.move(tmp.toPath(), out.toPath(), StandardCopyOption.REPLACE_EXISTING);

        log.fine(maf.getAbsolutePath() + " parsed maf file stats: " + mutated.size() + " " + rows.size() + " "
                + cols.size());
        return out;
    } catch (IOException e) {
        log.log(Level.SEVERE, maf.getAbsolutePath() + " maf parsing error: " + e.getMessage(), e);
    }
    return null;
}

From source file:com.google.javascript.jscomp.Es6RenameVariablesInParamLists.java

/**
 * For each function, renames variables declared in the function body that
 * collide with names referenced from the parameter list (default-value and
 * computed-property expressions), so those parameter-list references are not
 * captured by body declarations after transpilation.
 */
@Override
public void visit(NodeTraversal t, Node n, Node parent) {
    // Arrow functions without blocked body cannot have declarations in the body
    if (!n.isFunction() || !n.getLastChild().isBlock()) {
        return; // nothing to rename
    }

    Node paramList = n.getSecondChild();
    // Collect every name referenced from default values and computed property
    // keys inside the parameter list.
    final CollectReferences collector = new CollectReferences();
    NodeTraversal.traverse(compiler, paramList, new NodeTraversal.AbstractPreOrderCallback() {
        @Override
        public final boolean shouldTraverse(NodeTraversal t, Node n, Node parent) {
            if (parent == null) {
                return true;
            }

            // Only descend into default-value expressions (last child of a
            // DEFAULT_VALUE node) and computed-prop key expressions (first
            // child of a COMPUTED_PROP node); everything else in the param
            // list declares names rather than referencing them.
            if ((parent.isDefaultValue() && n == parent.getLastChild())
                    || (parent.isComputedProp() && n == parent.getFirstChild())) {
                NodeTraversal.traverse(compiler, n, collector);
                return false;
            }
            return true;
        }
    });

    Node block = paramList.getNext();
    Es6SyntacticScopeCreator creator = new Es6SyntacticScopeCreator(compiler);
    Scope fScope = creator.createScope(n, t.getScope());
    Scope fBlockScope = creator.createScope(block, fScope);
    // (body root, old name) -> unique new name, for each body-scoped variable
    // whose name is referenced from the parameter list.
    Table<Node, String, String> renameTable = HashBasedTable.create();
    for (Var var : fBlockScope.getVarIterable()) {
        String oldName = var.getName();
        if (collector.currFuncReferences.contains(oldName)
                && !renameTable.contains(fBlockScope.getRootNode(), oldName)) {
            renameTable.put(fBlockScope.getRootNode(), oldName,
                    oldName + "$" + compiler.getUniqueNameIdSupplier().get());
        }
    }
    // Apply the renames throughout the function body.
    new NodeTraversal(compiler, new Es6RenameReferences(renameTable), new Es6SyntacticScopeCreator(compiler))
            .traverseInnerNode(block, block.getParent(), fScope);
}

From source file:org.shaf.core.util.TextMatrix.java

/**
 * Creates a new text matrix from the specified {@link Table table}.
 * /*www. ja  va2 s.  c  o  m*/
 * <p>
 * <b>Usage example:</b>
 * 
 * <pre>
 *      d   e   f                         0   1   2
 *    +---+---+---+                     +---+---+---+
 *  a |   | A |   |                   0 |   | A |   |
 *    +---+---+---+                     +---+---+---+
 *  b | B |   | C |  transforming to  1 | B |   | C | 
 *    +---+---+---+                     +---+---+---+
 *  c |   | D |   |                   2 |   | D |   |
 *    +---+---+---+                     +---+---+---+
 * 
 * Table&lt;String, String, String&gt; table = HashBasedTable.create();
 * table.put(&quot;a&quot;, &quot;e&quot;, &quot;A&quot;);
 * table.put(&quot;b&quot;, &quot;d&quot;, &quot;B&quot;);
 * table.put(&quot;b&quot;, &quot;f&quot;, &quot;C&quot;);
 * table.put(&quot;c&quot;, &quot;e&quot;, &quot;D&quot;);
 * System.out.println(&quot;table=&quot; + table);
 * 
 * TextMatrix matrix = TextMatrix.&lt;String, String, String&gt; create(table);
 * System.out.println(&quot;matrix=&quot; + matrix);
 * </pre>
 * 
 * </p>
 * 
 * <p>
 * <b>Output:</b>
 * 
 * <pre>
 * table={b={f=C, d=B}, c={e=D}, a={e=A}}
 * matrix=TextMatrix{[TextCell{(0, 1)=A}, TextCell{(1, 0)=B}, TextCell{(1, 2)=C}, TextCell{(2, 1)=D}]}
 * </pre>
 * 
 * </p>
 * 
 * @param tbl
 *            the converting table.
 * @return the created text matrix.
 */
public static final <R, C, V> TextMatrix create(final Table<R, C, V> tbl) {
    TextMatrix matrix = TextMatrix.create();

    Map<R, Integer> rows = new TreeMap<>();
    Map<C, Integer> cols = new TreeMap<>();
    for (Table.Cell<R, C, V> entry : tbl.cellSet()) {
        rows.put(entry.getRowKey(), 0);
        cols.put(entry.getColumnKey(), 0);
    }

    int row = 0;
    for (R r : rows.keySet()) {
        rows.put(r, row++);
    }

    int col = 0;
    for (C c : cols.keySet()) {
        cols.put(c, col++);
    }

    for (R r : rows.keySet()) {
        for (C c : cols.keySet()) {
            if (tbl.contains(r, c)) {
                matrix.putValue(rows.get(r), cols.get(c), tbl.get(r, c).toString());
            }
        }
    }

    return matrix;
}

From source file:com.ggvaidya.scinames.summary.DatasetSimilarityView.java

/**
 * Populates the similarity view: builds a symmetric table of pairwise
 * dataset similarity (shared recognized name clusters over their union,
 * as a percentage) and wires it into the table UI, one column per dataset.
 */
public void init() {
    // Setup stage.
    stage.setTitle("Timepoint similarity");

    // Setup table.
    controller.getTableEditableProperty().set(false);
    //controller.setTableColumnResizeProperty(TableView.CONSTRAINED_RESIZE_POLICY);
    ObservableList<TableColumn> cols = controller.getTableColumnsProperty();
    cols.clear();

    // Set up columns.
    TableColumn<Dataset, String> colTimepointName = new TableColumn<>("Timepoint");
    colTimepointName.setCellValueFactory(new PropertyValueFactory<>("name"));
    colTimepointName.setPrefWidth(100.0);
    cols.add(colTimepointName);

    // Precalculating. Also track the least-similar pair for the header text.
    double lowest = 100.0;
    Dataset tpLowest1 = null;
    Dataset tpLowest2 = null;

    LOGGER.info("Starting precalculating.");

    Table<Dataset, Dataset, String> data = HashBasedTable.create();
    for (Dataset tp : projectView.getProject().getDatasets()) {
        for (Dataset colTP : projectView.getProject().getDatasets()) {
            // Similarity is symmetric and stored in both directions below, so
            // skip a pair that has already been computed.
            if (data.contains(tp, colTP))
                continue;

            NameClusterManager manager = projectView.getProject().getNameClusterManager();
            Set<NameCluster> leftTP = tp.getRecognizedNames(projectView.getProject())
                    .map(n -> manager.getCluster(n).get()).collect(Collectors.toSet());
            Set<NameCluster> rightTP = colTP.getRecognizedNames(projectView.getProject())
                    .map(n -> manager.getCluster(n).get()).collect(Collectors.toSet());

            // Overlapping name concepts.
            Sets.SetView<NameCluster> union = Sets.union(leftTP, rightTP);
            Sets.SetView<NameCluster> intersection = Sets.intersection(leftTP, rightTP);

            // Percentage of shared clusters: |intersection| / |union| * 100.
            double res = (((double) intersection.size()) / union.size() * 100);

            if (lowest > res) {
                lowest = res;
                tpLowest1 = tp;
                tpLowest2 = colTP;
            }

            String result = new BigDecimal(res).setScale(2, RoundingMode.DOWN).toPlainString() + "% ("
                    + intersection.size() + " identical out of " + union.size() + ")";

            // Store both orientations so lookups work regardless of order.
            data.put(tp, colTP, result);
            data.put(colTP, tp, result);
        }
    }

    LOGGER.info("Precalculating done.");

    // Setup headertext.
    String str_lowest = "";
    if (tpLowest1 != null && tpLowest2 != null) {
        str_lowest = " (lowest: " + new BigDecimal(lowest).setScale(2, RoundingMode.DOWN).toPlainString()
                + "% between " + tpLowest1.getName() + " and " + tpLowest2.getName() + ")";
    }
    controller.getHeaderTextProperty().set("How similar is each timepoint to every other?" + str_lowest);
    controller.getHeaderTextEditableProperty().set(false);

    // Create a column for every timepoint here.
    projectView.getProject().getDatasets().forEach((Dataset colTP) -> {
        TableColumn<Dataset, String> colTimepoint = new TableColumn<>(colTP.getName());
        colTimepoint.setCellValueFactory((TableColumn.CellDataFeatures<Dataset, String> features) -> {
            Dataset tp = features.getValue();

            // Cell value: precomputed similarity between the row's dataset and
            // this column's dataset.
            return new ReadOnlyStringWrapper(data.get(tp, colTP));
        });
        colTimepoint.setPrefWidth(100.0);
        cols.add(colTimepoint);
    });

    // Set table items.
    List<Dataset> timepoints = projectView.getProject().getDatasets();
    controller.getTableItemsProperty().set(FXCollections.observableList(timepoints));
}

From source file:com.github.rinde.rinsim.ui.renderers.WarehouseRenderer.java

/**
 * Returns a table of the graph's connections with at most one direction kept
 * per bidirectional road, so each road is drawn only once.
 */
private Table<Point, Point, Connection<?>> filterConnections() {
    final Table<Point, Point, Connection<?>> result = HashBasedTable.create();
    for (final Connection<?> conn : graph.getConnections()) {
        // Skip this connection when its reverse is already stored.
        final boolean reverseAlreadyKept = result.contains(conn.to(), conn.from());
        if (!reverseAlreadyKept) {
            result.put(conn.from(), conn.to(), conn);
        }
    }
    return result;
}

From source file:org.eclipse.incquery.tooling.core.project.PluginXmlModifier.java

/**
 * Indexes the extension under its (id, extension point) pair, creating the
 * bucket list on first use. Extensions missing either key are ignored.
 */
private void addExtensionToMap(ExtensionData data, Table<String, String, List<ExtensionData>> table) {
    final String id = data.getId();
    final String point = data.getPoint();
    // Extensions without both an id and an extension point cannot be indexed.
    if (Strings.isNullOrEmpty(id) || Strings.isNullOrEmpty(point)) {
        return;
    }
    final List<ExtensionData> bucket;
    if (table.contains(id, point)) {
        bucket = table.get(id, point);
    } else {
        bucket = Lists.newArrayList();
        table.put(id, point, bucket);
    }
    bucket.add(data);
}

From source file:com.google.api.codegen.discovery.config.ApiaryConfigToSampleConfigConverter.java

/**
 * Creates a {@link MethodInfo} for the given method, deriving field metadata,
 * request/response types, page-streaming configuration and media
 * upload/download support from {@code apiaryConfig}.
 */
private MethodInfo createMethod(Method method) {
    // The order of fields must be preserved, so we use an ImmutableMap.
    ImmutableMap.Builder<String, FieldInfo> fieldsBuilder = new ImmutableMap.Builder<>();
    TypeInfo requestBodyType = null;
    Type requestType = apiaryConfig.getType(method.getRequestTypeUrl());

    boolean isPageStreamingResourceSetterInRequestBody = false;
    String requestPageTokenName = "";

    for (String fieldName : apiaryConfig.getMethodParams(method.getName())) {
        Field field = apiaryConfig.getField(requestType, fieldName);
        // If one of the method arguments has the field name "request$", it's the
        // request body.
        if (fieldName.equals(DiscoveryImporter.REQUEST_FIELD_NAME)) {
            requestBodyType = createTypeInfo(field, method);

            // Below is the request body specific portion of the page streaming logic:
            // check whether the body type itself carries a page-token field.
            for (Field field2 : apiaryConfig.getType(field.getTypeUrl()).getFieldsList()) {
                for (String tokenName : PAGE_TOKEN_NAMES) {
                    if (field2.getName().equals(tokenName)) {
                        isPageStreamingResourceSetterInRequestBody = true;
                        requestPageTokenName = tokenName;
                        break;
                    }
                }
            }
            continue;
        }
        fieldsBuilder.put(field.getName(), createFieldInfo(field, requestType, method));
    }
    ImmutableMap<String, FieldInfo> fields = fieldsBuilder.build();

    TypeInfo requestTypeInfo = createTypeInfo(method, true);
    // The response type is only recorded if the method actually declares one.
    TypeInfo responseTypeInfo = null;
    String responseTypeUrl = typeNameGenerator.getResponseTypeUrl(method.getResponseTypeUrl());
    if (!Strings.isNullOrEmpty(responseTypeUrl)) {
        responseTypeInfo = createTypeInfo(method, false);
    }

    // Heuristic implementation interprets method to be page streaming iff one of the names
    // "pageToken" or "nextPageToken" occurs among the fields of both the method's response type and
    // either the method's request (query parameters) or request body.
    boolean hasResponsePageToken = false;
    String responsePageTokenName = "";
    Type responseType = apiaryConfig.getType(method.getResponseTypeUrl());
    if (responseType != null) {
        String fieldName;
        // Labeled break: stop at the first page-token field found in the response.
        FIELDS: for (Field field : responseType.getFieldsList()) {
            fieldName = field.getName();
            for (String tokenName : PAGE_TOKEN_NAMES) {
                if (fieldName.equals(tokenName)) {
                    hasResponsePageToken = true;
                    responsePageTokenName = tokenName;
                    break FIELDS;
                }
            }
        }
    }
    boolean isPageStreaming = false;
    if (hasResponsePageToken) {
        if (isPageStreamingResourceSetterInRequestBody) {
            isPageStreaming = true;
        } else {
            // Otherwise the page token must appear among the request's query
            // parameters for the method to count as page streaming.
            Table<Type, String, Field> requestFields = apiaryConfig.getFields();
            for (String tokenName : PAGE_TOKEN_NAMES) {
                if (requestFields.contains(requestType, tokenName)) {
                    isPageStreaming = true;
                    requestPageTokenName = tokenName;
                    break;
                }
            }
        }
    }
    FieldInfo pageStreamingResourceField = null;
    if (isPageStreaming) {
        Field field = getPageStreamingResourceField(responseType);
        pageStreamingResourceField = createFieldInfo(field, responseType, method);
    }

    boolean hasMediaUpload = apiaryConfig.getMediaUpload().contains(method.getName());

    MethodInfo methodInfo = MethodInfo.newBuilder().verb(apiaryConfig.getHttpMethod(method.getName()))
            .nameComponents(
                    typeNameGenerator.getMethodNameComponents(methodNameComponents.get(method.getName())))
            .fields(fields).requestType(requestTypeInfo).requestBodyType(requestBodyType)
            .responseType(responseTypeInfo).isPageStreaming(isPageStreaming)
            .pageStreamingResourceField(pageStreamingResourceField)
            .isPageStreamingResourceSetterInRequestBody(isPageStreamingResourceSetterInRequestBody)
            .requestPageTokenName(requestPageTokenName).responsePageTokenName(responsePageTokenName)
            .hasMediaUpload(hasMediaUpload)
            // Ignore media download for methods supporting media upload, as
            // Apiary cannot combine both in a single request, and no sensible
            // use cases are known for download with a method supporting upload.
            // https://developers.google.com/discovery/v1/using#discovery-doc-methods
            .hasMediaDownload(!hasMediaUpload && apiaryConfig.getMediaDownload().contains(method.getName()))
            .authScopes(apiaryConfig.getAuthScopes(method.getName())).build();
    return methodInfo;
}