Example usage for com.google.common.collect Table.put

List of usage examples for com.google.common.collect Table.put

Introduction

On this page you can find example usage for com.google.common.collect Table.put.

Prototype

@Nullable
V put(R rowKey, C columnKey, V value);

Document

Associates the specified value with the specified keys. If the table already contained a mapping for those keys, the old value is replaced with the specified value. Returns the value previously associated with the keys, or null if no mapping existed for the keys.
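
The return value makes it easy to tell whether a put replaced an existing mapping. A minimal, self-contained sketch (the class and variable names are illustrative):

import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;

public class TablePutExample {
    public static void main(String[] args) {
        // Row key: origin city, column key: destination city, value: distance in km.
        Table<String, String, Integer> distances = HashBasedTable.create();

        // No mapping exists yet for this (row, column) pair, so put returns null.
        Integer previous = distances.put("London", "Paris", 344);
        System.out.println(previous); // null

        // A second put for the same keys replaces the old value and returns it.
        previous = distances.put("London", "Paris", 343);
        System.out.println(previous); // 344

        System.out.println(distances.get("London", "Paris")); // 343
    }
}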

Usage

From source file:com.przemo.etl.dataproviders.CSVDataProvider.java
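
In this example, put fills the table row by row while reading a CSV file: the row number is the row key, the column header is the column key, and the cell text is the value.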

/**
 * Reads data from a given file. The headers array gets populated either with default headers or with headers read in from the file, if so requested.
 * @return the data read from the file as a Table
 */
@Override
public Table readData() {
    Table data = HashBasedTable.create();
    if (file.exists() && file.canRead()) {
        try (BufferedReader r = new BufferedReader(new FileReader(file))) {
            String line;
            boolean dhReadIn = false;
            int rowNumber = 0;
            while ((line = r.readLine()) != null) {
                String[] lineArray = line.split(sep);
                if (!dhReadIn) {
                    if (dh) {
                        headers = lineArray;
                    } else {
                        headers = defaultHeaders(lineArray.length);
                    }
                    dhReadIn = true;
                } else {
                    for (int c = 0; c < lineArray.length; c++) {
                        data.put(rowNumber, headers[c], lineArray[c]);
                    }
                }
                rowNumber++;
            }
        } catch (Exception ex) {
            Logger.getLogger(CSVDataProvider.class.getName()).log(Level.SEVERE, null, ex);
        }

    }
    return data;
}

From source file:org.broad.igv.sam.SpliceJunctionHelper.java
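
In this example, put adds a combined splice junction feature keyed by junction start and end position whenever the combined map has no entry yet for that pair.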

/**
 * We keep separate splice junction information by strand.
 * This combines the information from both strands.
 */
private void combineStrandJunctionsMaps() {
    Table<Integer, Integer, SpliceJunctionFeature> combinedStartEndJunctionsMap = HashBasedTable
            .create(posStartEndJunctionsMap);

    for (Table.Cell<Integer, Integer, SpliceJunctionFeature> negJunctionCell : negStartEndJunctionsMap
            .cellSet()) {
        int junctionStart = negJunctionCell.getRowKey();
        int junctionEnd = negJunctionCell.getColumnKey();
        SpliceJunctionFeature negFeat = negJunctionCell.getValue();

        SpliceJunctionFeature junction = combinedStartEndJunctionsMap.get(junctionStart, junctionEnd);

        if (junction == null) {
            junction = new SpliceJunctionFeature(negFeat.getChr(), junctionStart, junctionEnd, Strand.POSITIVE);
            combinedStartEndJunctionsMap.put(junctionStart, junctionEnd, junction);
        }

        int newJunctionDepth = junction.getJunctionDepth() + negFeat.getJunctionDepth();
        junction.addRead(negFeat.getStart(), negFeat.getEnd());
        junction.setJunctionDepth(newJunctionDepth);
    }

    filteredCombinedFeatures = new ArrayList<SpliceJunctionFeature>(combinedStartEndJunctionsMap.values());
    FeatureUtils.sortFeatureList(filteredCombinedFeatures);
}

From source file:com.przemo.etl.transformations.FlatDateGapFiller.java
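
In this example, put appends newly generated rows that fill gaps in a date column, copying the values of all other columns from the previous row.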

@Override
protected void fillGap(Integer previousKey, Integer rowNumber, Table<Integer, Object, Object> data) {
    Integer maxRowNumber = Collections.max(data.rowKeySet());
    if (gapColumn != null && data.get(previousKey, gapColumn) instanceof Date
            && data.get(rowNumber, gapColumn) instanceof Date) {
        Calendar c1 = Calendar.getInstance();
        Calendar c2 = Calendar.getInstance();
        c1.setTime((Date) data.get(previousKey, gapColumn));
        c2.setTime((Date) data.get(rowNumber, gapColumn));
        // One step of the configured width, expressed in milliseconds.
        Calendar cx = Calendar.getInstance();
        cx.setTimeInMillis(timeStep * getMilisOfField(timeField));
        // Append one new row per step until the gap up to the later date is filled.
        while (c2.getTimeInMillis() > (c1.getTimeInMillis() + cx.getTimeInMillis())) {
            c1.setTimeInMillis(c1.getTimeInMillis() + cx.getTimeInMillis());
            data.put(++maxRowNumber, gapColumn, c1.getTime());
            for (Object s : data.columnKeySet()) {
                if (s instanceof String && !s.equals(gapColumn)) {
                    String col = (String) s;
                    data.put(maxRowNumber, col, data.get(previousKey, s));
                }
            }
        }
    }

}

From source file:i5.las2peer.services.recommender.librec.main.LibRecDemo.java
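
In this example, put builds a one-cell test matrix that holds the single rating left out of the training data.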

/**
 * Interface to run the leave-one-out approach.
 */
private void runLeaveOneOut(LineConfiger params) throws Exception {

    int numThreads = params.getInt("-t", Runtime.getRuntime().availableProcessors()); // defaults to the number of processors

    Thread[] ts = new Thread[numThreads];
    Recommender[] algos = new Recommender[numThreads];

    // average performance of k-fold
    Map<Measure, Double> avgMeasure = new HashMap<>();

    int rows = rateMatrix.numRows();
    int cols = rateMatrix.numColumns();

    int count = 0;
    for (MatrixEntry me : rateMatrix) {
        double rui = me.get();
        if (rui <= 0)
            continue;

        int u = me.row();
        int i = me.column();

        // leave the current rating out
        SparseMatrix trainMatrix = new SparseMatrix(rateMatrix);
        trainMatrix.set(u, i, 0);
        SparseMatrix.reshape(trainMatrix);

        // build test matrix
        Table<Integer, Integer, Double> dataTable = HashBasedTable.create();
        Multimap<Integer, Integer> colMap = HashMultimap.create();
        dataTable.put(u, i, rui);
        colMap.put(i, u);
        SparseMatrix testMatrix = new SparseMatrix(rows, cols, dataTable, colMap);

        // get a recommender
        Recommender algo = getRecommender(new SparseMatrix[] { trainMatrix, testMatrix }, count + 1);

        algos[count] = algo;
        ts[count] = new Thread(algo);
        ts[count].start();

        if (numThreads == 1) {
            ts[count].join(); // fold by fold

            for (Entry<Measure, Double> en : algo.measures.entrySet()) {
                Measure m = en.getKey();
                double val = avgMeasure.containsKey(m) ? avgMeasure.get(m) : 0.0;
                avgMeasure.put(m, val + en.getValue());
            }
        } else if (count < numThreads) {
            count++;
        }

        if (count == numThreads) {
            // parallel fold
            for (Thread t : ts)
                t.join();
            count = 0;

            // record performance
            for (Recommender algo2 : algos) {
                for (Entry<Measure, Double> en : algo2.measures.entrySet()) {
                    Measure m = en.getKey();
                    double val = avgMeasure.containsKey(m) ? avgMeasure.get(m) : 0.0;
                    avgMeasure.put(m, val + en.getValue());
                }
            }
        }
    }

    // normalization
    int size = rateMatrix.size();
    for (Entry<Measure, Double> en : avgMeasure.entrySet()) {
        Measure m = en.getKey();
        double val = en.getValue();
        avgMeasure.put(m, val / size);
    }

    printEvalInfo(algos[0], avgMeasure);
}

From source file:org.apache.sentry.provider.db.generic.UpdatableCache.java
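
In this example, put installs an empty privilege set for each new (group, role) pair; the set is then filled in place.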

/**
 * Builds a cache replica with the latest values.
 *
 * @return cache replica with the latest values
 */
private Table<String, String, Set<String>> loadFromRemote() throws Exception {
    Table<String, String, Set<String>> tempCache = HashBasedTable.create();
    final String requestor = UserGroupInformation.getLoginUser().getShortUserName();

    final SentryGenericServiceClient client = getClient();
    final Set<TSentryRole> tSentryRoles = client.listAllRoles(requestor, componentType);

    for (TSentryRole tSentryRole : tSentryRoles) {
        final String roleName = tSentryRole.getRoleName();
        final Set<TSentryPrivilege> tSentryPrivileges = client.listPrivilegesByRoleName(requestor, roleName,
                componentType, serviceName);
        for (String group : tSentryRole.getGroups()) {
            Set<String> currentPrivileges = tempCache.get(group, roleName);
            if (currentPrivileges == null) {
                currentPrivileges = new HashSet<>();
                tempCache.put(group, roleName, currentPrivileges);
            }
            for (TSentryPrivilege tSentryPrivilege : tSentryPrivileges) {
                currentPrivileges.add(tSentryPrivilegeConverter.toString(tSentryPrivilege));
            }
        }
    }
    return tempCache;
}

From source file:librec.data.SparseMatrix.java
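
In this example, put copies every non-zero matrix entry into a (row, column, value) table.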

/**
 * @return the data table of this matrix as (row, column, value) cells
 */
public Table<Integer, Integer, Double> getDataTable() {
    Table<Integer, Integer, Double> res = HashBasedTable.create();

    for (MatrixEntry me : this) {
        if (me.get() != 0)
            res.put(me.row(), me.column(), me.get());
    }

    return res;
}

From source file:pt.org.aguiaj.classes.ClassModel.java
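
In this example, put registers a contract proxy for each (object, method) pair, reusing a cached proxy instance when one of the same class already exists.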

public void createContractProxies(Table<Object, Method, ContractDecorator<?>> table, Object object,
        Collection<Method> methods) {
    Map<Class<? extends ContractDecorator<?>>, ContractDecorator<?>> map = newHashMap();

    for (Method m : methods) {

        if (hasContracts(object.getClass(), m)) {
            for (ContractDecorator<?> proxy : createContractProxies(object, m)) {

                if (map.containsKey(proxy.getClass())) {
                    table.put(object, m, map.get(proxy.getClass()));
                } else {
                    table.put(object, m, proxy);
                    map.put((Class<? extends ContractDecorator<?>>) proxy.getClass()
                            .asSubclass(ContractDecorator.class), proxy);
                }
            }
        }

    }
}

From source file:hu.ppke.itk.nlpg.purepos.decoder.BeamedViterbi.java
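
In this example, put stores transition probabilities in a table keyed by context n-gram (row) and candidate tag (column) while the beam is advanced.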

public List<Pair<List<Integer>, Double>> beamedSearch(final NGram<Integer> start,
        final List<String> observations, int resultsNumber) {
    HashMap<NGram<Integer>, Node> beam = new HashMap<NGram<Integer>, Node>();

    beam.put(start, startNode(start));
    boolean isFirst = true;
    int pos = 0;
    for (String obs : observations) {
        // System.err.println(obs);

        // logger.trace("Current observation " + obs);
        // logger.trace("\tCurrent states:");
        // for (Entry<NGram<Integer>, Node> entry : beam.entrySet()) {
        // logger.trace("\t\t" + entry.getKey() + " - " + entry.getValue());
        // }

        HashMap<NGram<Integer>, Node> newBeam = new HashMap<NGram<Integer>, Node>();

        Table<NGram<Integer>, Integer, Double> nextProbs = HashBasedTable.create();
        Map<NGram<Integer>, Double> obsProbs = new HashMap<NGram<Integer>, Double>();
        Set<NGram<Integer>> contexts = beam.keySet();

        Map<NGram<Integer>, Map<Integer, Pair<Double, Double>>> nexts = getNextProbs(contexts, obs, pos,
                isFirst);

        for (Map.Entry<NGram<Integer>, Map<Integer, Pair<Double, Double>>> nextsEntry : nexts.entrySet()) {
            NGram<Integer> context = nextsEntry.getKey();
            Map<Integer, Pair<Double, Double>> nextContextProbs = nextsEntry.getValue();
            for (Map.Entry<Integer, Pair<Double, Double>> entry : nextContextProbs.entrySet()) {
                Integer tag = entry.getKey();
                nextProbs.put(context, tag, entry.getValue().getLeft());
                obsProbs.put(context.add(tag), entry.getValue().getRight());
            }
        }
        // for (Integer t : nextProbs.keySet()) {
        // logger.trace("\t\tNext node:" + context + t);
        // logger.trace("\t\tnode currentprob:"
        // + (beam.get(context) + nextProbs.get(t).getLeft()));
        // logger.trace("\t\tnode emissionprob:"
        // + nextProbs.get(t).getRight());
        // logger.trace("\n");
        // // logger.trace("\t\tNext node:" + context + t);
        // }
        for (Cell<NGram<Integer>, Integer, Double> cell : nextProbs.cellSet()) {
            Integer nextTag = cell.getColumnKey();
            NGram<Integer> context = cell.getRowKey();
            Double transVal = cell.getValue();
            NGram<Integer> newState = context.add(nextTag);
            Node from = beam.get(context);
            double newVal = transVal + from.getWeight();
            update(newBeam, newState, newVal, from);
        }
        // adding observation probabilities
        // logger.trace("beam" + newBeam);
        if (nextProbs.size() > 1)
            for (NGram<Integer> tagSeq : newBeam.keySet()) {
                // Integer tag = tagSeq.getLast();
                Node node = newBeam.get(tagSeq);
                // Double prevVal = node.getWeight();

                Double obsProb = obsProbs.get(tagSeq);
                // logger.trace("put to beam: " + context + "(from) "
                // + tagSeq + " " + prevVal + "+" + obsProb);
                node.setWeight(obsProb + node.getWeight());
            }

        beam = prune(newBeam);
        isFirst = false;
        // for (Entry<NGram<Integer>, Node> e : beam.entrySet()) {
        // logger.trace("\t\tNode state: " + e.getKey() + " "
        // + e.getValue());
        // }
        ++pos;
    }
    return findMax(beam, resultsNumber);
}

From source file:com.ggvaidya.scinames.summary.NameClustersView.java
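
In this example, put precalculates the display string for each table cell, keyed by name cluster (row) and column name (column).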

public void init() {
    // Setup stage.
    stage.setTitle("Name clusters");

    // Setup headertext.
    controller.getHeaderTextProperty().set("All name clusters used in this project:");
    controller.getHeaderTextEditableProperty().set(false);

    // Setup table.
    controller.getTableEditableProperty().set(false);
    //controller.setTableColumnResizeProperty(TableView.CONSTRAINED_RESIZE_POLICY);
    ObservableList<TableColumn> cols = controller.getTableColumnsProperty();
    cols.clear();

    // Precalculate!
    SciNames.reportMemoryStatus("Started precalculating name clusters");
    Table<NameCluster, String, String> precalc = HashBasedTable.create();

    // Set up columns.
    cols.add(createColumnFromPrecalc("name", precalc));
    cols.add(createColumnFromPrecalc("cluster", precalc));
    cols.add(createColumnFromPrecalc("cluster_count", precalc));
    cols.add(createColumnFromPrecalc("datasets", precalc));
    cols.add(createColumnFromPrecalc("dates", precalc));

    cols.add(createColumnFromPrecalc("recognized_in_first_dataset", precalc));
    cols.add(createColumnFromPrecalc("recognized_in_last_dataset", precalc));

    Project project = projectView.getProject();
    Dataset firstDataset = project.getFirstDataset().orElse(null);
    Dataset lastDataset = project.getLastDataset().orElse(null);

    for (NameCluster cluster : project.getSpeciesNameClusters().collect(Collectors.toList())) {
        precalc.put(cluster, "name", cluster.getName().getFullName());

        Set<Name> namesInCluster = cluster.getNames();
        precalc.put(cluster, "cluster",
                namesInCluster.stream().map(n -> n.getFullName()).collect(Collectors.joining(", ")));
        precalc.put(cluster, "cluster_count", String.valueOf(namesInCluster.size()));

        List<Dataset> datasetsInCluster = cluster.getFoundIn().stream().sorted().collect(Collectors.toList());
        precalc.put(cluster, "datasets",
                datasetsInCluster.stream().map(ds -> ds.toString()).collect(Collectors.joining("; ")));

        List<SimplifiedDate> datesInCluster = datasetsInCluster.stream().map(ds -> ds.getDate())
                .collect(Collectors.toList());
        precalc.put(cluster, "dates", datesInCluster.stream().map(date -> date.toString()).distinct()
                .collect(Collectors.joining("; ")));

        List<TaxonConcept> taxonConceptsInCluster = cluster.getTaxonConcepts(projectView.getProject());
        precalc.put(cluster, "taxon_concepts",
                taxonConceptsInCluster.stream().map(tc -> tc.toString()).collect(Collectors.joining("; ")));
        precalc.put(cluster, "taxon_concept_count", String.valueOf(taxonConceptsInCluster.size()));

        if (firstDataset != null)
            precalc.put(cluster, "recognized_in_first_dataset",
                    cluster.containsAny(project.getRecognizedNames(firstDataset)) ? "yes" : "no");

        if (lastDataset != null)
            precalc.put(cluster, "recognized_in_last_dataset",
                    cluster.containsAny(project.getRecognizedNames(lastDataset)) ? "yes" : "no");
    }

    SciNames.reportMemoryStatus("Completed precalculating name clusters");

    // Set table items.
    SortedList<NameCluster> sorted = FXCollections
            .observableArrayList(projectView.getProject().getSpeciesNameClusters().collect(Collectors.toList()))
            .sorted();
    controller.getTableItemsProperty().set(sorted);
    sorted.comparatorProperty().bind(controller.getTableView().comparatorProperty());
}

From source file:org.seedstack.seed.web.internal.security.shiro.ShiroWebModule.java
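
In this example, put collects per-path configuration values keyed by filter key (row) and path pattern (column) before binding each path-matching filter.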

private void setupFilterChainConfigs() {
    Table<Key<? extends PathMatchingFilter>, String, String> configs = HashBasedTable.create();

    for (Map.Entry<String, FilterKey[]> filterChain : filterChains.entrySet()) {
        for (int i = 0; i < filterChain.getValue().length; i++) {
            FilterKey configKey = filterChain.getValue()[i];
            if (!configKey.getValue().isEmpty()) {
                Key<? extends Filter> key = configKey.getKey();
                if (!PathMatchingFilter.class.isAssignableFrom(key.getTypeLiteral().getRawType())) {
                    throw new ConfigurationException(
                            "Config information requires a PathMatchingFilter - can't apply to "
                                    + key.getTypeLiteral().getRawType());
                }
                configs.put(castToPathMatching(key), filterChain.getKey(), configKey.getValue());
            } else if (PathMatchingFilter.class
                    .isAssignableFrom(configKey.getKey().getTypeLiteral().getRawType())) {
                configs.put(castToPathMatching(configKey.getKey()), filterChain.getKey(), "");
            }
        }
    }
    for (Key<? extends PathMatchingFilter> filterKey : configs.rowKeySet()) {
        bindPathMatchingFilter(filterKey, configs.row(filterKey));
    }
}