Usage examples for com.google.common.collect.Table#put
@Nullable V put(R rowKey, C columnKey, V value);
From source file:org.eclipse.incquery.runtime.localsearch.operations.extend.nobase.IterateOverEDatatypeInstances.java
/**
 * Collects, from {@code allModelContents}, every attribute value whose EAttribute's
 * type equals {@code dataType}, caching the (EDataType, EClass) -> attributes lookup
 * on the local-search backend.
 *
 * <p>BUG FIX: the original code put an <em>empty</em> set into the cache (never adding
 * the matching attributes), and when an EClass had no matching attribute it never
 * cached anything at all, so the subsequent {@code cache.get(...)} returned null and
 * the for-each threw a NullPointerException. The cache entry is now always created
 * and actually populated.
 *
 * @param position         variable position handed to the superclass
 * @param dataType         the EDataType whose instances are being iterated
 * @param allModelContents model contents to scan
 * @param backend          expected to be a LocalSearchBackend (cast below)
 */
public IterateOverEDatatypeInstances(int position, EDataType dataType, Collection<EObject> allModelContents,
        IQueryBackend backend) {
    super(position);
    this.dataType = dataType;
    LocalSearchBackend lsBackend = (LocalSearchBackend) backend;
    // Shared cache: (data type, EClass) -> attributes of that EClass with a matching type.
    Table<EDataType, EClass, Set<EAttribute>> cache = lsBackend.geteAttributesByTypeForEClass();
    for (EObject eObject : allModelContents) {
        EClass eClass = eObject.eClass();
        if (!cache.contains(dataType, eClass)) {
            // Compute and cache the matching attributes once per EClass.
            // An empty set is cached too, so the negative result is memoized.
            Set<EAttribute> matching = Sets.<EAttribute>newHashSet();
            for (EAttribute eAttribute : eClass.getEAllAttributes()) {
                if (eAttribute.getEType().equals(dataType)) {
                    matching.add(eAttribute);
                }
            }
            cache.put(dataType, eClass, matching);
        }
        for (EAttribute eAttribute : cache.get(dataType, eClass)) {
            if (eAttribute.isMany()) {
                contents.addAll((Collection<?>) eObject.eGet(eAttribute));
            } else {
                contents.add(eObject.eGet(eAttribute));
            }
        }
    }
}
From source file:com.assylias.jbloomberg.HistoricalData.java
/** * Adds a value to the HistoricalData structure for that security / field / date combination. *///from w ww .java2s. c o m @Override synchronized void add(LocalDate date, String security, String field, Object value) { Table<LocalDate, String, TypedObject> securityTable = data.get(security); if (securityTable == null) { securityTable = TreeBasedTable.create(); //to have the dates in order data.put(security, securityTable); } securityTable.put(date, field, TypedObject.of(value)); }
From source file:com.infinities.keystone4j.catalog.driver.CatalogJpaDriver.java
@Override public Map<String, Map<String, Map<String, String>>> getCatalog(String userid, String tenantid, Metadata metadata) {//from www .j ava 2s .c om Table<Type, String, Option> substitutions = Config.Instance.getTable(); substitutions.put(Type.DEFAULT, "tenant_id", Options.newStrOpt("tenant_id", tenantid)); substitutions.put(Type.DEFAULT, "user_id", Options.newStrOpt("user_id", userid)); List<Endpoint> endpoints = endpointDao.listAllEnabled(); Map<String, Map<String, Map<String, String>>> catalog = new HashMap<String, Map<String, Map<String, String>>>(); for (Endpoint endpoint : endpoints) { if (!endpoint.getService().getEnabled()) { continue; } String url = null; try { url = formatUrl(endpoint.getUrl(), substitutions); } catch (Exception e) { continue; } String region = endpoint.getRegionid(); String serviceType = endpoint.getService().getType(); Map<String, String> defaultService = new HashMap<String, String>(); defaultService.put("id", endpoint.getId()); defaultService.put("name", endpoint.getService().getName()); defaultService.put("publicURL", ""); Map<String, Map<String, String>> regionDict = new HashMap<String, Map<String, String>>(); catalog.put(region, regionDict); regionDict.put(serviceType, defaultService); String interfaceUrl = String.format("%sURL", endpoint.getInterfaceType()); defaultService.put(interfaceUrl, url); } return catalog; }
From source file:org.pau.assetmanager.viewmodel.stocks.HistoricalStocksValuesDownloader.java
private static Table<Integer, Integer, Double> getYearToMonthToValueTable(String symbol, Integer startYear, Integer finalYear) {//from w w w . j a v a 2s. c om try { SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd"); Calendar calendar = GregorianCalendar.getInstance(); Table<Integer, Integer, Double> yearMonthValueTable = HashBasedTable.<Integer, Integer, Double>create(); Table<Integer, Integer, Integer> yearMonthTimesTable = HashBasedTable .<Integer, Integer, Integer>create(); List<Map<String, String>> csvContents = getContents(symbol, startYear, finalYear); for (Map<String, String> fieldToValueMap : csvContents) { Double open = Double.parseDouble(fieldToValueMap.get(OPEN)); Double close = Double.parseDouble(fieldToValueMap.get(OPEN)); Double averagePrice = (open + close) / 2.0; Date date = simpleDateFormat.parse(fieldToValueMap.get(DATE)); calendar.setTime(date); Integer year = calendar.get(Calendar.YEAR); Integer month = calendar.get(Calendar.MONTH); Integer day = calendar.get(Calendar.DAY_OF_MONTH); Double currentPrice = yearMonthValueTable.get(year, month); if (currentPrice == null) { currentPrice = averagePrice; } else { currentPrice += averagePrice; } yearMonthValueTable.put(year, month, currentPrice); Integer times = yearMonthTimesTable.get(year, month); if (times == null) { times = 1; } else { times++; } yearMonthTimesTable.put(year, month, times); } for (Integer year : yearMonthTimesTable.rowKeySet()) { for (Integer month : yearMonthTimesTable.row(year).keySet()) { yearMonthValueTable.put(year, month, yearMonthValueTable.get(year, month) / yearMonthTimesTable.get(year, month).doubleValue()); } } return yearMonthValueTable; } catch (ParseException e) { throw new AssetManagerRuntimeException("Error parsing date from CSV file for '" + symbol + "' between '" + startYear + "' and '" + finalYear + "'.", e); } }
From source file:com.rackspacecloud.blueflood.io.astyanax.AEnumIO.java
/**
 * Reads the metrics_enum column family for the specified locators and organizes
 * the data as a table of (locator, enum value hash) -> enum value string — a
 * direct representation of how the data is laid out in the column family.
 *
 * @param locators the locators to read enum hash mappings for
 * @return table keyed by locator and enum value hash
 */
@Override
public Table<Locator, Long, String> getEnumHashValuesForLocators(final List<Locator> locators) {
    Table<Locator, Long, String> result = HashBasedTable.create();
    Map<Locator, ColumnList<Long>> mappings = getEnumHashMappings(locators);
    for (Map.Entry<Locator, ColumnList<Long>> entry : mappings.entrySet()) {
        Locator locator = entry.getKey();
        for (Column<Long> column : entry.getValue()) {
            // Column name = enum value hash, column value = enum value string.
            result.put(locator, column.getName(), column.getStringValue());
        }
    }
    return result;
}
From source file:org.sonar.server.db.migrations.v52.MoveProjectProfileAssociation.java
/**
 * Loads every quality-profile key from {@code rules_profiles}, indexed first by
 * language and then by profile name.
 *
 * @param context migration context used to prepare the select
 * @return table of language -> profile name -> profile key
 * @throws SQLException if the query fails
 */
private Table<String, String, String> getProfileKeysByLanguageThenName(final Context context) throws SQLException {
    final Table<String, String, String> profilesByLanguageAndName = HashBasedTable.create();
    Select selectProfiles = context.prepareSelect("SELECT kee, name, language FROM rules_profiles");
    try {
        selectProfiles.list(new RowReader<Void>() {
            @Override
            public Void read(Row row) throws SQLException {
                // Column order from the SELECT: 1 = kee, 2 = name, 3 = language.
                profilesByLanguageAndName.put(row.getString(3), row.getString(2), row.getString(1));
                return null;
            }
        });
    } finally {
        selectProfiles.close();
    }
    return profilesByLanguageAndName;
}
From source file:com.infinities.keystone4j.common.Config.java
public void configure(Table<Type, String, Option> conf) { for (Cell<Type, String, Option> cell : FILE_OPTIONS.cellSet()) { conf.put(cell.getRowKey(), cell.getColumnKey(), cell.getValue().clone()); }//w w w .ja va2 s . c o m }
From source file:com.ggvaidya.scinames.summary.LumpsAndSplitsView.java
/**
 * Builds (or rebuilds) the "Lumps and splits" view: window title, header text,
 * double-click navigation, and a precalculated table mapping each LUMP/SPLIT
 * change to the text of every displayed column.
 */
public void init() {
    // Setup stage.
    stage.setTitle("Lumps and splits");
    // Setup headertext.
    controller.getHeaderTextEditableProperty().set(false);
    headerText = controller.getHeaderTextProperty();
    //if(headerText.get().equals("")) {
    headerText.set("Display all lumps and splits.");
    //}
    // Re-run init() whenever the header text changes, rebuilding the whole view.
    controller.getHeaderTextProperty().addListener((c, a, b) -> {
        init();
    });
    // Load up name cluster manager.
    // NameClusterManager nameClusterManager = projectView.getProject().getNameClusterManager();
    // On double-click, go straight to that splump
    // Double-click on rows should take you to the entry.
    controller.getTableView().setOnMouseClicked(evt -> {
        if (evt.getButton() == MouseButton.PRIMARY && evt.getClickCount() == 2) {
            // Double-click!
            @SuppressWarnings("rawtypes")
            Change ch = (Change) controller.getTableView().getSelectionModel().getSelectedItem();
            projectView.openDetailedView(ch);
            evt.consume();
        }
    });
    // Setup table.
    controller.getTableEditableProperty().set(false);
    //controller.setTableColumnResizeProperty(TableView.CONSTRAINED_RESIZE_POLICY);
    ObservableList<TableColumn> cols = controller.getTableColumnsProperty();
    cols.clear();
    // Set table items: only LUMP and SPLIT changes are shown in this view.
    List<Change> changes = projectView.getProject().getChanges()
            .filter(c -> c.getType().equals(ChangeType.LUMP) || c.getType().equals(ChangeType.SPLIT))
            .collect(Collectors.toList());
    SortedList<Change> sorted = FXCollections.observableArrayList(changes).sorted();
    controller.getTableItemsProperty().set(sorted);
    sorted.comparatorProperty().bind(controller.getTableView().comparatorProperty());
    // Precalculate every cell as (change, column name) -> display text,
    // so table cells render from this lookup rather than recomputing.
    Project project = projectView.getProject();
    Table<Change, String, String> precalc = HashBasedTable.create();
    int index = 0;
    for (Change change : changes) {
        index++;
        precalc.put(change, "index", String.valueOf(index));
        precalc.put(change, "id", change.getId().toString());
        precalc.put(change, "type", change.getType().toString());
        precalc.put(change, "from",
                change.getFromStream().map(n -> n.getFullName()).collect(Collectors.joining(", ")));
        precalc.put(change, "to",
                change.getToStream().map(n -> n.getFullName()).collect(Collectors.joining(", ")));
        precalc.put(change, "dataset",
                change.getDataset().getName() + " (" + change.getDataset().getDate().toString() + ")");
        precalc.put(change, "year", change.getDataset().getDate().getYearAsString());
        precalc.put(change, "change", change.toString());
        // Partial reversions: any change that reverses this one, fully or in part.
        precalc.put(change, "reversions",
                project.getChangesReversing(change).map(ch -> ch.toString()).collect(Collectors.joining("; ")));
        precalc.put(change, "reversion_count", String.valueOf(project.getChangesReversing(change).count()));
        precalc.put(change, "reverts_a_previous_change", (project.getChangesReversing(change)
                // Did any reversing change take place before this change?
                .anyMatch(ch -> ch.getDataset().getDate().compareTo(change.getDataset().getDate()) < 0)) ? "yes"
                        : "no");
        precalc.put(change, "reverts_a_later_change", (project.getChangesReversing(change)
                // Did any reversing change take place AFTER this change? (note the '>')
                .anyMatch(ch -> ch.getDataset().getDate().compareTo(change.getDataset().getDate()) > 0)) ? "yes"
                        : "no");
        /* // TODO: broken! This returns 'yes' when changes are empty.
        precalc.put(change, "reverts_all_previous_changes", (project.getChangesReversing(change)
                // Did every change take place before this change?
                .allMatch(ch -> ch.getDataset().getDate().compareTo(change.getDataset().getDate()) < 0)) ?
                "yes" : "no");*/
        // Complete (perfect) reversions: changes that exactly undo this one.
        precalc.put(change, "complete_reversions", project.getChangesPerfectlyReversing(change)
                .map(ch -> ch.toString()).collect(Collectors.joining("; ")));
        precalc.put(change, "complete_reversions_summary", project.getPerfectlyReversingSummary(change));
        precalc.put(change, "complete_reversion_count",
                String.valueOf(project.getChangesPerfectlyReversing(change).count()));
        precalc.put(change, "completely_reverts_a_previous_change", (project.getChangesPerfectlyReversing(change)
                // Did any perfectly-reversing change take place before this change?
                .anyMatch(ch -> ch.getDataset().getDate().compareTo(change.getDataset().getDate()) < 0)) ? "yes"
                        : "no");
        precalc.put(change, "completely_reverts_a_later_change", (project.getChangesPerfectlyReversing(change)
                // Did any perfectly-reversing change take place AFTER this change? (note the '>')
                .anyMatch(ch -> ch.getDataset().getDate().compareTo(change.getDataset().getDate()) > 0)) ? "yes"
                        : "no");
        /* // TODO: broken! This returns 'yes' when changes are empty.
        precalc.put(change, "completely_reverts_all_previous_changes", (project.getChangesPerfectlyReversing(change)
                // Did every change take place before this change?
                .allMatch(ch -> ch.getDataset().getDate().compareTo(change.getDataset().getDate()) < 0)) ?
                "yes" : "no");*/
    }
    // Set up columns.
    cols.add(createTableColumnForTable("index", 40.0, precalc));
    cols.add(createTableColumnForTable("type", 40.0, precalc));
    cols.add(createTableColumnForTable("from", 200.0, precalc));
    cols.add(createTableColumnForTable("to", 200.0, precalc));
    cols.add(createTableColumnForTable("dataset", 100.0, precalc));
    cols.add(createTableColumnForTable("year", 100.0, precalc));
    // Identify reversions, classified as:
    // (3) "reversion rate" as the proportion of all corrections that partially reverted an earlier correction
    // (4) "perfect revisionary rate", in which a lump is paired with a split that perfectly reverts the change made earlier
    cols.add(createTableColumnForTable("change", 50.0, precalc));
    // Partial and complete reversions
    cols.add(createTableColumnForTable("reversions", 200.0, precalc));
    cols.add(createTableColumnForTable("reversion_count", 200.0, precalc));
    cols.add(createTableColumnForTable("reverts_a_previous_change", 100.0, precalc));
    cols.add(createTableColumnForTable("reverts_a_later_change", 100.0, precalc));
    //cols.add(createTableColumnForTable("reverts_all_previous_changes", 100.0, precalc));
    // Complete reversions
    cols.add(createTableColumnForTable("complete_reversions", 200.0, precalc));
    cols.add(createTableColumnForTable("complete_reversions_summary", 200.0, precalc));
    cols.add(createTableColumnForTable("complete_reversion_count", 200.0, precalc));
    cols.add(createTableColumnForTable("completely_reverts_a_previous_change", 100.0, precalc));
    cols.add(createTableColumnForTable("completely_reverts_a_later_change", 100.0, precalc));
    // cols.add(createTableColumnForTable("perfectly_reverts_all_previous_changes", 100.0, precalc));
    // Mainly useful for linking data from this table with others generated in the same run,
    // uncomment if needed I guess.
    //cols.add(createTableColumnForTable("id", 40.0, precalc));
}
From source file:org.clueminer.scatter.matrix.ScatterMatrixPanel.java
/**
 * Renders a scatter-plot matrix for the given clustering on the Swing EDT:
 * one chart per attribute pair (lower triangle of the grid) plus a shared legend.
 * No-op grid-wise when the clustering is null or empty; an empty first cluster
 * is logged as an error.
 */
public void setClustering(final Clustering<E, C> clustering) {
    SwingUtilities.invokeLater(new Runnable() {
        @Override
        public void run() {
            removeAll();
            JPanel chart;
            GridBagConstraints c = new GridBagConstraints();
            c.fill = GridBagConstraints.BOTH;
            c.anchor = GridBagConstraints.CENTER;
            c.weightx = 1.0;
            c.weighty = 1.0;
            if (clustering != null && clustering.size() > 0) {
                Cluster first = clustering.get(0);
                if (first.size() > 0) {
                    int attrCnt = first.attributeCount();
                    // Lower-triangle layout: one plot per attribute pair (j < i),
                    // placed at grid cell (j, i - 1).
                    for (int i = 0; i < attrCnt; i++) {
                        for (int j = 0; j < i; j++) {
                            chart = clusteringPlot(clustering, j, i);
                            c.gridx = j;
                            c.gridy = i - 1;
                            add(chart, c);
                        }
                    }
                    //place legend
                    c.gridx = attrCnt - 2;
                    c.gridy = 0;
                    c.fill = GridBagConstraints.BOTH;
                    int i = 0;
                    // Legend entries: (row index, cluster name) -> name/color/marker.
                    Table<Integer, String, LegendEntry> labels = newTable();
                    SeriesColorMarkerLineStyleCycler generator = new SeriesColorMarkerLineStyleCycler();
                    for (Cluster<E> clust : clustering) {
                        // Cycle through marker styles so each cluster gets a distinct one.
                        Marker m = generator.getNextSeriesColorMarkerLineStyle().getMarker();
                        labels.put(i, clust.getName(), new LegendEntry(clust.getName(), clust.getColor(), m));
                        i++;
                    }
                    legend.setLabels(labels);
                    add(legend, c);
                } else {
                    logger.log(Level.SEVERE, "empty cluster");
                }
            }
            revalidate();
            validate();
            repaint();
        }
    });
}
From source file:com.przemo.etl.dataproviders.DefaultDatabaseDataProvider.java
/**
 * Reads the entire configured database table into a Guava Table keyed by
 * 0-based row index and column name.
 *
 * <p>BUG FIX: the original never closed the {@link ResultSet} (resource leak)
 * and re-read the {@code ResultSetMetaData} column names on every row; the
 * names are now resolved once up front and the result set is closed in a
 * {@code finally} block.
 *
 * <p>NOTE(review): {@code tableName} is concatenated into the SQL text —
 * ensure it can never be attacker-controlled, or validate it against a
 * whitelist before use.
 *
 * @return the populated table; empty when connector/tableName are unset or on error
 */
@Override
public Table readData() {
    Table t = HashBasedTable.create();
    if (connector != null && tableName != null) {
        ResultSet res = null;
        try {
            res = connector.query("select * from " + tableName);
            // Resolve column names once instead of per row/column (1-based JDBC indices).
            int cols = res.getMetaData().getColumnCount();
            String[] columnNames = new String[cols + 1];
            for (int i = 1; i <= cols; i++) {
                columnNames[i] = res.getMetaData().getColumnName(i);
            }
            int counter = 0;
            while (res.next()) {
                for (int i = 1; i <= cols; i++) {
                    t.put(counter, columnNames[i], res.getObject(i));
                }
                counter++;
            }
        } catch (Exception ex) {
            Logger.getLogger(DefaultDatabaseDataProvider.class.getName()).log(Level.SEVERE, null, ex);
        } finally {
            // Always release the JDBC result set, even after a failure.
            if (res != null) {
                try {
                    res.close();
                } catch (Exception ignored) {
                    // Best-effort close; nothing useful to do if it fails.
                }
            }
        }
    }
    return t;
}