List of usage examples for com.google.common.collect.TreeBasedTable#create()
public static <R extends Comparable, C extends Comparable, V> TreeBasedTable<R, C, V> create()
From source file: fi.jyu.ties454.cleaningAgents.infra.Floor.java
private Floor() { this(TreeBasedTable.create()); }
From source file: org.opennms.netmgt.jasper.analytics.DataSourceUtils.java
public static RowSortedTable<Integer, String, Double> fromDs(JRRewindableDataSource ds, String... fieldNames) throws JRException { RowSortedTable<Integer, String, Double> table = TreeBasedTable.create(); int rowIndex = 0; // Build the table, row by row while (ds.next()) { for (String fieldName : fieldNames) { JRDesignField field = new JRDesignField(); field.setDescription(fieldName); field.setName(fieldName);/*ww w. j a va 2 s .co m*/ // Some data-source implementation check the value class field.setValueClass(Object.class); table.put(rowIndex, fieldName, getValueAsDouble(ds.getFieldValue(field))); } rowIndex++; } ds.moveFirst(); return table; }
From source file: de.tudarmstadt.ukp.experiments.argumentation.sequence.evaluation.helpers.FinalTableExtractor.java
/**
 * Parses a cross-domain results dump and pivots it into a (feature set x column) table.
 * Each record in {@code inFile} is expected to be a feature-set header line followed by
 * exactly 12 result lines of the form {@code <measure> <value>}.
 *
 * NOTE(review): the final CSV export is commented out, so {@code outFile} is currently
 * unused — confirm whether that is intentional for this deprecated path.
 *
 * @param inFile  path to the raw results file
 * @param outFile intended CSV destination (currently not written)
 * @throws IOException if the input file cannot be read
 * @throws IllegalStateException if a feature set matches zero or multiple domains
 */
@Deprecated
public static void extractCDResults(String inFile, String outFile) throws IOException {
    File file = new File(inFile);
    Table<String, String, String> table = TreeBasedTable.create();
    List<String> lines = IOUtils.readLines(new FileInputStream(file));
    Iterator<String> iterator = lines.iterator();
    while (iterator.hasNext()) {
        String featureSet = iterator.next();
        // shorten fs name: strip the leading "./" path prefix
        featureSet = featureSet.replaceAll("^\\./", "");
        // extract domain: exactly one DocumentDomain name must occur in the feature-set path
        DocumentDomain domain = null;
        for (DocumentDomain currentDomain : DocumentDomain.values()) {
            if (featureSet.contains(currentDomain.toString())) {
                if (domain != null) {
                    throw new IllegalStateException("!!!");
                }
                domain = currentDomain;
            }
        }
        if (domain == null) {
            throw new IllegalStateException("No domain found! " + featureSet);
        }
        // keep only the first path segment as the row key
        String shortFeatureSet = featureSet.replaceAll("/.*", "");
        System.out.println(shortFeatureSet);
        // underscore-separated settings; index 1 = feature set id, index 4 = cluster setting
        String[] fsSettings = shortFeatureSet.split("_", 6);
        String clusters = fsSettings[4];
        if (includeRow(clusters)) {
            table.put(shortFeatureSet, "FS", fsSettings[1]);
        }
        // 12 lines with results follow the header line; consume them all even when filtered out
        for (int i = 0; i < 12; i++) {
            String line = iterator.next();
            String[] split = line.split("\\s+");
            String measure = split[0];
            Double value = Double.valueOf(split[1]);
            // only a50 and a100,s1000
            if (includeRow(clusters)) {
                table.put(shortFeatureSet, "Clusters", clusters);
                if (includeColumn(measure)) {
                    table.put(shortFeatureSet, domain.toString() + measure,
                            String.format(Locale.ENGLISH, "%.3f", value));
                }
            }
        }
    }
    // tableToCsv(table, new FileWriter(outFile));
}
From source file: demo.project.ExportTable.java
/**
 * Writes the header rows for the given ranker: the literal "Item" cell followed by one
 * tab-separated label per column (composite columns expand via {@code addAll}).
 *
 * @param w      destination writer; one line is emitted per header row
 * @param ranker the column ranker whose columns are dumped
 */
private void dumpHeader(PrintWriter w, ColumnRanker ranker) {
    w.append("Item");
    RowSortedTable<Integer, Integer, String> labelGrid = TreeBasedTable.create();
    int headerRow = 0;
    int nextCol = 0;
    Iterator<ARankColumnModel> columns = table.getColumnsOf(ranker);
    while (columns.hasNext()) {
        ARankColumnModel model = columns.next();
        labelGrid.put(headerRow, nextCol, model.getLabel());
        nextCol++;
        if (model instanceof ACompositeRankColumnModel) {
            // composite columns contribute extra cells; addAll returns the next free column
            nextCol = addAll(labelGrid, headerRow, nextCol, (ACompositeRankColumnModel) model);
        }
    }
    for (int gridRow : labelGrid.rowKeySet()) {
        for (int gridCol = 0; gridCol < nextCol; ++gridCol) {
            String label = labelGrid.get(gridRow, gridCol);
            w.append('\t');
            if (label != null) {
                w.append(label);
            }
        }
        w.println();
    }
}
From source file: com.assylias.jbloomberg.HistoricalData.java
/** * Adds a value to the HistoricalData structure for that security / field / date combination. *//*from w w w . jav a 2 s . c om*/ @Override synchronized void add(LocalDate date, String security, String field, Object value) { Table<LocalDate, String, TypedObject> securityTable = data.get(security); if (securityTable == null) { securityTable = TreeBasedTable.create(); //to have the dates in order data.put(security, securityTable); } securityTable.put(date, field, TypedObject.of(value)); }
From source file: blockplus.model.OptionsSupplier.java
@Override public Options get() { final Colors color = this.context.side(); final Board board = this.context.board(); final Iterable<IPosition> lights = board.getLights(color); final Pieces remainingPieces = this.context.getPlayer().remainingPieces(); final Table<IPosition, Polyomino, List<Set<IPosition>>> table = TreeBasedTable.create(); for (int radius = MIN_RADIUS; radius <= MAX_RADIUS; ++radius) { final Map<IPosition, Set<IPosition>> potentialPositions = this.getPotentialPositionsByLight(board, color, lights, radius);//from w w w.java2 s. c o m final Set<Polyomino> polyominos = POLYOMINOS_BY_RADIUS.get(radius); for (final Polyomino polyomino : polyominos) { if (remainingPieces.contains(polyomino)) { final Iterable<PolyominoInstance> instances = polyomino.get(); for (final Entry<IPosition, Set<IPosition>> entry : potentialPositions.entrySet()) { final IPosition position = entry.getKey(); final List<Set<IPosition>> options = Lists.newArrayList(); for (final IPosition potentialPosition : entry.getValue()) options.addAll( this.getLegalPositions(color, board, position, potentialPosition, instances)); if (!options.isEmpty()) table.put(position, polyomino, options); } } } } return new Options(table); }
From source file: co.cask.cdap.logging.save.KafkaLogWriterPlugin.java
/**
 * Creates the Kafka log writer plugin: reads every tuning setting from {@code cConfig},
 * validates that each is strictly positive, then wires up the Avro file writer, the
 * checkpointing log file writer and the log-cleanup helper.
 *
 * @throws Exception if any collaborator (e.g. the checkpoint manager) cannot be created
 * @throws IllegalArgumentException if any configured value is not strictly positive
 * @throws NullPointerException if the log base dir is not configured
 */
@Inject
public KafkaLogWriterPlugin(CConfiguration cConfig, FileMetaDataManager fileMetaDataManager,
        LocationFactory locationFactory, CheckpointManagerFactory checkpointManagerFactory) throws Exception {
    this.serializer = new LoggingEventSerializer();
    // TreeBasedTable keeps its row keys in sorted order
    this.messageTable = TreeBasedTable.create();
    this.logBaseDir = cConfig.get(LoggingConfiguration.LOG_BASE_DIR);
    Preconditions.checkNotNull(this.logBaseDir, "Log base dir cannot be null");
    LOG.info(String.format("Log base dir is %s", this.logBaseDir));

    // --- configuration reads; each value is validated immediately after it is read ---
    long retentionDurationDays = cConfig.getLong(LoggingConfiguration.LOG_RETENTION_DURATION_DAYS,
            LoggingConfiguration.DEFAULT_LOG_RETENTION_DURATION_DAYS);
    Preconditions.checkArgument(retentionDurationDays > 0, "Log file retention duration is invalid: %s",
            retentionDurationDays);

    long maxLogFileSizeBytes = cConfig.getLong(LoggingConfiguration.LOG_MAX_FILE_SIZE_BYTES, 20 * 1024 * 1024);
    Preconditions.checkArgument(maxLogFileSizeBytes > 0, "Max log file size is invalid: %s",
            maxLogFileSizeBytes);

    int syncIntervalBytes = cConfig.getInt(LoggingConfiguration.LOG_FILE_SYNC_INTERVAL_BYTES, 50 * 1024);
    Preconditions.checkArgument(syncIntervalBytes > 0, "Log file sync interval is invalid: %s",
            syncIntervalBytes);

    long checkpointIntervalMs = cConfig.getLong(LoggingConfiguration.LOG_SAVER_CHECKPOINT_INTERVAL_MS,
            LoggingConfiguration.DEFAULT_LOG_SAVER_CHECKPOINT_INTERVAL_MS);
    Preconditions.checkArgument(checkpointIntervalMs > 0, "Checkpoint interval is invalid: %s",
            checkpointIntervalMs);

    long inactiveIntervalMs = cConfig.getLong(LoggingConfiguration.LOG_SAVER_INACTIVE_FILE_INTERVAL_MS,
            LoggingConfiguration.DEFAULT_LOG_SAVER_INACTIVE_FILE_INTERVAL_MS);
    Preconditions.checkArgument(inactiveIntervalMs > 0, "Inactive interval is invalid: %s", inactiveIntervalMs);

    this.eventBucketIntervalMs = cConfig.getLong(LoggingConfiguration.LOG_SAVER_EVENT_BUCKET_INTERVAL_MS,
            LoggingConfiguration.DEFAULT_LOG_SAVER_EVENT_BUCKET_INTERVAL_MS);
    Preconditions.checkArgument(this.eventBucketIntervalMs > 0, "Event bucket interval is invalid: %s",
            this.eventBucketIntervalMs);

    this.maxNumberOfBucketsInTable = cConfig.getLong(
            LoggingConfiguration.LOG_SAVER_MAXIMUM_INMEMORY_EVENT_BUCKETS,
            LoggingConfiguration.DEFAULT_LOG_SAVER_MAXIMUM_INMEMORY_EVENT_BUCKETS);
    Preconditions.checkArgument(this.maxNumberOfBucketsInTable > 0,
            "Maximum number of event buckets in memory is invalid: %s", this.maxNumberOfBucketsInTable);

    long topicCreationSleepMs = cConfig.getLong(LoggingConfiguration.LOG_SAVER_TOPIC_WAIT_SLEEP_MS,
            LoggingConfiguration.DEFAULT_LOG_SAVER_TOPIC_WAIT_SLEEP_MS);
    Preconditions.checkArgument(topicCreationSleepMs > 0, "Topic creation wait sleep is invalid: %s",
            topicCreationSleepMs);

    logCleanupIntervalMins = cConfig.getInt(LoggingConfiguration.LOG_CLEANUP_RUN_INTERVAL_MINS,
            LoggingConfiguration.DEFAULT_LOG_CLEANUP_RUN_INTERVAL_MINS);
    Preconditions.checkArgument(logCleanupIntervalMins > 0, "Log cleanup run interval is invalid: %s",
            logCleanupIntervalMins);

    // --- collaborators built from the validated settings ---
    AvroFileWriter avroFileWriter = new AvroFileWriter(fileMetaDataManager, cConfig, locationFactory.create(""),
            logBaseDir, serializer.getAvroSchema(), maxLogFileSizeBytes, syncIntervalBytes, inactiveIntervalMs);
    checkpointManager = checkpointManagerFactory.create(KafkaTopic.getTopic(), CHECKPOINT_ROW_KEY_PREFIX);
    this.logFileWriter = new CheckpointingLogFileWriter(avroFileWriter, checkpointManager, checkpointIntervalMs);

    String namespacesDir = cConfig.get(Constants.Namespace.NAMESPACES_DIR);
    long retentionDurationMs = TimeUnit.MILLISECONDS.convert(retentionDurationDays, TimeUnit.DAYS);
    this.logCleanup = new LogCleanup(fileMetaDataManager, locationFactory.create(""), namespacesDir,
            retentionDurationMs);
}
From source file: com.gmarciani.gmparser.models.parser.cyk.recognition.CYKMatrix.java
/**
 * (Re)initializes the Cocke-Younger-Kasami recognition matrix: a square table of empty
 * {@code Alphabet} cells with 1-based row/column indices. An empty word yields a 1x1 matrix.
 */
private void generate() {
    // guard against a zero-sized matrix for the empty word
    final int dimension = Math.max(1, this.getWord().length());
    this.matrix = TreeBasedTable.create();
    for (int row = 1; row <= dimension; row++) {
        for (int column = 1; column <= dimension; column++) {
            this.matrix.put(row, column, new Alphabet());
        }
    }
}
From source file: org.sonar.server.permission.ws.template.SearchTemplatesDataLoader.java
private Table<Long, String, Integer> userCountByTemplateIdAndPermission(DbSession dbSession, List<Long> templateIds) { final Table<Long, String, Integer> userCountByTemplateIdAndPermission = TreeBasedTable.create(); dbClient.permissionTemplateDao().usersCountByTemplateIdAndPermission(dbSession, templateIds, context -> { CountByTemplateAndPermissionDto row = (CountByTemplateAndPermissionDto) context.getResultObject(); userCountByTemplateIdAndPermission.put(row.getTemplateId(), row.getPermission(), row.getCount()); });//from w ww .j av a 2 s .c o m return userCountByTemplateIdAndPermission; }
From source file: org.eclipse.viatra.query.tooling.core.project.PluginXmlModifier.java
/**
 * Adds a collection of extensions to the current xml model. If previous extensions with
 * the same (id, point) key are already registered, the new nodes are inserted at the old
 * nodes' document location and the old nodes are removed; otherwise the new nodes are
 * inserted so that extensions stay in lexicographical order of their id.
 */
public void addExtensions(Iterable<ExtensionData> contributedExtensions) {
    // group the incoming extensions by (row = id, column = extension point)
    Table<String, String, List<ExtensionData>> table = TreeBasedTable.create();
    for (ExtensionData data : contributedExtensions) {
        addExtensionToMap(data, table);
    }
    for (Cell<String, String, List<ExtensionData>> cell : table.cellSet()) {
        if (extensionTable.contains(cell.getRowKey(), cell.getColumnKey())) {
            // Updating existing items; using its original location
            final List<ExtensionData> oldList = extensionTable.get(cell.getRowKey(), cell.getColumnKey());
            final ExtensionData oldData = oldList.get(0);
            for (ExtensionData data : cell.getValue()) {
                // adopt the node into this document before inserting it
                document.adoptNode(data.getNode());
                Node parentNode = oldData.getNode().getParentNode();
                if (parentNode != null) {
                    parentNode.insertBefore(data.getNode(), oldData.getNode());
                } else {
                    document.getDocumentElement().appendChild(data.getNode());
                }
            }
            // Removing old items
            for (ExtensionData data : oldList) {
                removeNode(data.getNode());
            }
        } else {
            // New items, adding nodes in lexicographical order
            for (ExtensionData data : cell.getValue()) {
                document.adoptNode(data.getNode());
                final Node root = document.getDocumentElement();
                Node nextIdChild = findExtensionFollowingId(cell.getRowKey(), root);
                if (nextIdChild != null) {
                    root.insertBefore(data.getNode(), nextIdChild);
                } else {
                    // no following id, insert to end
                    root.appendChild(data.getNode());
                }
            }
        }
        // record the new extensions as the current ones for this key
        extensionTable.put(cell.getRowKey(), cell.getColumnKey(), cell.getValue());
    }
}