Usage examples for `com.google.common.collect.ImmutableTable.builder()`
public static <R, C, V> Builder<R, C, V> builder()
From source file:org.caleydo.core.util.impute.KNNImpute.java
@Override
protected Table<Integer, Integer, Float> compute() {
    // KNN imputation of missing expression values (modeled after R's impute.knn):
    // the matrix has genes in rows and samples in columns.
    //
    // Parameters read from `desc`:
    //   rowmax - maximum fraction of missing data allowed in any row; rows with more
    //            missing values than that are imputed with the per-sample mean instead
    //            of KNN.
    //
    // For each gene with missing values, the k nearest neighbor genes (Euclidean
    // distance over the columns where the gene is NOT missing) supply the imputed
    // value as the average of their non-missing entries. If all neighbors are missing
    // a given element, the overall column mean is used instead.
    // (That per-gene work happens inside the ImputeKNNMean / ImputeSampleMean tasks;
    // this method only partitions the genes and schedules the tasks.)

    // Hard failure: a column with too many NaNs makes imputation meaningless.
    if (toomanyNaNsInAColumn())
        throw new IllegalStateException();
    final float rowMax = desc.getRowmax();
    // rowmax is optional; NaN/Inf means "no row threshold" and every gene stays a neighbor.
    final boolean validRowMax = !Float.isInfinite(rowMax) && !Float.isNaN(rowMax);
    // Threshold expressed as an absolute count of NaNs per row (rowmax * #samples).
    final int max = validRowMax ? Math.round(desc.getRowmax() * samples) : 0;
    // Genes eligible to act as neighbors during KNN imputation.
    List<Gene> neighborhood;
    int withMissing = 0;
    Collection<ForkJoinTask<Void>> tasks = new ArrayList<>();
    if (!validRowMax) {
        neighborhood = genes;// all genes
    } else {
        neighborhood = new ArrayList<>(genes.size());
        for (Gene gene : genes) {
            if (gene.getNaNs() == 0) {// nothing to impute
                neighborhood.add(gene);
            } else if (validRowMax && gene.getNaNs() > max) {
                // too many NaNs: impute with the sample mean; such a gene is
                // excluded from the neighborhood. (validRowMax is always true
                // on this branch - kept as in the original.)
                tasks.add(new ImputeSampleMean(gene));
            } else {
                // usable neighbor, but it still has gaps of its own to fill
                neighborhood.add(gene);
                withMissing++;
            }
        }
    }
    if (withMissing > 0)
        tasks.add(new ImputeKNNMean(neighborhood));
    // Runs the scheduled tasks in this ForkJoin context and waits for completion.
    invokeAll(tasks);
    // Collect all imputed cells into an immutable (row, column) -> value table.
    ImmutableTable.Builder<Integer, Integer, Float> b = ImmutableTable.builder();
    for (Gene gene : genes) {
        if (gene.isAnySet()) {
            gene.fillImpute(b);
        }
    }
    return b.build();
}
From source file:org.opennms.netmgt.integrations.R.RScriptExecutor.java
/**
 * Converts an RFC 4180 CSV string (with a header row) to an immutable table,
 * row-keyed by 0-based record index and column-keyed by header name.
 * Cells that cannot be parsed as a double are stored as {@code Double.NaN}.
 */
protected static ImmutableTable<Long, String, Double> fromCsv(final String csv) throws IOException {
    final ImmutableTable.Builder<Long, String, Double> table = ImmutableTable.builder();
    try (StringReader reader = new StringReader(csv);
            CSVParser parser = new CSVParser(reader, CSVFormat.RFC4180.withHeader());) {
        final Map<String, Integer> columns = parser.getHeaderMap();
        long row = 0;
        for (CSVRecord record : parser) {
            // One table cell per known column; unparsable values become NaN.
            for (String column : columns.keySet()) {
                Double cell;
                try {
                    cell = Double.valueOf(record.get(column));
                } catch (NumberFormatException e) {
                    cell = Double.NaN;
                }
                table.put(row, column, cell);
            }
            row++;
        }
    }
    return table.build();
}
From source file:org.rm3l.ddwrt.tiles.status.wan.WANMonthlyTrafficTile.java
@Nullable @Override//from www. ja v a2s. c o m protected Loader<NVRAMInfo> getLoader(int id, Bundle args) { if (nbRunsLoader <= 0 || mAutoRefreshToggle) { setLoadingViewVisibility(View.VISIBLE); } return new AsyncTaskLoader<NVRAMInfo>(this.mParentFragmentActivity) { @Nullable @Override public NVRAMInfo loadInBackground() { try { Log.d(LOG_TAG, "Init background loader for " + WANMonthlyTrafficTile.class + ": routerInfo=" + mRouter + " / this.mAutoRefreshToggle= " + mAutoRefreshToggle + " / nbRunsLoader=" + nbRunsLoader); if (nbRunsLoader > 0 && !mAutoRefreshToggle) { //Skip run Log.d(LOG_TAG, "Skip loader run"); throw new DDWRTTileAutoRefreshNotAllowedException(); } nbRunsLoader++; @NotNull final NVRAMInfo nvramInfo = new NVRAMInfo(); NVRAMInfo nvramInfoTmp = null; try { //noinspection ConstantConditions nvramInfoTmp = NVRAMParser.parseNVRAMOutput(SSHUtils.getManualProperty(mRouter, mGlobalPreferences, "nvram show 2>/dev/null | grep traff[-_]")); } finally { if (nvramInfoTmp != null) { nvramInfo.putAll(nvramInfoTmp); } } traffDataTableBuilder = ImmutableTable.builder(); if (nvramInfo.isEmpty()) { throw new DDWRTNoDataException("No Data!"); } @SuppressWarnings("ConstantConditions") final Set<Map.Entry<Object, Object>> entries = nvramInfo.getData().entrySet(); for (final Map.Entry<Object, Object> entry : entries) { final Object key; final Object value; if (entry == null || (key = entry.getKey()) == null || (value = entry.getValue()) == null) { continue; } if (!StringUtils.startsWithIgnoreCase(key.toString(), "traff-")) { continue; } final String month = key.toString().replace("traff-", DDWRTCompanionConstants.EMPTY_STRING); final String yearlyTraffData = value.toString(); final List<String> yearlyTraffDataList = MONTHLY_TRAFF_DATA_SPLITTER .splitToList(yearlyTraffData); if (yearlyTraffDataList == null || yearlyTraffDataList.isEmpty()) { continue; } int dayNum = 1; for (final String dailyInOutTraffData : yearlyTraffDataList) { if 
(StringUtils.contains(dailyInOutTraffData, "[")) { continue; } final List<String> dailyInOutTraffDataList = DAILY_TRAFF_DATA_SPLITTER .splitToList(dailyInOutTraffData); if (dailyInOutTraffDataList == null || dailyInOutTraffDataList.size() < 2) { continue; } final String inTraff = dailyInOutTraffDataList.get(0); final String outTraff = dailyInOutTraffDataList.get(1); traffDataTableBuilder.put(month, dayNum++, Lists.newArrayList(Double.parseDouble(inTraff), Double.parseDouble(outTraff))); } } traffData = traffDataTableBuilder.build(); return nvramInfo; } catch (@NotNull final Exception e) { e.printStackTrace(); return new NVRAMInfo().setException(e); } } }; }
From source file:uk.ac.open.kmi.iserve.discovery.disco.impl.SparqlLogicConceptMatcher.java
/**
 * Runs the given SPARQL SELECT query against the configured endpoint and
 * collects (origin, destination) concept pairs with their match type into a
 * table.
 * <p>
 * If building a {@link URI} from a result binding fails, processing stops and
 * whatever was collected so far is returned (the exception is logged, not
 * rethrown). The query execution is always closed.
 *
 * @param queryStr the SPARQL query to execute; expected to bind ORIGIN_VAR and
 *                 DESTINATION_VAR in each solution
 * @return an immutable table keyed by (origin URI, destination URI) with the
 *         corresponding {@link MatchResult}
 */
private Table<URI, URI, MatchResult> queryForMatchResults(String queryStr) {

    ImmutableTable.Builder<URI, URI, MatchResult> result = ImmutableTable.builder();

    // Query the engine
    Query query = QueryFactory.create(queryStr);
    QueryExecution qe = QueryExecutionFactory.sparqlService(this.sparqlEndpoint.toASCIIString(), query);
    // Wrapped for monitoring/metrics; closed in the finally block below.
    MonitoredQueryExecution qexec = new MonitoredQueryExecution(qe);
    try {
        // Timing is around execSelect() only; result iteration happens afterwards.
        Stopwatch stopwatch = new Stopwatch().start();
        ResultSet qResults = qexec.execSelect();
        stopwatch.stop();

        log.debug("Time taken for querying the registry: {}", stopwatch);

        // Obtain matches if any and figure out the type
        MatchType type;
        URI origin;
        URI destination;
        while (qResults.hasNext()) {
            QuerySolution soln = qResults.nextSolution();
            // Only process if we can get complete match information
            if (soln.contains(ORIGIN_VAR) && soln.contains(DESTINATION_VAR)) {
                type = getMatchType(soln);
                origin = new URI(soln.getResource(ORIGIN_VAR).getURI());
                destination = new URI(soln.getResource(DESTINATION_VAR).getURI());
                log.debug("Concept {} was matched to {} with type {}", origin, destination, type);
                result.put(origin, destination, new AtomicMatchResult(origin, destination, type, this));
            }
        }
    } catch (URISyntaxException e) {
        // NOTE(review): this aborts the whole loop and returns partial results;
        // confirm that is the intended behavior for a single malformed URI.
        log.error("Error creating URI for match results", e);
    } finally {
        qexec.close();
    }

    return result.build();
}
From source file:google.registry.tools.server.ListObjectsAction.java
/** * Returns a table of data for the given sets of fields and objects. The table is row-keyed by * object and column-keyed by field, in the same iteration order as the provided sets. *///from w w w .j ava 2 s . c om private ImmutableTable<T, String, String> extractData(ImmutableSet<String> fields, ImmutableSet<T> objects) { ImmutableTable.Builder<T, String, String> builder = new ImmutableTable.Builder<>(); for (T object : objects) { Map<String, Object> fieldMap = new HashMap<>(); // Base case of the mapping is to use ImmutableObject's toDiffableFieldMap(). fieldMap.putAll(object.toDiffableFieldMap()); // Next, overlay any field-level overrides specified by the subclass. fieldMap.putAll(getFieldOverrides(object)); // Next, add to the mapping all the aliases, with their values defined as whatever was in the // map under the aliased field's original name. fieldMap.putAll(Maps.transformValues(getFieldAliases(), Functions.forMap(new HashMap<>(fieldMap)))); Set<String> expectedFields = ImmutableSortedSet.copyOf(fieldMap.keySet()); for (String field : fields) { checkArgument(fieldMap.containsKey(field), "Field '%s' not found - recognized fields are:\n%s", field, expectedFields); builder.put(object, field, Objects.toString(fieldMap.get(field), "")); } } return builder.build(); }
From source file:org.jage.platform.fsm.StateMachineServiceBuilder.java
/** * Builds the transitions table./*from w ww . ja va 2s. co m*/ * * @return an immutable transitions table. */ ImmutableTable<S, E, TransitionDescriptor<S, E>> buildTransitionsTable() { final EnumSet<S> allStates = EnumSet.allOf(getStateClass()); final EnumSet<E> allEvents = EnumSet.allOf(getEventClass()); final Builder<S, E, TransitionDescriptor<S, E>> tableBuilder = ImmutableTable.builder(); for (final S state : allStates) { for (final E event : allEvents) { tableBuilder.put(state, event, transitionFor(state, event)); } } return tableBuilder.build(); }
From source file:com.google.gerrit.server.index.change.ChangeField.java
/**
 * Parses serialized reviewer field values of the form
 * {@code STATE,account-id,epoch-millis} back into a {@link ReviewerSet}.
 * <p>
 * Malformed entries (fewer than two commas) are skipped silently, matching the
 * lenient write-side format. Uses {@code Long.parseLong} instead of
 * {@code Long.valueOf} to avoid a pointless box/unbox, and drops the redundant
 * end index from the final {@code substring}.
 *
 * @param values serialized reviewer entries
 * @return the reconstructed reviewer set
 */
public static ReviewerSet parseReviewerFieldValues(Iterable<String> values) {
    ImmutableTable.Builder<ReviewerStateInternal, Account.Id, Timestamp> b = ImmutableTable.builder();
    for (String v : values) {
        // First comma separates the state from the account id.
        int f = v.indexOf(',');
        if (f < 0) {
            continue;
        }
        // Last comma separates the account id from the timestamp; if it equals
        // the first, there is no id segment and the entry is malformed.
        int l = v.lastIndexOf(',');
        if (l == f) {
            continue;
        }
        b.put(ReviewerStateInternal.valueOf(v.substring(0, f)), Account.Id.parse(v.substring(f + 1, l)),
                new Timestamp(Long.parseLong(v.substring(l + 1))));
    }
    return ReviewerSet.fromTable(b.build());
}
From source file:me.yanaga.guava.stream.MoreCollectors.java
public static <T, R, C, V> Collector<T, ?, ImmutableTable<R, C, V>> toImmutableTable( Function<? super T, ? extends R> rowMapper, Function<? super T, ? extends C> columnMapper, Function<? super T, ? extends V> valueMapper) { return Collector.of(ImmutableTable::builder, new BiConsumer<ImmutableTable.Builder<R, C, V>, T>() { @Override//from ww w . jav a2 s .co m public void accept(ImmutableTable.Builder<R, C, V> rcvBuilder, T t) { rcvBuilder.put(rowMapper.apply(t), columnMapper.apply(t), valueMapper.apply(t)); } }, new BinaryOperator<ImmutableTable.Builder<R, C, V>>() { @Override public ImmutableTable.Builder<R, C, V> apply(ImmutableTable.Builder<R, C, V> rcvBuilder, ImmutableTable.Builder<R, C, V> rcvBuilder2) { return rcvBuilder.putAll(rcvBuilder2.build()); } }, new Function<ImmutableTable.Builder<R, C, V>, ImmutableTable<R, C, V>>() { @Override public ImmutableTable<R, C, V> apply(ImmutableTable.Builder<R, C, V> rcvBuilder) { return rcvBuilder.build(); } }, UNORDERED, CONCURRENT); }
From source file:org.apache.beam.runners.fnexecution.control.ProcessBundleDescriptors.java
/**
 * Registers side-input coders for an executable stage and returns the
 * resulting specs, keyed by transform id then local input name.
 * <p>
 * NOTE: mutates {@code components} — each side input PCollection's coder is
 * replaced with a length-prefixed variant so the SDK harness and the runner
 * agree on the wire encoding of key, window, and value.
 *
 * @param stage      the stage whose side inputs are processed
 * @param components mutable pipeline components; updated in place
 * @return map of transform id to (local name to spec); built from the row map
 *         of an immutable table
 * @throws IOException if a runner wire coder cannot be instantiated
 */
private static Map<String, Map<String, SideInputSpec>> addSideInputs(ExecutableStage stage,
        Components.Builder components) throws IOException {
    ImmutableTable.Builder<String, String, SideInputSpec> idsToSpec = ImmutableTable.builder();
    for (SideInputReference sideInputReference : stage.getSideInputs()) {
        // Update the coder specification for side inputs to be length prefixed so that the
        // SDK and Runner agree on how to encode/decode the key, window, and values for
        // side inputs.
        PCollectionNode pcNode = sideInputReference.collection();
        PCollection pc = pcNode.getPCollection();
        String lengthPrefixedCoderId = LengthPrefixUnknownCoders.addLengthPrefixedCoder(pc.getCoderId(),
                components, false);
        components.putPcollections(pcNode.getId(), pc.toBuilder().setCoderId(lengthPrefixedCoderId).build());

        // Instantiated against components.build() AFTER the coder swap above, so the
        // runner wire coder reflects the length-prefixed coder id.
        FullWindowedValueCoder<KV<?, ?>> coder = (FullWindowedValueCoder) WireCoders
                .instantiateRunnerWireCoder(pcNode, components.build());

        idsToSpec.put(sideInputReference.transform().getId(), sideInputReference.localName(),
                SideInputSpec.of(sideInputReference.transform().getId(), sideInputReference.localName(),
                        getAccessPattern(sideInputReference), coder.getValueCoder(), coder.getWindowCoder()));
    }
    return idsToSpec.build().rowMap();
}
From source file:com.google.gerrit.server.index.change.ChangeField.java
/**
 * Parses serialized reviewer-by-email field values of the form
 * {@code STATE,email-address,epoch-millis} back into a
 * {@link ReviewerByEmailSet}.
 * <p>
 * Malformed entries (fewer than two commas) are skipped silently, matching the
 * lenient write-side format. Uses {@code Long.parseLong} instead of
 * {@code Long.valueOf} to avoid a pointless box/unbox, and drops the redundant
 * end index from the final {@code substring}.
 *
 * @param values serialized reviewer-by-email entries
 * @return the reconstructed reviewer-by-email set
 */
public static ReviewerByEmailSet parseReviewerByEmailFieldValues(Iterable<String> values) {
    ImmutableTable.Builder<ReviewerStateInternal, Address, Timestamp> b = ImmutableTable.builder();
    for (String v : values) {
        // First comma separates the state from the address.
        int f = v.indexOf(',');
        if (f < 0) {
            continue;
        }
        // Last comma separates the address from the timestamp; if it equals
        // the first, there is no address segment and the entry is malformed.
        int l = v.lastIndexOf(',');
        if (l == f) {
            continue;
        }
        b.put(ReviewerStateInternal.valueOf(v.substring(0, f)), Address.parse(v.substring(f + 1, l)),
                new Timestamp(Long.parseLong(v.substring(l + 1))));
    }
    return ReviewerByEmailSet.fromTable(b.build());
}