List of usage examples for com.google.common.collect.ImmutableList.get
E get(int index);
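Returns the element at the specified zero-based position in the list. Before the real-world examples below, here is a minimal, self-contained sketch of the basic call (the list contents are illustrative, not taken from any of the projects that follow):

import com.google.common.collect.ImmutableList;

public class ImmutableListGetExample {
    public static void main(String[] args) {
        ImmutableList<String> colors = ImmutableList.of("red", "green", "blue");
        // get(int) returns the element at the given zero-based index.
        System.out.println(colors.get(0)); // red
        System.out.println(colors.get(2)); // blue
        // Any index outside [0, size()) throws IndexOutOfBoundsException.
    }
}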
From source file:org.mskcc.shenkers.control.track.gene.GTFGeneModelProvider.java
private void createGtfBgz(File gtf_file, File gtf_bgz_file) throws IOException {
    logger.info("reading {}", gtf_file.getAbsolutePath());
    AbstractFeatureReader<GTFContext, LineIterator> afr = AbstractFeatureReader
            .getFeatureReader(gtf_file.getAbsolutePath(), codec, false);
    CloseableTribbleIterator<GTFContext> iterator = afr.iterator();
    List<GTFContext> gtf = new ArrayList<>();
    while (iterator.hasNext()) {
        GTFContext next = iterator.next();
        gtf.add(next);
    }
    ImmutableListMultimap<String, GTFContext> transcript_id_multimap = Multimaps.index(gtf.iterator(),
            GTFContext::getTranscriptId);
    logger.info("adding transcript ranges");
    gtf.addAll(transcript_id_multimap.keySet().stream().map(key -> {
        System.out.println(key);
        ImmutableList<GTFContext> contexts = transcript_id_multimap.get(key);
        Range<Integer> span = contexts.stream().map(c -> Range.closed(c.getStart(), c.getEnd()))
                .collect(new RangeSetCollector()).span();
        GTFContext context = new GTFContext(contexts.get(0).getChr(), span.lowerEndpoint(),
                span.upperEndpoint());
        context.setFeature("transcript");
        context.setFrame(".");
        context.setName(".");
        context.setScore(".");
        context.setSource(".");
        context.setStrand('.');
        context.setAttributes(String.format("transcript_id \"%s\";", key));
        return context;
    }).collect(Collectors.toList()));
    logger.info("sorting");
    Collections.sort(gtf, new CoordinateOrderComparator());
    logger.info("writing to compressed output stream");
    BlockCompressedOutputStream os = new BlockCompressedOutputStream(gtf_bgz_file);
    Writer w = new OutputStreamWriter(os);
    for (GTFContext feature : gtf) {
        w.write(codec.encodeToString(feature));
    }
    w.close();
}
From source file:org.apache.hadoop.hive.ql.optimizer.calcite.cost.HiveAlgorithmsUtil.java
public double computeMapJoinIOCost(ImmutableList<Pair<Double, Double>> relationInfos,
        ImmutableBitSet streaming, int parallelism) {
    // Hash-join
    double ioCost = 0.0;
    for (int i = 0; i < relationInfos.size(); i++) {
        double cardinality = relationInfos.get(i).left;
        double averageTupleSize = relationInfos.get(i).right;
        if (!streaming.get(i)) {
            ioCost += cardinality * averageTupleSize * netCost * parallelism;
        }
    }
    return ioCost;
}
From source file:org.apache.hadoop.hive.ql.optimizer.calcite.cost.HiveAlgorithmsUtil.java
public double computeBucketMapJoinIOCost(ImmutableList<Pair<Double, Double>> relationInfos,
        ImmutableBitSet streaming, int parallelism) {
    // Hash-join
    double ioCost = 0.0;
    for (int i = 0; i < relationInfos.size(); i++) {
        double cardinality = relationInfos.get(i).left;
        double averageTupleSize = relationInfos.get(i).right;
        if (!streaming.get(i)) {
            ioCost += cardinality * averageTupleSize * netCost * parallelism;
        }
    }
    return ioCost;
}
From source file:org.apache.hadoop.hive.ql.optimizer.calcite.cost.HiveAlgorithmsUtil.java
public double computeSMBMapJoinIOCost(ImmutableList<Pair<Double, Double>> relationInfos,
        ImmutableBitSet streaming, int parallelism) {
    // Hash-join
    double ioCost = 0.0;
    for (int i = 0; i < relationInfos.size(); i++) {
        double cardinality = relationInfos.get(i).left;
        double averageTupleSize = relationInfos.get(i).right;
        if (!streaming.get(i)) {
            ioCost += cardinality * averageTupleSize * netCost * parallelism;
        }
    }
    return ioCost;
}
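All three Hive cost methods above index the same ImmutableList of (cardinality, average tuple size) pairs and charge cardinality * averageTupleSize * netCost * parallelism for each non-streaming input. A minimal standalone sketch of that accumulation, with made-up numbers and an assumed netCost of 4.0 (in the real class, netCost is a field of HiveAlgorithmsUtil, not shown here):

public class MapJoinIOCostSketch {
    public static void main(String[] args) {
        // (cardinality, average tuple size) per input relation; values are illustrative.
        double[][] relationInfos = { { 1_000_000, 32 }, { 5_000, 64 } };
        boolean[] streaming = { true, false }; // only non-streaming inputs incur network I/O
        int parallelism = 8;
        double netCost = 4.0; // assumed per-byte network transfer cost

        double ioCost = 0.0;
        for (int i = 0; i < relationInfos.length; i++) {
            if (!streaming[i]) {
                ioCost += relationInfos[i][0] * relationInfos[i][1] * netCost * parallelism;
            }
        }
        System.out.println(ioCost); // 5000 * 64 * 4.0 * 8 = 1.024E7
    }
}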
From source file:com.opengamma.strata.pricer.calibration.CurveCalibrator.java
private DoubleMatrix derivatives(ImmutableList<Trade> trades, ImmutableRatesProvider provider,
        ImmutableList<CurveParameterSize> orderAll, int totalParamsAll) {
    return DoubleMatrix.ofArrayObjects(trades.size(), totalParamsAll,
            i -> measures.derivative(trades.get(i), provider, orderAll));
}
From source file:org.geogit.api.porcelain.SquashOp.java
/**
 * Executes the squash operation.
 *
 * @return the new head after modifying the history squashing commits
 * @see org.geogit.api.AbstractGeoGitOp#call()
 */
@Override
public ObjectId call() {
    Preconditions.checkNotNull(since);
    Preconditions.checkNotNull(until);

    final Optional<Ref> currHead = command(RefParse.class).setName(Ref.HEAD).call();
    Preconditions.checkState(currHead.isPresent(), "Repository has no HEAD, can't squash.");
    Preconditions.checkState(currHead.get() instanceof SymRef, "Can't squash from detached HEAD");
    final SymRef headRef = (SymRef) currHead.get();
    final String currentBranch = headRef.getTarget();

    Preconditions.checkState(getIndex().isClean() && getWorkTree().isClean(),
            "You must have a clean working tree and index to perform a squash.");

    Optional<RevCommit> ancestor = command(FindCommonAncestor.class).setLeft(since).setRight(until).call();
    Preconditions.checkArgument(ancestor.isPresent(),
            "'since' and 'until' command do not have a common ancestor");
    Preconditions.checkArgument(ancestor.get().equals(since), "Commits provided in wrong order");
    Preconditions.checkArgument(!since.getParentIds().isEmpty(), "'since' commit has no parents");

    // we get a list of commits to apply on top of the squashed commits
    List<RevCommit> commits = getCommitsAfterUntil();

    ImmutableSet<Ref> refs = command(ForEachRef.class).setPrefixFilter("refs/heads").call();

    // we create a list of all parents of those squashed commits, in case they are
    // merge commits. The resulting commit will have all these parents
    //
    // While iterating the set of commits to squash, we check that there are no branch starting
    // points among them. Any commit with more than one child causes an exception to be thrown,
    // since the squash operation does not support squashing those commits
    Iterator<RevCommit> toSquash = command(LogOp.class).setSince(since.getParentIds().get(0))
            .setUntil(until.getId()).setFirstParentOnly(true).call();
    List<ObjectId> firstParents = Lists.newArrayList();
    List<ObjectId> secondaryParents = Lists.newArrayList();
    final List<ObjectId> squashedIds = Lists.newArrayList();
    RevCommit commitToSquash = until;
    while (toSquash.hasNext()) {
        commitToSquash = toSquash.next();
        squashedIds.add(commitToSquash.getId());
        Preconditions.checkArgument(graphDb.getChildren(commitToSquash.getId()).size() < 2,
                "The commits to squash include a branch starting point. Squashing that type of commit is not supported.");
        for (Ref ref : refs) {
            // In case a branch has been created but no commit has been made on it and the
            // starting commit has just one child
            Preconditions.checkArgument(
                    !ref.getObjectId().equals(commitToSquash.getId())
                            || ref.getObjectId().equals(currHead.get().getObjectId())
                            || commitToSquash.getParentIds().size() > 1,
                    "The commits to squash include a branch starting point. Squashing that type of commit is not supported.");
        }
        ImmutableList<ObjectId> parentIds = commitToSquash.getParentIds();
        for (int i = 1; i < parentIds.size(); i++) {
            secondaryParents.add(parentIds.get(i));
        }
        firstParents.add(parentIds.get(0));
    }
    Preconditions.checkArgument(since.equals(commitToSquash),
            "Cannot reach 'since' from 'until' commit through first parentage");

    // We do the same check in the children commits
    for (RevCommit commit : commits) {
        Preconditions.checkArgument(graphDb.getChildren(commit.getId()).size() < 2,
                "The commits after the ones to squash include a branch starting point. This scenario is not supported.");
        for (Ref ref : refs) {
            // In case a branch has been created but no commit has been made on it
            Preconditions.checkArgument(
                    !ref.getObjectId().equals(commit.getId())
                            || ref.getObjectId().equals(currHead.get().getObjectId())
                            || commit.getParentIds().size() > 1,
                    "The commits after the ones to squash include a branch starting point. This scenario is not supported.");
        }
    }

    ObjectId newHead;
    // rewind the head
    newHead = since.getParentIds().get(0);
    command(ResetOp.class).setCommit(Suppliers.ofInstance(newHead)).setMode(ResetMode.HARD).call();

    // add the current HEAD as first parent of the resulting commit
    // parents.add(0, newHead);

    // Create new commit
    List<ObjectId> parents = Lists.newArrayList();
    parents.addAll(firstParents);
    parents.addAll(secondaryParents);
    ObjectId endTree = until.getTreeId();
    CommitBuilder builder = new CommitBuilder(until);
    Collection<ObjectId> filteredParents = Collections2.filter(parents, new Predicate<ObjectId>() {
        @Override
        public boolean apply(@Nullable ObjectId id) {
            return !squashedIds.contains(id);
        }
    });
    builder.setParentIds(Lists.newArrayList(filteredParents));
    builder.setTreeId(endTree);
    if (message == null) {
        message = since.getMessage();
    }
    long timestamp = platform.currentTimeMillis();
    builder.setMessage(message);
    builder.setCommitter(resolveCommitter());
    builder.setCommitterEmail(resolveCommitterEmail());
    builder.setCommitterTimestamp(timestamp);
    builder.setCommitterTimeZoneOffset(platform.timeZoneOffset(timestamp));
    builder.setAuthorTimestamp(until.getAuthor().getTimestamp());

    RevCommit newCommit = builder.build();
    repository.getObjectDatabase().put(newCommit);

    newHead = newCommit.getId();
    ObjectId newTreeId = newCommit.getTreeId();

    command(UpdateRef.class).setName(currentBranch).setNewValue(newHead).call();
    command(UpdateSymRef.class).setName(Ref.HEAD).setNewValue(currentBranch).call();

    getWorkTree().updateWorkHead(newTreeId);
    getIndex().updateStageHead(newTreeId);

    // now put the other commits after the squashed one
    newHead = addCommits(commits, currentBranch, newHead);

    return newHead;
}
From source file:com.google.api.server.spi.dispatcher.PathTrie.java
private Result<T> resolve(HttpMethod method, List<String> pathSegments, int index,
        List<String> rawParameters) {
    if (index < pathSegments.size()) {
        String segment = pathSegments.get(index);
        PathTrie<T> subTrie = subTries.get(segment);
        if (subTrie != null) {
            Result<T> result = subTrie.resolve(method, pathSegments, index + 1, rawParameters);
            if (result != null) {
                return result;
            }
        }
        subTrie = subTries.get(PARAMETER_PATH_SEGMENT);
        if (subTrie != null) {
            // TODO: We likely need to enforce non-empty values here.
            rawParameters.add(segment);
            Result<T> result = subTrie.resolve(method, pathSegments, index + 1, rawParameters);
            if (result == null) {
                rawParameters.remove(rawParameters.size() - 1);
            }
            return result;
        }
        return null;
    } else if (httpMethodMap.containsKey(method)) {
        MethodInfo<T> methodInfo = httpMethodMap.get(method);
        ImmutableList<String> parameterNames = methodInfo.parameterNames;
        Preconditions.checkState(rawParameters.size() == parameterNames.size());
        Map<String, String> rawParameterMap = Maps.newHashMap();
        for (int i = 0; i < parameterNames.size(); i++) {
            rawParameterMap.put(parameterNames.get(i), decodeUri(rawParameters.get(i)));
        }
        return new Result<>(methodInfo.value, rawParameterMap);
    }
    return null;
}
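The pairing loop at the end of resolve is a common ImmutableList.get idiom: walking two parallel lists by a shared index after asserting they line up. A stripped-down sketch of just that idiom (the names and values here are illustrative, not from the PathTrie source):

import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import java.util.HashMap;
import java.util.Map;

public class ParallelListPairing {
    public static void main(String[] args) {
        ImmutableList<String> parameterNames = ImmutableList.of("userId", "postId");
        ImmutableList<String> rawParameters = ImmutableList.of("42", "7");
        // The lists must correspond one-to-one before pairing by index.
        Preconditions.checkState(rawParameters.size() == parameterNames.size());
        Map<String, String> parameterMap = new HashMap<>();
        for (int i = 0; i < parameterNames.size(); i++) {
            parameterMap.put(parameterNames.get(i), rawParameters.get(i));
        }
        System.out.println(parameterMap); // {postId=7, userId=42}
    }
}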
From source file:com.github.hilcode.versionator.impl.DefaultVersionChangeExtractor.java
@Override
public final Result<String, RequestedVersionChange> extract(final ImmutableList<String> arguments) {
    final int argumentCount = arguments.size();
    if (argumentCount == 0) {
        return Result.failure("Expected either a GAV or a group:artifact followed by an old and a new version, "
                + "but you provided nothing.\n\n" + "Perhaps try --help?");
    }
    if (argumentCount < 3) {
        return extractAny(arguments);
    }
    final Matcher matcher = GROUP_ARTIFACT.matcher(arguments.get(argumentCount - 3));
    if (!matcher.matches()) {
        return extractAny(arguments);
    } else {
        final String oldVersion = arguments.get(argumentCount - 2);
        if (oldVersion.contains(":")) {
            return failure(arguments.get(argumentCount - 3), arguments.get(argumentCount - 2));
        }
        final String newVersion = arguments.get(argumentCount - 1);
        if (newVersion.contains(":")) {
            return failure(arguments.get(argumentCount - 3), arguments.get(argumentCount - 1));
        }
        return extractOldNew(this.matchers.toGroupArtifact(matcher), oldVersion, newVersion, arguments);
    }
}
From source file:ru.adios.budgeter.activities.BalancesTransferActivity.java
@Nullable
private long[] transformLongList(ImmutableList<Long> list) {
    int listSize = list.size();
    if (listSize == 0) {
        return null;
    }
    final long[] res = new long[listSize];
    for (int i1 = 0; i1 < listSize; i1++) {
        final Long i = list.get(i1);
        res[i1] = i != null ? i : -1;
    }
    return res;
}
From source file:org.locationtech.geogig.plumbing.diff.FeatureDiff.java
/**
 * @param path the full path to the feature, including its name
 * @param newRevFeature the new version of the feature
 * @param oldRevFeature the old version of the feature
 * @param newRevFeatureType the new version of the feature type
 * @param oldRevFeatureType the old version of the feature type
 * @param all - true if all attributes should be added regardless of change
 */
public FeatureDiff(String path, @Nullable RevFeature newRevFeature, @Nullable RevFeature oldRevFeature,
        @Nullable RevFeatureType newRevFeatureType, @Nullable RevFeatureType oldRevFeatureType, boolean all) {
    this.path = path;
    this.newRevFeature = newRevFeature;
    this.oldRevFeature = oldRevFeature;
    this.newFeatureType = newRevFeatureType;
    this.oldFeatureType = oldRevFeatureType;
    diffs = new HashMap<PropertyDescriptor, AttributeDiff>();
    if (newRevFeature == null) {
        Preconditions.checkArgument(oldRevFeature != null, "A feature must be provided");
        Preconditions.checkArgument(oldRevFeatureType != null, "Old feature type must be provided.");
        ImmutableList<PropertyDescriptor> oldAttributes = oldRevFeatureType.descriptors();
        for (int i = 0; i < oldAttributes.size(); i++) {
            Optional<Object> oldValue = oldRevFeature.get(i);
            PropertyDescriptor descriptor = oldAttributes.get(i);
            if (Geometry.class.isAssignableFrom(descriptor.getType().getBinding())) {
                diffs.put(descriptor,
                        new GeometryAttributeDiff((Geometry) oldValue.orNull(), (Geometry) null));
            } else {
                diffs.put(oldAttributes.get(i), new GenericAttributeDiffImpl(oldValue.orNull(), null));
            }
        }
    } else if (oldRevFeature == null) {
        Preconditions.checkArgument(newRevFeatureType != null, "New feature type must be provided.");
        ImmutableList<PropertyDescriptor> newAttributes = newRevFeatureType.descriptors();
        for (int i = 0; i < newAttributes.size(); i++) {
            Optional<Object> newValue = newRevFeature.get(i);
            PropertyDescriptor descriptor = newAttributes.get(i);
            if (Geometry.class.isAssignableFrom(descriptor.getType().getBinding())) {
                diffs.put(descriptor,
                        new GeometryAttributeDiff((Geometry) null, (Geometry) newValue.orNull()));
            } else {
                diffs.put(newAttributes.get(i), new GenericAttributeDiffImpl(null, newValue.orNull()));
            }
        }
    } else {
        ImmutableList<PropertyDescriptor> oldAttributes = oldRevFeatureType.descriptors();
        ImmutableList<PropertyDescriptor> newAttributes = newRevFeatureType.descriptors();
        BitSet updatedAttributes = new BitSet(newRevFeature.size());
        for (int i = 0; i < oldAttributes.size(); i++) {
            Optional<Object> oldValue = oldRevFeature.get(i);
            int idx = newAttributes.indexOf(oldAttributes.get(i));
            if (idx != -1) {
                Optional<Object> newValue = newRevFeature.get(idx);
                if (!oldValue.equals(newValue) || all) {
                    if (Geometry.class.isAssignableFrom(oldAttributes.get(i).getType().getBinding())) {
                        diffs.put(oldAttributes.get(i), new GeometryAttributeDiff(
                                (Geometry) oldValue.orNull(), (Geometry) newValue.orNull()));
                    } else {
                        diffs.put(oldAttributes.get(i),
                                new GenericAttributeDiffImpl(oldValue.orNull(), newValue.orNull()));
                    }
                }
                updatedAttributes.set(idx);
            } else {
                if (Geometry.class.isAssignableFrom(oldAttributes.get(i).getType().getBinding())) {
                    diffs.put(oldAttributes.get(i),
                            new GeometryAttributeDiff((Geometry) oldValue.orNull(), (Geometry) null));
                } else {
                    diffs.put(oldAttributes.get(i), new GenericAttributeDiffImpl(oldValue.orNull(), null));
                }
            }
        }
        updatedAttributes.flip(0, newRevFeature.size());
        for (int i = updatedAttributes.nextSetBit(0); i >= 0; i = updatedAttributes.nextSetBit(i + 1)) {
            PropertyDescriptor descriptor = newAttributes.get(i);
            if (Geometry.class.isAssignableFrom(descriptor.getType().getBinding())) {
                diffs.put(descriptor,
                        new GeometryAttributeDiff((Geometry) null, (Geometry) newRevFeature.get(i).orNull()));
            } else {
                diffs.put(descriptor, new GenericAttributeDiffImpl(null, newRevFeature.get(i).orNull()));
            }
        }
    }
}