List of usage examples for com.google.common.collect.ImmutableList.get
E get(int index);
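get(int) returns the element at the given zero-based position. Before the source-file examples, here is a minimal standalone sketch of the basic behavior; the list contents are illustrative only. Note that an out-of-range index fails fast with IndexOutOfBoundsException.

import com.google.common.collect.ImmutableList;

public class ImmutableListGetExample {
  public static void main(String[] args) {
    ImmutableList<String> colors = ImmutableList.of("red", "green", "blue");

    // Zero-based positional access, just like java.util.List#get
    System.out.println(colors.get(0)); // prints "red"
    System.out.println(colors.get(2)); // prints "blue"

    // An out-of-range index throws IndexOutOfBoundsException
    try {
      colors.get(3);
    } catch (IndexOutOfBoundsException e) {
      System.out.println("index 3 is out of range for size " + colors.size());
    }
  }
}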
From source file: org.apache.beam.sdk.extensions.sql.impl.rule.BeamAggregationRule.java

private void updateWindowTrigger(RelOptRuleCall call, Aggregate aggregate, Project project) {
  ImmutableBitSet groupByFields = aggregate.getGroupSet();
  List<RexNode> projectMapping = project.getProjects();

  WindowFn windowFn = new GlobalWindows();
  Trigger triggerFn = Repeatedly.forever(AfterWatermark.pastEndOfWindow());
  int windowFieldIdx = -1;
  Duration allowedLatence = Duration.ZERO;

  for (int groupField : groupByFields.asList()) {
    RexNode projNode = projectMapping.get(groupField);
    if (projNode instanceof RexCall) {
      SqlOperator op = ((RexCall) projNode).op;
      ImmutableList<RexNode> parameters = ((RexCall) projNode).operands;
      String functionName = op.getName();
      switch (functionName) {
      case "TUMBLE":
        windowFieldIdx = groupField;
        windowFn = FixedWindows.of(Duration.millis(getWindowParameterAsMillis(parameters.get(1))));
        if (parameters.size() == 3) {
          GregorianCalendar delayTime = (GregorianCalendar) ((RexLiteral) parameters.get(2)).getValue();
          triggerFn = createTriggerWithDelay(delayTime);
          allowedLatence = (Duration.millis(delayTime.getTimeInMillis()));
        }
        break;
      case "HOP":
        windowFieldIdx = groupField;
        windowFn = SlidingWindows.of(Duration.millis(getWindowParameterAsMillis(parameters.get(1))))
            .every(Duration.millis(getWindowParameterAsMillis(parameters.get(2))));
        if (parameters.size() == 4) {
          GregorianCalendar delayTime = (GregorianCalendar) ((RexLiteral) parameters.get(3)).getValue();
          triggerFn = createTriggerWithDelay(delayTime);
          allowedLatence = (Duration.millis(delayTime.getTimeInMillis()));
        }
        break;
      case "SESSION":
        windowFieldIdx = groupField;
        windowFn = Sessions.withGapDuration(Duration.millis(getWindowParameterAsMillis(parameters.get(1))));
        if (parameters.size() == 3) {
          GregorianCalendar delayTime = (GregorianCalendar) ((RexLiteral) parameters.get(2)).getValue();
          triggerFn = createTriggerWithDelay(delayTime);
          allowedLatence = (Duration.millis(delayTime.getTimeInMillis()));
        }
        break;
      default:
        break;
      }
    }
  }

  BeamAggregationRel newAggregator = new BeamAggregationRel(aggregate.getCluster(),
      aggregate.getTraitSet().replace(BeamLogicalConvention.INSTANCE),
      convert(aggregate.getInput(),
          aggregate.getInput().getTraitSet().replace(BeamLogicalConvention.INSTANCE)),
      aggregate.indicator, aggregate.getGroupSet(), aggregate.getGroupSets(), aggregate.getAggCallList(),
      windowFn, triggerFn, windowFieldIdx, allowedLatence);
  call.transformTo(newAggregator);
}
From source file: com.palantir.giraffe.file.UniformPath.java

private Path toRelativePath(FileSystem fs) {
  ImmutableList<String> segments = core.getPathSegments();
  if (segments.isEmpty()) {
    return fs.getPath("");
  } else {
    String first = segments.get(0);
    String[] more = segments.subList(1, segments.size()).toArray(new String[0]);
    return fs.getPath(first, more);
  }
}
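The example above splits an ImmutableList into its first element plus a trailing array so it can feed a (String first, String... more) varargs API. A simplified, self-contained sketch of the same head/tail pattern; the class name and path segments are illustrative, not from the Giraffe source:

import com.google.common.collect.ImmutableList;
import java.nio.file.FileSystems;
import java.nio.file.Path;

public class HeadTailExample {
  public static void main(String[] args) {
    ImmutableList<String> segments = ImmutableList.of("usr", "local", "bin");

    // get(0) supplies the required first argument; the rest becomes the varargs array
    String first = segments.get(0);
    String[] more = segments.subList(1, segments.size()).toArray(new String[0]);

    Path path = FileSystems.getDefault().getPath(first, more);
    System.out.println(path); // usr/local/bin on a Unix-like default file system
  }
}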
From source file: com.opengamma.strata.pricer.bond.DiscountingBondFutureProductPricer.java

/**
 * Calculates the price sensitivity of the bond future product.
 * <p>
 * The price sensitivity of the product is the sensitivity of the price to the underlying curves.
 * <p>
 * Note that the price sensitivity is not expressed in any currency.
 *
 * @param future the future
 * @param discountingProvider the discounting provider
 * @return the price curve sensitivity of the product
 */
public PointSensitivities priceSensitivity(ResolvedBondFuture future,
    LegalEntityDiscountingProvider discountingProvider) {
  ImmutableList<ResolvedFixedCouponBond> basket = future.getDeliveryBasket();
  int size = basket.size();
  double[] priceBonds = new double[size];
  int indexCTD = 0;
  double priceMin = 2d;
  for (int i = 0; i < size; i++) {
    ResolvedFixedCouponBond bond = basket.get(i);
    double dirtyPrice = bondPricer.dirtyPriceFromCurves(bond, discountingProvider, future.getLastDeliveryDate());
    priceBonds[i] = bondPricer.cleanPriceFromDirtyPrice(bond, future.getLastDeliveryDate(), dirtyPrice)
        / future.getConversionFactors().get(i);
    if (priceBonds[i] < priceMin) {
      priceMin = priceBonds[i];
      indexCTD = i;
    }
  }
  ResolvedFixedCouponBond bond = basket.get(indexCTD);
  PointSensitivityBuilder pointSensi = bondPricer.dirtyPriceSensitivity(bond, discountingProvider,
      future.getLastDeliveryDate());
  return pointSensi.multipliedBy(1d / future.getConversionFactors().get(indexCTD)).build();
}
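This pricer scans the delivery basket by index, remembers the position of the minimum (the cheapest-to-deliver bond), and then calls get(...) once more with that remembered index; the z-spread variant further down follows the same shape. A stripped-down sketch of that index-of-minimum pattern, with made-up numbers and no Strata types involved:

import com.google.common.collect.ImmutableList;

public class IndexOfMinimumExample {
  public static void main(String[] args) {
    ImmutableList<Double> prices = ImmutableList.of(1.03, 0.97, 1.11);

    int indexMin = 0;
    double priceMin = Double.MAX_VALUE;
    for (int i = 0; i < prices.size(); i++) {
      if (prices.get(i) < priceMin) {
        priceMin = prices.get(i);
        indexMin = i;
      }
    }

    // Re-use the remembered index to fetch the winning element
    System.out.println("cheapest: " + prices.get(indexMin) + " at position " + indexMin);
  }
}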
From source file: com.opengamma.strata.pricer.impl.credit.isda.IsdaModelDatasetsSheetReader.java

private String[] readHeaderRow() {
  // Read in the header row
  ImmutableList<String> rawRow = csvFile.headers();

  final List<LocalDate> parSpreadDates = new ArrayList<>();

  // Normalise read-in headers (to lower case) and set as columns
  String[] columns = new String[rawRow.size()];
  for (int i = 0; i < rawRow.size(); i++) {
    columns[i] = rawRow.get(i).trim();

    // if a date add to list of spread dates
    try {
      final LocalDate date = LocalDate.parse(columns[i], DATE_TIME_PARSER);
      parSpreadDates.add(date);
      continue;
    } catch (Exception ex) {
      columns[i] = columns[i].toLowerCase(Locale.ENGLISH); // lowercase non dates
    }
  }

  _parSpreadDates = parSpreadDates.toArray(new LocalDate[parSpreadDates.size()]);
  _curveTenors = new ZonedDateTime[_parSpreadDates.length];
  for (int j = 0; j < _parSpreadDates.length; j++) {
    _curveTenors[j] = ZonedDateTime.of(_parSpreadDates[j], LOCAL_TIME, TIME_ZONE);
  }
  ArgChecker.notEmpty(_parSpreadDates, "par spread dates");
  ArgChecker.notEmpty(_curveTenors, "curve tenors");

  return columns;
}
From source file: com.opengamma.strata.pricer.bond.DiscountingBondFutureProductPricer.java

/**
 * Calculates the price sensitivity of the bond future product with z-spread.
 * <p>
 * The price sensitivity of the product is the sensitivity of the price to the underlying curves.
 * <p>
 * The z-spread is a parallel shift applied to continuously compounded rates or periodic compounded rates
 * of the issuer discounting curve.
 * <p>
 * Note that the price sensitivity is not expressed in any currency.
 *
 * @param future the future
 * @param discountingProvider the discounting provider
 * @param zSpread the z-spread
 * @param compoundedRateType the compounded rate type
 * @param periodPerYear the number of periods per year
 * @return the price curve sensitivity of the product
 */
public PointSensitivities priceSensitivityWithZSpread(ResolvedBondFuture future,
    LegalEntityDiscountingProvider discountingProvider, double zSpread,
    CompoundedRateType compoundedRateType, int periodPerYear) {
  ImmutableList<ResolvedFixedCouponBond> basket = future.getDeliveryBasket();
  int size = basket.size();
  double[] priceBonds = new double[size];
  int indexCTD = 0;
  double priceMin = 2d;
  for (int i = 0; i < size; i++) {
    ResolvedFixedCouponBond bond = basket.get(i);
    double dirtyPrice = bondPricer.dirtyPriceFromCurvesWithZSpread(bond, discountingProvider, zSpread,
        compoundedRateType, periodPerYear, future.getLastDeliveryDate());
    priceBonds[i] = bondPricer.cleanPriceFromDirtyPrice(bond, future.getLastDeliveryDate(), dirtyPrice)
        / future.getConversionFactors().get(i);
    if (priceBonds[i] < priceMin) {
      priceMin = priceBonds[i];
      indexCTD = i;
    }
  }
  ResolvedFixedCouponBond bond = basket.get(indexCTD);
  PointSensitivityBuilder pointSensi = bondPricer.dirtyPriceSensitivityWithZspread(bond, discountingProvider,
      zSpread, compoundedRateType, periodPerYear, future.getLastDeliveryDate());
  return pointSensi.multipliedBy(1d / future.getConversionFactors().get(indexCTD)).build();
}
From source file: org.locationtech.geogig.web.api.commands.FeatureDiffWeb.java

/**
 * Runs the command and builds the appropriate response
 *
 * @param context - the context to use for this command
 *
 * @throws CommandSpecException
 */
@Override
public void run(CommandContext context) {
  if (path == null || path.trim().isEmpty()) {
    throw new CommandSpecException("No path for feature name specifed");
  }

  final Context geogig = this.getCommandLocator(context);
  ObjectId newId = geogig.command(ResolveTreeish.class).setTreeish(newTreeish).call().get();
  ObjectId oldId = geogig.command(ResolveTreeish.class).setTreeish(oldTreeish).call().get();

  RevFeature newFeature = null;
  RevFeatureType newFeatureType = null;
  RevFeature oldFeature = null;
  RevFeatureType oldFeatureType = null;

  final Map<PropertyDescriptor, AttributeDiff> diffs;

  Optional<NodeRef> ref = parseID(newId, geogig);
  Optional<RevObject> object;

  // need these to determine if the feature was added or removed so I can build the diffs
  // myself until the FeatureDiff supports null values
  boolean removed = false;
  boolean added = false;

  if (ref.isPresent()) {
    object = geogig.command(RevObjectParse.class).setObjectId(ref.get().getMetadataId()).call();
    if (object.isPresent() && object.get() instanceof RevFeatureType) {
      newFeatureType = (RevFeatureType) object.get();
    } else {
      throw new CommandSpecException("Couldn't resolve newCommit's featureType");
    }
    object = geogig.command(RevObjectParse.class).setObjectId(ref.get().objectId()).call();
    if (object.isPresent() && object.get() instanceof RevFeature) {
      newFeature = (RevFeature) object.get();
    } else {
      throw new CommandSpecException("Couldn't resolve newCommit's feature");
    }
  } else {
    removed = true;
  }

  if (!oldId.equals(ObjectId.NULL)) {
    ref = parseID(oldId, geogig);
    if (ref.isPresent()) {
      object = geogig.command(RevObjectParse.class).setObjectId(ref.get().getMetadataId()).call();
      if (object.isPresent() && object.get() instanceof RevFeatureType) {
        oldFeatureType = (RevFeatureType) object.get();
      } else {
        throw new CommandSpecException("Couldn't resolve oldCommit's featureType");
      }
      object = geogig.command(RevObjectParse.class).setObjectId(ref.get().objectId()).call();
      if (object.isPresent() && object.get() instanceof RevFeature) {
        oldFeature = (RevFeature) object.get();
      } else {
        throw new CommandSpecException("Couldn't resolve oldCommit's feature");
      }
    } else {
      added = true;
    }
  } else {
    added = true;
  }

  if (removed) {
    Map<PropertyDescriptor, AttributeDiff> tempDiffs = new HashMap<PropertyDescriptor, AttributeDiff>();
    ImmutableList<PropertyDescriptor> attributes = oldFeatureType.sortedDescriptors();
    ImmutableList<Optional<Object>> values = oldFeature.getValues();
    for (int index = 0; index < attributes.size(); index++) {
      Optional<Object> value = values.get(index);
      if (Geometry.class.isAssignableFrom(attributes.get(index).getType().getBinding())) {
        Optional<Geometry> temp = Optional.absent();
        if (value.isPresent() || all) {
          tempDiffs.put(attributes.get(index),
              new GeometryAttributeDiff(Optional.fromNullable((Geometry) value.orNull()), temp));
        }
      } else {
        if (value.isPresent() || all) {
          tempDiffs.put(attributes.get(index), new GenericAttributeDiffImpl(value, Optional.absent()));
        }
      }
    }
    diffs = tempDiffs;
  } else if (added) {
    Map<PropertyDescriptor, AttributeDiff> tempDiffs = new HashMap<PropertyDescriptor, AttributeDiff>();
    ImmutableList<PropertyDescriptor> attributes = newFeatureType.sortedDescriptors();
    ImmutableList<Optional<Object>> values = newFeature.getValues();
    for (int index = 0; index < attributes.size(); index++) {
      Optional<Object> value = values.get(index);
      if (Geometry.class.isAssignableFrom(attributes.get(index).getType().getBinding())) {
        Optional<Geometry> temp = Optional.absent();
        if (value.isPresent() || all) {
          tempDiffs.put(attributes.get(index),
              new GeometryAttributeDiff(temp, Optional.fromNullable((Geometry) value.orNull())));
        }
      } else {
        if (value.isPresent() || all) {
          tempDiffs.put(attributes.get(index), new GenericAttributeDiffImpl(Optional.absent(), value));
        }
      }
    }
    diffs = tempDiffs;
  } else {
    FeatureDiff diff = new FeatureDiff(path, newFeature, oldFeature, newFeatureType, oldFeatureType, all);
    diffs = diff.getDiffs();
  }

  context.setResponseContent(new CommandResponse() {
    @Override
    public void write(ResponseWriter out) throws Exception {
      out.start();
      out.writeFeatureDiffResponse(diffs);
      out.finish();
    }
  });
}
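The removed/added branches above walk two parallel ImmutableLists (property descriptors and their values) with the same loop index. A minimal sketch of that parallel-index iteration, using plain strings as placeholders for the GeoGig types; the attribute names and values are invented:

import com.google.common.collect.ImmutableList;

public class ParallelIndexExample {
  public static void main(String[] args) {
    ImmutableList<String> attributes = ImmutableList.of("name", "population", "geometry");
    ImmutableList<String> values = ImmutableList.of("Springfield", "30720", "POINT (1 2)");

    // The two lists are kept in step by position, so get(index) pairs them up
    for (int index = 0; index < attributes.size(); index++) {
      System.out.println(attributes.get(index) + " = " + values.get(index));
    }
  }
}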
From source file: com.facebook.buck.cxx.AbstractNewPrebuiltCxxLibraryPaths.java

private <T> Optional<T> getParameter(String parameter, Optional<T> element, CxxPlatform cxxPlatform,
    Optional<PatternMatchedCollection<T>> platformElement,
    Optional<ImmutableMap<BuildTarget, Version>> selectedVersions,
    Optional<VersionMatchedCollection<T>> versionedElement) {
  if (element.isPresent()) {
    return element;
  }
  if (platformElement.isPresent()) {
    ImmutableList<T> matches = platformElement.get().getMatchingValues(cxxPlatform.getFlavor().toString());
    if (matches.size() != 1) {
      throw new HumanReadableException("%s: %s: expected a single match for platform %s, but found %s",
          getTarget(), parameter, cxxPlatform.getFlavor(), matches);
    }
    return Optional.of(matches.get(0));
  }
  if (selectedVersions.isPresent() && versionedElement.isPresent()) {
    return Optional.of(
        versionedElement.get().getOnlyMatchingValue(getTarget().toString(), selectedVersions.get()));
  }
  return Optional.empty();
}
From source file: com.opengamma.collect.io.PropertySet.java

/**
 * Gets a single value from this property set.
 * <p>
 * This returns the value associated with the specified key.
 * If more than one value, or no value, is associated with the key an exception is thrown.
 *
 * @param key the key name
 * @return the value
 * @throws IllegalArgumentException if the key does not exist, or if more than one value is associated
 */
public String getValue(String key) {
  ArgChecker.notNull(key, "key");
  ImmutableList<String> values = keyValueMap.get(key);
  if (values.size() == 0) {
    throw new IllegalArgumentException("Unknown key: " + key);
  }
  if (values.size() > 1) {
    throw new IllegalArgumentException("Multiple values for key: " + key);
  }
  return values.get(0);
}
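As in the Buck example above, get(0) here is guarded by explicit size checks, so the single-element assumption is enforced before the access. A self-contained sketch of the same guard-then-get(0) idiom against a Guava ImmutableListMultimap; the keys, values, and error messages are illustrative:

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableListMultimap;

public class SingleValueLookupExample {
  public static void main(String[] args) {
    ImmutableListMultimap<String, String> properties = ImmutableListMultimap.of(
        "host", "example.org",
        "port", "8080",
        "port", "9090");

    System.out.println(getValue(properties, "host")); // example.org
    // getValue(properties, "port") would throw: two values are mapped to "port"
  }

  static String getValue(ImmutableListMultimap<String, String> map, String key) {
    ImmutableList<String> values = map.get(key);
    if (values.isEmpty()) {
      throw new IllegalArgumentException("Unknown key: " + key);
    }
    if (values.size() > 1) {
      throw new IllegalArgumentException("Multiple values for key: " + key);
    }
    // Safe: exactly one element remains
    return values.get(0);
  }
}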
From source file: flipkart.mongo.replicator.node.ReplicaSetManager.java

@Override
public void updateReplicaSetConfigs(ImmutableList<ReplicaSetConfig> updatedRSConfigs) {
  if (DiscoveryUtils.hasReplicaSetsChanged(ImmutableList.of(this.replicaSetConfig), updatedRSConfigs)) {
    /*
     * - stopping currently running replicators
     * - updating replicaSetConfigs in cluster
     * - starting replicators with updated configs
     */
    this.stopReplicator();
    this.replicaSetConfig = updatedRSConfigs.get(0);
    this.startReplicator();
  }
}
From source file: org.apache.hadoop.hive.ql.optimizer.calcite.cost.HiveAlgorithmsUtil.java

public double computeBucketMapJoinCPUCost(ImmutableList<Double> cardinalities, ImmutableBitSet streaming) {
  // Hash-join
  double cpuCost = 0.0;
  for (int i = 0; i < cardinalities.size(); i++) {
    double cardinality = cardinalities.get(i);
    if (!streaming.get(i)) {
      cpuCost += cardinality * cpuCost;
    }
    cpuCost += cardinality * cpuCost;
  }
  return cpuCost;
}
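The cost method above pairs an ImmutableList with Calcite's ImmutableBitSet by index, using the bit set as a per-position flag (note that, at least as quoted here, it always returns 0.0, since cpuCost starts at zero and is only ever multiplied into itself). A simplified sketch of the same list-plus-flags traversal, with java.util.BitSet standing in for ImmutableBitSet and an additive cost so the result is non-trivial; the cardinalities and flags are arbitrary sample data:

import com.google.common.collect.ImmutableList;
import java.util.BitSet;

public class ListWithFlagsExample {
  public static void main(String[] args) {
    ImmutableList<Double> cardinalities = ImmutableList.of(1000.0, 250.0, 40.0);

    // java.util.BitSet stands in for Calcite's ImmutableBitSet here
    BitSet streaming = new BitSet();
    streaming.set(1); // mark input 1 as streaming

    double cost = 0.0;
    for (int i = 0; i < cardinalities.size(); i++) {
      double cardinality = cardinalities.get(i);
      // Non-streaming inputs pay an extra (illustrative) build cost
      if (!streaming.get(i)) {
        cost += cardinality;
      }
      cost += cardinality;
    }
    System.out.println("total cost: " + cost); // 1000+1000 + 250 + 40+40 = 2330.0
  }
}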