Example usage for com.google.common.collect ImmutableList get

List of usage examples for com.google.common.collect ImmutableList get

Introduction

On this page you can find usage examples for com.google.common.collect ImmutableList get.

Prototype

E get(int index);

Document

Returns the element at the specified position in this list.
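
For orientation, here is a minimal, self-contained sketch of calling get on an ImmutableList before the real-world examples below (the class name and values are invented for illustration; get throws IndexOutOfBoundsException for an out-of-range index):

import com.google.common.collect.ImmutableList;

public class ImmutableListGetExample {
    public static void main(String[] args) {
        // Build an immutable list and read elements by position.
        ImmutableList<String> colors = ImmutableList.of("red", "green", "blue");
        System.out.println(colors.get(0)); // "red"
        System.out.println(colors.get(colors.size() - 1)); // "blue"
        // colors.get(3) would throw IndexOutOfBoundsException.
    }
}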

Usage

From source file:com.google.devtools.build.lib.packages.SkylarkProvider.java

@Override
protected Info createInstanceFromSkylark(Object[] args, Location loc) throws EvalException {
    if (signature.getSignature().getShape().hasKwArg()) {
        @SuppressWarnings("unchecked")
        Map<String, Object> kwargs = (Map<String, Object>) args[0];
        return new SkylarkInfo(this, kwargs, loc);
    } else {
        // todo(dslomov): implement shape sharing.
        ImmutableList<String> names = signature.getSignature().getNames();
        Preconditions.checkState(names.size() == args.length);
        ImmutableMap.Builder<String, Object> fields = ImmutableMap.builder();
        for (int i = 0; i < args.length; i++) {
            if (args[i] != null) {
                fields.put(names.get(i), args[i]);
            }
        }
        return new SkylarkInfo(this, fields.build(), loc);
    }
}

From source file:org.apache.brooklyn.location.jclouds.networking.SecurityGroupEditor.java

/**
 * Find a security group with the given name. As we use jclouds, groups are created with names prefixed
 * with {@link #JCLOUDS_PREFIX_REGEX}. For convenience this method accepts names either with or without
 * the prefix.
 * @param name Name of the group to find.
 * @return An optional of the group.
 * @throws AmbiguousGroupName in the unexpected case that the cloud returns more than one matching group.
 */
public Optional<SecurityGroup> findSecurityGroupByName(final String name) {
    final Iterable<SecurityGroup> groupsMatching = findSecurityGroupsMatching(new Predicate<SecurityGroup>() {
        final String rawName = name.replaceAll(JCLOUDS_PREFIX_REGEX, "");

        @Override
        public boolean apply(final SecurityGroup input) {
            return input.getName().replaceAll(JCLOUDS_PREFIX_REGEX, "").equals(rawName);
        }
    });
    final ImmutableList<SecurityGroup> matches = ImmutableList.copyOf(groupsMatching);
    if (matches.size() == 0) {
        return Optional.absent();
    } else if (matches.size() == 1) {
        return Optional.of(matches.get(0));
    } else {
        throw new AmbiguousGroupName("Unexpected result of multiple groups matching " + name);
    }
}

From source file:org.apache.hadoop.hive.ql.optimizer.calcite.cost.HiveAlgorithmsUtil.java

public double computeSortMergeCPUCost(ImmutableList<Double> cardinalities, ImmutableBitSet sorted) {
    // Sort-merge join
    double cpuCost = 0.0;
    for (int i = 0; i < cardinalities.size(); i++) {
        double cardinality = cardinalities.get(i);
        if (!sorted.get(i)) {
            // Sort cost
            cpuCost += computeSortCPUCost(cardinality);
        }
        // Merge cost
        cpuCost += cardinality * cpuCost;
    }
    return cpuCost;
}

From source file:org.killbill.billing.entitlement.dao.MockBlockingStateDao.java

@Override
public BlockingState getBlockingStateForService(final UUID blockableId,
        final BlockingStateType blockingStateType, final String serviceName,
        final InternalTenantContext context) {
    final List<BlockingState> states = blockingStates.get(blockableId);
    if (states == null) {
        return null;
    }
    final ImmutableList<BlockingState> filtered = ImmutableList
            .<BlockingState>copyOf(Collections2.filter(states, new Predicate<BlockingState>() {
                @Override
                public boolean apply(@Nullable final BlockingState input) {
                    return input.getService().equals(serviceName);
                }
            }));
    return filtered.size() == 0 ? null : filtered.get(filtered.size() - 1);
}

From source file:com.facebook.buck.parser.TargetSpecResolver.java

private Multimap<Path, Integer> groupSpecsByBuildFile(Cell rootCell,
        ImmutableList<TargetNodeSpec> orderedSpecs) {

    Multimap<Path, Integer> perBuildFileSpecs = LinkedHashMultimap.create();
    for (int index = 0; index < orderedSpecs.size(); index++) {
        TargetNodeSpec spec = orderedSpecs.get(index);
        Path cellPath = spec.getBuildFileSpec().getCellPath();
        Cell cell = rootCell.getCell(cellPath);
        try (SimplePerfEvent.Scope perfEventScope = SimplePerfEvent.scope(eventBus,
                PerfEventId.of("FindBuildFiles"), "targetNodeSpec", spec)) {

            BuildFileSpec buildFileSpec = spec.getBuildFileSpec();
            ProjectFilesystemView projectFilesystemView = cell.getFilesystemViewForSourceFiles();
            if (!buildFileSpec.isRecursive()) {
                // If spec is not recursive, i.e. //path/to:something, then we only need to look for
                // build file under base path
                Path buildFile = projectFilesystemView
                        .resolve(buildFileSpec.getBasePath().resolve(cell.getBuildFileName()));
                perBuildFileSpecs.put(buildFile, index);
            } else {
                // For recursive spec, i.e. //path/to/... we use cached file tree
                Path basePath = spec.getBuildFileSpec().getBasePath();

                // sometimes spec comes with absolute path as base path, sometimes it is relative to
                // cell path
                // TODO(sergeyb): find out why
                if (basePath.isAbsolute()) {
                    basePath = cellPath.relativize(basePath);
                }
                FileTree fileTree = graphEngineForRecursiveSpecPerRoot.getUnchecked(cellPath)
                        .computeUnchecked(ImmutableFileTreeKey.of(basePath));

                for (Path path : FileTreeFileNameIterator.ofIterable(fileTree, cell.getBuildFileName())) {
                    perBuildFileSpecs.put(projectFilesystemView.resolve(path), index);
                }
            }
        }
    }
    return perBuildFileSpecs;
}

From source file:de.metas.ui.web.quickinput.QuickInputDescriptorFactoryService.java

private IQuickInputDescriptorFactory getQuickInputDescriptorFactory(
        final IQuickInputDescriptorFactory.MatchingKey matchingKey) {
    final ImmutableList<IQuickInputDescriptorFactory> matchingFactories = factories.get(matchingKey);
    if (matchingFactories.isEmpty()) {
        return null;
    }

    if (matchingFactories.size() > 1) {
        logger.warn("More than one factory found for {}. Using the first one: {}", matchingFactories);
    }

    return matchingFactories.get(0);
}

From source file:org.geogit.web.api.commands.FeatureDiffWeb.java

/**
 * Runs the command and builds the appropriate response
 *
 * @param context - the context to use for this command
 * 
 * @throws CommandSpecException
 */
@Override
public void run(CommandContext context) {
    if (path == null || path.trim().isEmpty()) {
        throw new CommandSpecException("No path for feature name specifed");
    }

    ObjectId newId = null;
    final CommandLocator geogit = this.getCommandLocator(context);

    if (newCommitId.equals(ObjectId.NULL.toString()) || newCommitId.trim().isEmpty()) {
        Optional<ObjectId> oid = geogit.command(ResolveTreeish.class).setTreeish(Ref.HEAD).call();
        if (oid.isPresent()) {
            newId = oid.get();
        } else {
            throw new CommandSpecException("Something went wrong, couldn't resolve HEAD");
        }
    } else {
        newId = ObjectId.valueOf(newCommitId);
    }
    ObjectId oldId = ObjectId.valueOf(oldCommitId);

    RevFeature newFeature = null;
    RevFeatureType newFeatureType = null;

    RevFeature oldFeature = null;
    RevFeatureType oldFeatureType = null;

    final Map<PropertyDescriptor, AttributeDiff> diffs;

    Optional<NodeRef> ref = parseID(newId, geogit);

    Optional<RevObject> object;

    // need these to determine if the feature was added or removed so I can build the diffs
    // myself until the FeatureDiff supports null values
    boolean removed = false;
    boolean added = false;

    if (ref.isPresent()) {
        object = geogit.command(RevObjectParse.class).setObjectId(ref.get().getMetadataId()).call();
        if (object.isPresent() && object.get() instanceof RevFeatureType) {
            newFeatureType = (RevFeatureType) object.get();
        } else {
            throw new CommandSpecException("Couldn't resolve newCommit's featureType");
        }
        object = geogit.command(RevObjectParse.class).setObjectId(ref.get().objectId()).call();
        if (object.isPresent() && object.get() instanceof RevFeature) {
            newFeature = (RevFeature) object.get();
        } else {
            throw new CommandSpecException("Couldn't resolve newCommit's feature");
        }
    } else {
        removed = true;
    }

    if (!oldId.equals(ObjectId.NULL)) {
        ref = parseID(oldId, geogit);

        if (ref.isPresent()) {
            object = geogit.command(RevObjectParse.class).setObjectId(ref.get().getMetadataId()).call();
            if (object.isPresent() && object.get() instanceof RevFeatureType) {
                oldFeatureType = (RevFeatureType) object.get();
            } else {
                throw new CommandSpecException("Couldn't resolve oldCommit's featureType");
            }
            object = geogit.command(RevObjectParse.class).setObjectId(ref.get().objectId()).call();
            if (object.isPresent() && object.get() instanceof RevFeature) {
                oldFeature = (RevFeature) object.get();
            } else {
                throw new CommandSpecException("Couldn't resolve oldCommit's feature");
            }
        } else {
            added = true;
        }
    } else {
        added = true;
    }

    if (removed) {
        Map<PropertyDescriptor, AttributeDiff> tempDiffs = new HashMap<PropertyDescriptor, AttributeDiff>();
        ImmutableList<PropertyDescriptor> attributes = oldFeatureType.sortedDescriptors();
        ImmutableList<Optional<Object>> values = oldFeature.getValues();
        for (int index = 0; index < attributes.size(); index++) {
            Optional<Object> value = values.get(index);
            if (Geometry.class.isAssignableFrom(attributes.get(index).getType().getBinding())) {
                Optional<Geometry> temp = Optional.absent();
                if (value.isPresent() || all) {
                    tempDiffs.put(attributes.get(index),
                            new GeometryAttributeDiff(Optional.fromNullable((Geometry) value.orNull()), temp));
                }
            } else {
                if (value.isPresent() || all) {
                    tempDiffs.put(attributes.get(index),
                            new GenericAttributeDiffImpl(value, Optional.absent()));
                }
            }
        }
        diffs = tempDiffs;
    } else if (added) {
        Map<PropertyDescriptor, AttributeDiff> tempDiffs = new HashMap<PropertyDescriptor, AttributeDiff>();
        ImmutableList<PropertyDescriptor> attributes = newFeatureType.sortedDescriptors();
        ImmutableList<Optional<Object>> values = newFeature.getValues();
        for (int index = 0; index < attributes.size(); index++) {
            Optional<Object> value = values.get(index);
            if (Geometry.class.isAssignableFrom(attributes.get(index).getType().getBinding())) {
                Optional<Geometry> temp = Optional.absent();
                if (value.isPresent() || all) {
                    tempDiffs.put(attributes.get(index),
                            new GeometryAttributeDiff(temp, Optional.fromNullable((Geometry) value.orNull())));
                }
            } else {
                if (value.isPresent() || all) {
                    tempDiffs.put(attributes.get(index),
                            new GenericAttributeDiffImpl(Optional.absent(), value));
                }
            }
        }
        diffs = tempDiffs;
    } else {
        FeatureDiff diff = new FeatureDiff(path, newFeature, oldFeature, newFeatureType, oldFeatureType, all);
        diffs = diff.getDiffs();
    }

    context.setResponseContent(new CommandResponse() {

        @Override
        public void write(ResponseWriter out) throws Exception {
            out.start();
            out.writeFeatureDiffResponse(diffs);
            out.finish();
        }
    });
}

From source file:com.opengamma.strata.collect.io.PropertySet.java

/**
 * Gets a single value from this property set.
 * <p>
 * This returns the value associated with the specified key.
 * If more than one value, or no value, is associated with the key an exception is thrown.
 * 
 * @param key  the key name
 * @return the value
 * @throws IllegalArgumentException if the key does not exist, or if more than one value is associated
 */
public String value(String key) {
    ArgChecker.notNull(key, "key");
    ImmutableList<String> values = keyValueMap.get(key);
    if (values.size() == 0) {
        throw new IllegalArgumentException("Unknown key: " + key);
    }
    if (values.size() > 1) {
        throw new IllegalArgumentException("Multiple values for key: " + key);
    }
    return values.get(0);
}

From source file:com.opengamma.strata.collect.io.CsvRow.java

/**
 * Creates an instance, specifying the headers and row.
 * <p>
 * See {@link CsvFile}.
 * 
 * @param headers  the headers
 * @param fields  the fields
 */
private CsvRow(ImmutableList<String> headers, ImmutableList<String> fields) {
    this.headers = headers;
    // need to allow duplicate headers and only store the first instance
    Map<String, Integer> searchHeaders = new HashMap<>();
    for (int i = 0; i < headers.size(); i++) {
        String searchHeader = headers.get(i).toLowerCase(Locale.ENGLISH);
        searchHeaders.putIfAbsent(searchHeader, i);
    }
    this.searchHeaders = ImmutableMap.copyOf(searchHeaders);
    this.fields = fields;
}

From source file:com.opengamma.strata.pricer.credit.IsdaCompliantIndexCurveCalibrator.java

/**
 * Calibrates the index curve to the market data.
 * <p>
 * This creates the single credit curve for CDS index trades.
 * The curve nodes in {@code IsdaCreditCurveDefinition} must be CDS index.
 * <p>
 * The relevant discount curve and recovery rate curve must be stored in {@code ratesProvider}. 
 * The day count convention for the resulting credit curve is the same as that of the discount curve.
 * 
 * @param curveDefinition  the curve definition
 * @param marketData  the market data
 * @param ratesProvider  the rates provider
 * @param refData  the reference data
 * @return the index curve
 */
public LegalEntitySurvivalProbabilities calibrate(IsdaCreditCurveDefinition curveDefinition,
        MarketData marketData, ImmutableCreditRatesProvider ratesProvider, ReferenceData refData) {

    ArgChecker.isTrue(curveDefinition.getCurveValuationDate().equals(ratesProvider.getValuationDate()),
            "ratesProvider and curveDefinition must be based on the same valuation date");
    ImmutableList<CdsIndexIsdaCreditCurveNode> curveNodes = curveDefinition.getCurveNodes().stream()
            .filter(n -> n instanceof CdsIndexIsdaCreditCurveNode).map(n -> (CdsIndexIsdaCreditCurveNode) n)
            .collect(Guavate.toImmutableList());
    // Homogeneity of curveNode will be checked within IsdaCompliantCreditCurveCalibrator
    double indexFactor = computeIndexFactor(curveNodes.get(0), marketData);
    List<CdsIsdaCreditCurveNode> cdsNodes = curveNodes.stream().map(i -> toCdsNode(i))
            .collect(Guavate.toImmutableList());
    LegalEntitySurvivalProbabilities creditCurve = creditCurveCalibrator.calibrate(cdsNodes,
            curveDefinition.getName(), marketData, ratesProvider, curveDefinition.getDayCount(),
            curveDefinition.getCurrency(), curveDefinition.isComputeJacobian(), false, refData);
    NodalCurve underlyingCurve = ((IsdaCreditDiscountFactors) creditCurve.getSurvivalProbabilities())
            .getCurve();
    CurveMetadata metadata = underlyingCurve.getMetadata().withInfo(CurveInfoType.CDS_INDEX_FACTOR,
            indexFactor);
    if (curveDefinition.isStoreNodeTrade()) {
        int nNodes = curveDefinition.getCurveNodes().size();
        ImmutableList<ParameterMetadata> parameterMetadata = IntStream.range(0, nNodes)
                .mapToObj(n -> ResolvedTradeParameterMetadata.of(
                        curveNodes.get(n).trade(1d, marketData, refData).getUnderlyingTrade().resolve(refData),
                        curveNodes.get(n).getLabel()))
                .collect(Guavate.toImmutableList());
        metadata = metadata.withParameterMetadata(parameterMetadata);
    }
    NodalCurve curveWithFactor = underlyingCurve.withMetadata(metadata);

    return LegalEntitySurvivalProbabilities.of(creditCurve.getLegalEntityId(), IsdaCreditDiscountFactors
            .of(creditCurve.getCurrency(), creditCurve.getValuationDate(), curveWithFactor));
}