List of usage examples for com.google.common.collect.ImmutableList.size()
int size();
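Returns the number of elements in the list. A minimal sketch of typical usage (the class and variable names below are illustrative, not taken from any of the projects listed):

import com.google.common.collect.ImmutableList;

public class ImmutableListSizeExample {
    public static void main(String[] args) {
        // ImmutableList.of(...) builds a fixed, immutable list; size() reports how many elements it holds.
        ImmutableList<String> colors = ImmutableList.of("red", "green", "blue");
        System.out.println(colors.size());    // 3
        // Prefer isEmpty() over size() == 0 when only emptiness matters.
        System.out.println(colors.isEmpty()); // false
    }
}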
From source file:com.facebook.buck.cxx.toolchain.elf.ElfVerNeed.java
public ElfVerNeed compact() {
  // Rebuild each Verneed entry and its Vernaux entries, recomputing counts and
  // next-entry offsets from the list sizes so the table is densely packed.
  return new ElfVerNeed(
      RichStream.from(MoreIterables.enumerate(this.entries))
          .map(
              vnp -> {
                int verneedIndex = vnp.getFirst();
                ElfVerNeed.Verneed verneed = vnp.getSecond().getFirst();
                ImmutableList<ElfVerNeed.Vernaux> vernauxes = vnp.getSecond().getSecond();
                return new Pair<>(
                    new ElfVerNeed.Verneed(
                        verneed.vn_version,
                        vernauxes.size(),
                        verneed.vn_file,
                        vernauxes.isEmpty() ? 0 : ElfVerNeed.Verneed.BYTES,
                        verneedIndex == entries.size() - 1
                            ? 0
                            : ElfVerNeed.Verneed.BYTES
                                + ElfVerNeed.Vernaux.BYTES * vernauxes.size()),
                    RichStream.from(MoreIterables.enumerate(vernauxes))
                        .map(
                            vnxp ->
                                new ElfVerNeed.Vernaux(
                                    vnxp.getSecond().vna_hash,
                                    vnxp.getSecond().vna_flags,
                                    vnxp.getSecond().vna_other,
                                    vnxp.getSecond().vna_name,
                                    vnxp.getFirst() == vernauxes.size() - 1
                                        ? 0
                                        : ElfVerNeed.Vernaux.BYTES))
                        .toImmutableList());
              })
          .toImmutableList());
}
From source file:com.google.devtools.common.options.InvocationPolicyEnforcer.java
/**
 * Expand a single policy. If the policy is not about an expansion flag, this will simply return a
 * list with a single element, oneself. If the policy is for an expansion flag, the policy will
 * get split into multiple policies applying to each flag the original flag expands to.
 *
 * <p>None of the flagPolicies returned should be on expansion flags.
 */
private static List<FlagPolicyWithContext> expandPolicy(
    FlagPolicyWithContext originalPolicy, OptionsParser parser, Level loglevel)
    throws OptionsParsingException {
  List<FlagPolicyWithContext> expandedPolicies = new ArrayList<>();
  boolean isExpansion = originalPolicy.description.isExpansion();
  ImmutableList<ParsedOptionDescription> subflags =
      parser.getExpansionValueDescriptions(
          originalPolicy.description.getOptionDefinition(), originalPolicy.origin);

  // If we have nothing to expand to, no need to do any further work.
  if (subflags.isEmpty()) {
    return ImmutableList.of(originalPolicy);
  }

  if (logger.isLoggable(loglevel)) {
    // Log the expansion. This is only really useful for understanding the invocation policy
    // itself.
    List<String> subflagNames = new ArrayList<>(subflags.size());
    for (ParsedOptionDescription subflag : subflags) {
      subflagNames.add("--" + subflag.getOptionDefinition().getOptionName());
    }
    logger.logp(
        loglevel,
        "InvocationPolicyEnforcer",
        "expandPolicy",
        String.format(
            "Expanding %s on option %s to its %s: %s.",
            originalPolicy.policy.getOperationCase(),
            originalPolicy.policy.getFlagName(),
            isExpansion ? "expansions" : "implied flags",
            Joiner.on("; ").join(subflagNames)));
  }

  // Repeated flags are special, and could be set multiple times in an expansion, with the user
  // expecting both values to be valid. Collect these separately.
  Multimap<OptionDescription, ParsedOptionDescription> repeatableSubflagsInSetValues =
      ArrayListMultimap.create();

  // Create a flag policy for the child that looks like the parent's policy "transferred" to its
  // child. Note that this only makes sense for SetValue, when setting an expansion flag, or
  // UseDefault, when preventing it from being set.
  for (ParsedOptionDescription currentSubflag : subflags) {
    OptionDescription subflagOptionDescription =
        parser.getOptionDescription(currentSubflag.getOptionDefinition().getOptionName());

    if (currentSubflag.getOptionDefinition().allowsMultiple()
        && originalPolicy.policy.getOperationCase().equals(OperationCase.SET_VALUE)) {
      repeatableSubflagsInSetValues.put(subflagOptionDescription, currentSubflag);
    } else {
      FlagPolicyWithContext subflagAsPolicy =
          getSingleValueSubflagAsPolicy(
              subflagOptionDescription, currentSubflag, originalPolicy, isExpansion);
      // In case any of the expanded flags are themselves expansions, recurse.
      expandedPolicies.addAll(expandPolicy(subflagAsPolicy, parser, loglevel));
    }
  }

  // If there are any repeatable flag SetValues, deal with them together now.
  // Note that expansion flags have no value, and so cannot have multiple values either.
  // Skipping the recursion above is fine.
  for (OptionDescription repeatableFlag : repeatableSubflagsInSetValues.keySet()) {
    int numValues = repeatableSubflagsInSetValues.get(repeatableFlag).size();
    ArrayList<String> newValues = new ArrayList<>(numValues);
    ArrayList<OptionInstanceOrigin> origins = new ArrayList<>(numValues);
    for (ParsedOptionDescription setValue : repeatableSubflagsInSetValues.get(repeatableFlag)) {
      newValues.add(setValue.getUnconvertedValue());
      origins.add(setValue.getOrigin());
    }
    // These options come from expanding a single policy, so they have effectively the same
    // priority. They could have come from different expansions or implicit requirements in the
    // recursive resolving of the option list, so just pick the first one. Do collapse the source
    // strings though, in case there are different sources.
    OptionInstanceOrigin arbitraryFirstOptionOrigin = origins.get(0);
    OptionInstanceOrigin originOfSubflags =
        new OptionInstanceOrigin(
            arbitraryFirstOptionOrigin.getPriority(),
            origins.stream()
                .map(OptionInstanceOrigin::getSource)
                .distinct()
                .collect(Collectors.joining(", ")),
            arbitraryFirstOptionOrigin.getImplicitDependent(),
            arbitraryFirstOptionOrigin.getExpandedFrom());
    expandedPolicies.add(
        getSetValueSubflagAsPolicy(repeatableFlag, newValues, originOfSubflags, originalPolicy));
  }

  // Don't add the original policy if it was an expansion flag, which have no value, but do add
  // it if there was either no expansion or if it was a valued flag with implicit requirements.
  if (!isExpansion) {
    expandedPolicies.add(originalPolicy);
  }
  return expandedPolicies;
}
From source file:com.facebook.buck.cxx.elf.ElfVerDef.java
public ElfVerDef compact() {
  return new ElfVerDef(
      RichStream.from(MoreIterables.enumerate(this.entries))
          .map(
              vdp -> {
                int verdefIndex = vdp.getFirst();
                Verdef verdef = vdp.getSecond().getFirst();
                ImmutableList<Verdaux> verdauxes = vdp.getSecond().getSecond();
                return new Pair<>(
                    new Verdef(
                        verdef.vd_version,
                        verdef.vd_flags,
                        verdef.vd_ndx,
                        verdauxes.size(),
                        verdef.vd_hash,
                        verdauxes.size() == 0 ? 0 : Verdef.BYTES,
                        verdefIndex == entries.size() - 1
                            ? 0
                            : Verdef.BYTES + Verdaux.BYTES * verdauxes.size()),
                    RichStream.from(MoreIterables.enumerate(verdauxes))
                        .map(
                            vdxp ->
                                new Verdaux(
                                    vdxp.getSecond().vda_name,
                                    vdxp.getFirst() == verdauxes.size() - 1 ? 0 : Verdaux.BYTES))
                        .toImmutableList());
              })
          .toImmutableList());
}
From source file:com.facebook.buck.cxx.elf.ElfVerNeed.java
public ElfVerNeed compact() {
  return new ElfVerNeed(
      RichStream.from(MoreIterables.enumerate(this.entries))
          .map(
              vnp -> {
                int verneedIndex = vnp.getFirst();
                ElfVerNeed.Verneed verneed = vnp.getSecond().getFirst();
                ImmutableList<ElfVerNeed.Vernaux> vernauxes = vnp.getSecond().getSecond();
                return new Pair<>(
                    new ElfVerNeed.Verneed(
                        verneed.vn_version,
                        vernauxes.size(),
                        verneed.vn_file,
                        vernauxes.size() == 0 ? 0 : ElfVerNeed.Verneed.BYTES,
                        verneedIndex == entries.size() - 1
                            ? 0
                            : ElfVerNeed.Verneed.BYTES
                                + ElfVerNeed.Vernaux.BYTES * vernauxes.size()),
                    RichStream.from(MoreIterables.enumerate(vernauxes))
                        .map(
                            vnxp ->
                                new ElfVerNeed.Vernaux(
                                    vnxp.getSecond().vna_hash,
                                    vnxp.getSecond().vna_flags,
                                    vnxp.getSecond().vna_other,
                                    vnxp.getSecond().vna_name,
                                    vnxp.getFirst() == vernauxes.size() - 1
                                        ? 0
                                        : ElfVerNeed.Vernaux.BYTES))
                        .toImmutableList());
              })
          .toImmutableList());
}
From source file:com.palantir.paxos.PaxosProposerImpl.java
private PaxosProposerImpl(
        PaxosLearner localLearner,
        ImmutableList<PaxosAcceptor> acceptors,
        ImmutableList<PaxosLearner> learners,
        int quorumSize,
        String uuid,
        Executor executor) {
    Preconditions.checkState(
            quorumSize > acceptors.size() / 2,
            "quorum size needs to be at least the majority of acceptors");
    this.localLearner = localLearner;
    this.allAcceptors = acceptors;
    this.allLearners = learners;
    this.quorumSize = quorumSize;
    this.uuid = uuid;
    this.proposalNum = new AtomicLong();
    this.executor = executor;
}
From source file:org.waveprotocol.box.server.persistence.blocks.impl.SegmentOperationImpl.java
public SegmentOperationImpl(ImmutableList<? extends WaveletOperation> operations) {
  Preconditions.checkArgument(!operations.isEmpty(), "No operations");
  // The raw operation takes its context from the last operation in the list.
  rawOperation = new RawOperation(
      GsonSerializer.OPERATION_SERIALIZER,
      operations,
      operations.get(operations.size() - 1).getContext());
}
From source file:com.facebook.presto.operator.aggregation.AbstractTestApproximateAggregationFunction.java
private void testCorrectnessOfErrorFunction(List<Number> inputList) throws Exception {
    int inRange = 0;
    int numberOfRuns = 1000;
    double sampleRatio = 1 / (double) WEIGHT;
    double actual = getExpectedValue(inputList);
    Random rand = new Random(1);

    for (int i = 0; i < numberOfRuns; i++) {
        //Compute Sampled Value using sampledList (numberOfRuns times)
        ImmutableList.Builder<Number> sampledList = ImmutableList.builder();
        for (Number x : inputList) {
            if (rand.nextDouble() < sampleRatio) {
                sampledList.add(x);
            }
        }

        ImmutableList<Number> list = sampledList.build();
        BlockBuilder builder = getType().createBlockBuilder(new BlockBuilderStatus(), list.size());
        for (Number sample : list) {
            if (getType().equals(BIGINT)) {
                BIGINT.writeLong(builder, sample.longValue());
            }
            else if (getType().equals(DOUBLE)) {
                DOUBLE.writeDouble(builder, sample.doubleValue());
            }
            else {
                throw new AssertionError("Can only handle longs and doubles");
            }
        }

        Page page = new Page(builder.build());
        page = OperatorAssertion.appendSampleWeight(ImmutableList.of(page), WEIGHT).get(0);
        Accumulator accumulator = getFunction()
                .bind(ImmutableList.of(0), Optional.empty(), Optional.of(page.getChannelCount() - 1), getConfidence())
                .createAccumulator();

        accumulator.addInput(page);
        Block result = getFinalBlock(accumulator);

        String approxValue = BlockAssertions.toValues(accumulator.getFinalType(), result).get(0).toString();
        double approx = Double.parseDouble(approxValue.split(" ")[0]);
        double error = Double.parseDouble(approxValue.split(" ")[2]);

        //Check if actual answer lies within [approxAnswer - error, approxAnswer + error]
        if (Math.abs(approx - actual) <= error) {
            inRange++;
        }
    }

    BinomialDistribution binomial = new BinomialDistribution(numberOfRuns, getConfidence());
    int lowerBound = binomial.inverseCumulativeProbability(0.01);
    int upperBound = binomial.inverseCumulativeProbability(0.99);
    assertTrue(lowerBound < inRange && inRange < upperBound,
            String.format("%d out of %d passed. Expected [%d, %d]", inRange, numberOfRuns, lowerBound, upperBound));
}
From source file:com.bendb.thrifty.schema.Loader.java
/**
 * Loads and parses a Thrift file and all files included (both directly and
 * transitively) by it.
 *
 * @param path A relative or absolute path to a Thrift file.
 * @param loadedFiles A mapping of absolute paths to parsed Thrift files.
 */
private void loadFileRecursively(String path, Map<String, ThriftFileElement> loadedFiles) throws IOException {
    ThriftFileElement element = null;
    File dir = null;

    File file = findFirstExisting(path, null);
    if (file != null) {
        // Resolve symlinks, redundant '.' and '..' segments.
        file = file.getCanonicalFile();
        if (loadedFiles.containsKey(file.getAbsolutePath())) {
            return;
        }
        dir = file.getParentFile();
        element = loadSingleFile(file.getParentFile(), file.getName());
    }

    if (element == null) {
        throw new FileNotFoundException("Failed to locate " + path + " in " + includePaths);
    }

    loadedFiles.put(file.getAbsolutePath(), element);

    ImmutableList<IncludeElement> includes = element.includes();
    if (includes.size() > 0) {
        includePaths.addFirst(dir);
        for (IncludeElement include : includes) {
            if (!include.isCpp()) {
                loadFileRecursively(include.path(), loadedFiles);
            }
        }
        includePaths.removeFirst();
    }
}
From source file:org.fcrepo.auth.xacml.XACMLWorkspaceInitializer.java
private void registerNodeTypes() {
    Session session = null;
    try {
        session = getJcrSession(sessionFactory.getInternalSession());
        final NodeTypeManager mgr = (NodeTypeManager) session.getWorkspace().getNodeTypeManager();
        final URL cnd = XACMLWorkspaceInitializer.class.getResource("/cnd/xacml-policy.cnd");
        final NodeTypeIterator nti = mgr.registerNodeTypes(cnd, true);
        while (nti.hasNext()) {
            final NodeType nt = nti.nextNodeType();
            LOGGER.debug("registered node type: {}", nt.getName());
        }

        // Add "authz:xacmlAssignable" mixin to "fedora:Resource" type
        final NodeType nodeType = mgr.getNodeType("fedora:Resource");
        final NodeTypeTemplate nodeTypeTemplate = mgr.createNodeTypeTemplate(nodeType);
        final String[] superTypes = nodeType.getDeclaredSupertypeNames();
        final ImmutableList.Builder<String> listBuilder = ImmutableList.builder();
        listBuilder.add(superTypes);
        listBuilder.add("authz:xacmlAssignable");
        final ImmutableList<String> newSuperTypes = listBuilder.build();
        nodeTypeTemplate.setDeclaredSuperTypeNames(newSuperTypes.toArray(new String[newSuperTypes.size()]));
        mgr.registerNodeType(nodeTypeTemplate, true);

        session.save();
        LOGGER.debug("Registered XACML policy node types");
    } catch (final RepositoryException | IOException e) {
        throw new Error("Cannot register XACML policy node types", e);
    } finally {
        if (session != null) {
            session.logout();
        }
    }
}
From source file:com.freiheit.fuava.simplebatch.processor.RetryingProcessor.java
private Iterable<Result<OriginalItem, Output>> doPersist(final Iterable<Result<OriginalItem, Input>> iterable) {
    final ImmutableList<Result<OriginalItem, Input>> successes =
            FluentIterable.from(iterable).filter(Result::isSuccess).toList();
    final ImmutableList<Result<OriginalItem, Input>> fails =
            FluentIterable.from(iterable).filter(Result::isFailed).toList();

    final ImmutableList<Input> outputs = getSuccessOutputs(successes);
    final List<Output> persistenceResults = outputs.isEmpty() ? ImmutableList.of() : apply(outputs);

    if (persistenceResults.size() != outputs.size() || persistenceResults.size() != successes.size()) {
        throw new IllegalStateException("persistence results of unexpected size produced by " + this);
    }

    final ImmutableList.Builder<Result<OriginalItem, Output>> b = ImmutableList.builder();
    for (int i = 0; i < outputs.size(); i++) {
        final Result<OriginalItem, Input> processingResult = successes.get(i);
        final Output persistenceResult = persistenceResults.get(i);
        b.add(Result.<OriginalItem, Output>builder(processingResult).withOutput(persistenceResult).success());
    }
    for (final Result<OriginalItem, Input> failed : fails) {
        b.add(Result.<OriginalItem, Output>builder(failed).failed());
    }
    return b.build();
}