List of usage examples for com.google.common.base.Predicates.in
public static <T> Predicate<T> in(Collection<? extends T> target)
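Predicates.in(target) returns a predicate that evaluates to true exactly when the tested reference is a member of target; the collection is not defensively copied, so later changes to it change the predicate's behavior. Before the full-size examples below, a minimal self-contained sketch of the common Iterables.filter idiom (class name and data are illustrative):

import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;

import java.util.List;
import java.util.Set;

public class PredicatesInExample {
    public static void main(String[] args) {
        // Illustrative data: keep only the candidates that appear in the allow-list.
        Set<String> allowed = ImmutableSet.of("alice", "bob");
        List<String> candidates = ImmutableList.of("alice", "carol", "bob", "dave");

        Predicate<String> isAllowed = Predicates.in(allowed);

        // Prints [alice, bob].
        System.out.println(Iterables.toString(Iterables.filter(candidates, isAllowed)));
    }
}

Membership is checked with Collection.contains, so prefer a Set-backed collection when the predicate will be applied many times.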
From source file:org.apache.aurora.common.args.ArgScanner.java
/**
 * Applies argument values to fields based on their annotations.
 *
 * @param parserOracle ParserOracle available to parse raw args with.
 * @param verifiers Verifiers available to verify argument constraints with.
 * @param argsInfo Fields to apply argument values to.
 * @param args Unparsed argument values.
 * @param positionalArgs The unparsed positional arguments.
 * @return {@code true} if the given {@code args} were successfully applied to their
 *         corresponding {@link Arg} fields.
 */
private boolean process(final ParserOracle parserOracle, Verifiers verifiers, ArgsInfo argsInfo,
        Map<String, String> args, List<String> positionalArgs) {
    if (!Sets.intersection(args.keySet(), ArgumentInfo.HELP_ARGS).isEmpty()) {
        printHelp(verifiers, argsInfo);
        return false;
    }

    Iterable<? extends OptionInfo<?>> optionInfos = argsInfo.getOptionInfos();

    final Set<String> argsFailedToParse = Sets.newHashSet();
    final Set<String> argsConstraintsFailed = Sets.newHashSet();

    Set<String> argAllShortNamesNoCollisions = getNoCollisions(optionInfos);

    final Map<String, OptionInfo<?>> argsByName = ImmutableMap.<String, OptionInfo<?>>builder()
            // Map by short arg name -> arg def.
            .putAll(Maps.uniqueIndex(
                    Iterables.filter(optionInfos,
                            Predicates.compose(Predicates.in(argAllShortNamesNoCollisions),
                                    GET_OPTION_INFO_NAME)),
                    GET_OPTION_INFO_NAME))
            // Map by negated short arg name (for booleans).
            .putAll(Maps.uniqueIndex(
                    Iterables.filter(Iterables.filter(optionInfos, IS_BOOLEAN),
                            Predicates.compose(Predicates.in(argAllShortNamesNoCollisions),
                                    GET_OPTION_INFO_NEGATED_NAME)),
                    GET_OPTION_INFO_NEGATED_NAME))
            .build();

    // TODO(William Farner): Make sure to disallow duplicate arg specification by short and
    // canonical names.
    // TODO(William Farner): Support non-atomic argument constraints. @OnlyIfSet, @OnlyIfNotSet,
    // @ExclusiveOf to define inter-argument constraints.

    Set<String> recognizedArgs = Sets.intersection(argsByName.keySet(), args.keySet());

    for (String argName : recognizedArgs) {
        String argValue = args.get(argName);
        OptionInfo<?> optionInfo = argsByName.get(argName);

        try {
            optionInfo.load(parserOracle, argName, argValue);
        } catch (IllegalArgumentException e) {
            argsFailedToParse.add(argName + " - " + e.getMessage());
        }
    }

    Set<String> commandLineArgumentInfos = Sets.newTreeSet();

    Iterable<? extends ArgumentInfo<?>> allArguments = argsInfo.getOptionInfos();
    for (ArgumentInfo<?> anArgumentInfo : allArguments) {
        Arg<?> arg = anArgumentInfo.getArg();

        commandLineArgumentInfos.add(String.format("%s: %s", anArgumentInfo.getName(), arg.uncheckedGet()));

        try {
            anArgumentInfo.verify(verifiers);
        } catch (IllegalArgumentException e) {
            argsConstraintsFailed.add(anArgumentInfo.getName() + " - " + e.getMessage());
        }
    }

    ImmutableMultimap<String, String> warningMessages = ImmutableMultimap.<String, String>builder()
            .putAll("Unrecognized arguments", Sets.difference(args.keySet(), argsByName.keySet()))
            .putAll("Failed to parse", argsFailedToParse)
            .putAll("Value did not meet constraints", argsConstraintsFailed)
            .build();

    if (!warningMessages.isEmpty()) {
        printHelp(verifiers, argsInfo);
        StringBuilder sb = new StringBuilder();
        for (Map.Entry<String, Collection<String>> warnings : warningMessages.asMap().entrySet()) {
            sb.append(warnings.getKey()).append(":\n\t")
                    .append(Joiner.on("\n\t").join(warnings.getValue()))
                    .append("\n");
        }
        throw new IllegalArgumentException(sb.toString());
    }

    LOG.info("-------------------------------------------------------------------------");
    LOG.info("Command line argument values");
    for (String commandLineArgumentInfo : commandLineArgumentInfos) {
        LOG.info(commandLineArgumentInfo);
    }
    LOG.info("-------------------------------------------------------------------------");

    return true;
}
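The ArgScanner example pairs Predicates.in with Predicates.compose so that membership is tested on a derived key (the option's name, via GET_OPTION_INFO_NAME) rather than on the element itself. A minimal hedged sketch of the same composition, with illustrative types and data:

import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;

import java.util.Set;

public class ComposeInExample {
    public static void main(String[] args) {
        Set<Integer> wantedLengths = ImmutableSet.of(3, 5);

        // The key-extraction function, analogous to GET_OPTION_INFO_NAME above.
        Function<String, Integer> length = new Function<String, Integer>() {
            @Override
            public Integer apply(String input) {
                return input.length();
            }
        };

        // True when length(s) is a member of wantedLengths.
        Predicate<String> hasWantedLength = Predicates.compose(Predicates.in(wantedLengths), length);

        // Prints [cat, zebra, horse].
        System.out.println(Iterables.toString(
                Iterables.filter(ImmutableList.of("cat", "zebra", "horse", "ox"), hasWantedLength)));
    }
}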
From source file:com.palantir.ptoss.cinch.core.BindingContext.java
private List<Field> getBindableModelFields() {
    List<Field> allModelFields = Reflections.getFieldsOfTypeForClassHierarchy(object.getClass(),
            BindableModel.class);
    List<Field> notBindableFields = Reflections.getAnnotatedFieldsForClassHierarchy(object.getClass(),
            NotBindable.class);
    allModelFields = ImmutableList
            .copyOf(Iterables.filter(allModelFields, Predicates.not(Predicates.in(notBindableFields))));
    List<Field> nonFinalModelFields = ImmutableList
            .copyOf(Iterables.filter(allModelFields, Predicates.not(Reflections.IS_FIELD_FINAL)));
    if (!nonFinalModelFields.isEmpty()) {
        throw new BindingException("All BindableModels have to be final or marked with @NotBindable, but "
                + Iterables.transform(nonFinalModelFields, Reflections.FIELD_TO_NAME) + " are not.");
    }
    return allModelFields;
}
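This example also shows the most common companion idiom in this list: Predicates.not(Predicates.in(excluded)) performs set subtraction over an Iterable. A small self-contained sketch (data is illustrative):

import com.google.common.base.Predicates;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;

import java.util.List;
import java.util.Set;

public class ExclusionFilterExample {
    public static void main(String[] args) {
        List<Integer> all = ImmutableList.of(1, 2, 3, 4, 5);
        Set<Integer> excluded = ImmutableSet.of(2, 4);

        // "all minus excluded", preserving iteration order: prints [1, 3, 5].
        List<Integer> kept = ImmutableList.copyOf(
                Iterables.filter(all, Predicates.not(Predicates.in(excluded))));
        System.out.println(kept);
    }
}

Because Predicates.in ultimately calls contains, passing a Set (rather than a List, as the Cinch code above does) keeps repeated filtering from going quadratic.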
From source file:com.google.devtools.build.lib.runtime.BlazeOptionHandler.java
/**
 * Parses the options from .rc files for a command invocation. It works in one of two modes;
 * either it loads the non-config options, or the config options that are specified in the
 * {@code configs} parameter.
 *
 * <p>This method adds every option pertaining to the specified command to the options parser. To
 * do that, it needs the command -> option mapping that is generated from the .rc files.
 *
 * <p>It is not as trivial as simply taking the list of options for the specified command because
 * commands can inherit arguments from each other, and we have to respect that (e.g. if an option
 * is specified for 'build', it needs to take effect for the 'test' command, too).
 *
 * <p>Note that the order in which the options are parsed is well-defined: all options from the
 * same rc file are parsed at the same time, and the rc files are handled in the order in which
 * they were passed in from the client.
 *
 * @param rcfileNotes note message that would be printed during parsing
 * @param commandAnnotation the command for which options should be parsed.
 * @param optionsParser parser to receive parsed options.
 * @param optionsMap .rc files in structured format: a list of pairs, where the first part is the
 *     name of the rc file, and the second part is a multimap of command name (plus config, if
 *     present) to the list of options for that command
 * @param configs the configs for which to parse options; if {@code null}, non-config options are
 *     parsed
 * @param unknownConfigs optional; a collection that the method will populate with the config
 *     values in {@code configs} that none of the .rc files had entries for
 * @throws OptionsParsingException
 */
protected static void parseOptionsForCommand(List<String> rcfileNotes, Command commandAnnotation,
        OptionsParser optionsParser, List<Pair<String, ListMultimap<String, String>>> optionsMap,
        @Nullable Collection<String> configs, @Nullable Collection<String> unknownConfigs)
        throws OptionsParsingException {
    Set<String> knownConfigs = new HashSet<>();
    for (String commandToParse : getCommandNamesToParse(commandAnnotation)) {
        for (Pair<String, ListMultimap<String, String>> entry : optionsMap) {
            String rcFile = entry.first;
            List<String> allOptions = new ArrayList<>();
            if (configs == null) {
                Collection<String> values = entry.second.get(commandToParse);
                if (!values.isEmpty()) {
                    allOptions.addAll(entry.second.get(commandToParse));
                    String inherited = commandToParse.equals(commandAnnotation.name()) ? "" : "Inherited ";
                    String source = rcFile.equals("client")
                            ? "Options provided by the client"
                            : String.format("Reading rc options for '%s' from %s",
                                    commandAnnotation.name(), rcFile);
                    rcfileNotes.add(String.format("%s:\n %s'%s' options: %s", source, inherited,
                            commandToParse, Joiner.on(' ').join(values)));
                }
            } else {
                for (String config : configs) {
                    String configDef = commandToParse + ":" + config;
                    Collection<String> values = entry.second.get(configDef);
                    if (!values.isEmpty()) {
                        allOptions.addAll(values);
                        knownConfigs.add(config);
                        rcfileNotes.add(String.format(
                                "Found applicable config definition %s in file %s: %s",
                                configDef, rcFile, String.join(" ", values)));
                    }
                }
            }
            processOptionList(optionsParser, rcFile, allOptions);
        }
    }
    if (unknownConfigs != null && configs != null && configs.size() > knownConfigs.size()) {
        configs.stream()
                .filter(Predicates.not(Predicates.in(knownConfigs)))
                .forEachOrdered(unknownConfigs::add);
    }
}
From source file:org.eclipse.sirius.diagram.sequence.business.internal.operation.SynchronizeISequenceEventsSemanticOrderingOperation.java
private EventEnd findEndPredecessor(EObject semanticElement, boolean startingEnd, List<EventEnd> eventEnds,
        Set<EventEnd> toIgnore) {
    EventEnd result = null;
    for (EventEnd end : Iterables.filter(eventEnds, Predicates.not(Predicates.in(toIgnore)))) {
        if (isLookedEnd(semanticElement, startingEnd, end)) {
            break;
        } else {
            result = end;
        }
    }
    return result;
}
From source file:com.google.errorprone.fixes.SuggestedFixes.java
/** Deletes the given exceptions from a method's throws clause. */
public static Fix deleteExceptions(MethodTree tree, final VisitorState state,
        List<ExpressionTree> toDelete) {
    List<? extends ExpressionTree> trees = tree.getThrows();
    if (toDelete.size() == trees.size()) {
        return SuggestedFix.replace(getThrowsPosition(tree, state),
                state.getEndPosition(getLast(trees)), "");
    }
    String replacement = FluentIterable.from(tree.getThrows())
            .filter(Predicates.not(Predicates.in(toDelete)))
            .transform(new Function<ExpressionTree, String>() {
                @Override
                @Nullable
                public String apply(ExpressionTree input) {
                    return state.getSourceForNode(input);
                }
            })
            .join(Joiner.on(", "));
    return SuggestedFix.replace(((JCTree) tree.getThrows().get(0)).getStartPosition(),
            state.getEndPosition(getLast(tree.getThrows())), replacement);
}
From source file:edu.udo.scaffoldhunter.model.dataimport.Importer.java
private List<PropertyDefinition> saveDatasetAndPropertyDefinitions(Dataset dataset,
        Map<String, PropertyDefinition> propDefs) {
    for (ImportJob j : importProcess.getJobs()) {
        for (Map.Entry<String, SourcePropertyMapping> e : j.getPropertyMappings().entrySet()) {
            if (e.getValue().getPropertyDefiniton() != null) {
                PropertyDefinition propDef = e.getValue().getPropertyDefiniton();
                // find unique key if key is not set
                if (propDef.getKey() == null || propDef.getKey().isEmpty()) {
                    String newKey;
                    for (int i = 0;; i++) {
                        newKey = propDef.getTitle().toUpperCase() + i;
                        /*
                         * prevents key collision with scaffold properties
                         * and calculated properties
                         */
                        if (newKey.startsWith(ScaffoldTreeGenerator.SCAFFOLD_PROPERTY_KEY_PREFIX)
                                || newKey.startsWith(Calculator.CALC_PLUGINS_PROPERTY_KEY_PREFIX)) {
                            newKey = "_" + newKey;
                        }
                        if (!propDefs.containsKey(newKey))
                            break;
                    }
                    propDef.setKey(newKey);
                } else {
                    /*
                     * prevents key collision with scaffold properties and
                     * calculated properties
                     */
                    if (propDef.getKey().startsWith(ScaffoldTreeGenerator.SCAFFOLD_PROPERTY_KEY_PREFIX)
                            || propDef.getKey().startsWith(Calculator.CALC_PLUGINS_PROPERTY_KEY_PREFIX)) {
                        propDef.setKey("_" + propDef.getKey());
                    }
                }
                propDef.setDataset(dataset);
                propDefs.put(propDef.getKey(), propDef);
            }
        }
    }
    if (importProcess.getDataset() != null) {
        Predicate<PropertyDefinition> notOld = Predicates.and(Predicates.notNull(),
                Predicates.not(Predicates.in(importProcess.getDataset().getPropertyDefinitions().values())));
        newPropDefs = Lists.newArrayList(Iterables.filter(propDefs.values(), notOld));
    } else {
        newPropDefs = Lists.newArrayList(propDefs.values());
    }
    dataset.setPropertyDefinitions(propDefs);
    DBExceptionHandler.callDBManager(db, new VoidUnaryDBFunction<Dataset>(dataset) {
        @Override
        public void call(Dataset arg) throws DatabaseException {
            db.saveOrUpdate(newDataset);
            db.saveAllAsNew(newPropDefs);
        }
    });
    return newPropDefs;
}
From source file:com.palantir.atlasdb.transaction.impl.SerializableTransaction.java
private void verifyRows(Transaction ro) {
    for (String table : rowsRead.keySet()) {
        final ConcurrentNavigableMap<Cell, byte[]> readsForTable = getReadsForTable(table);
        Multimap<ColumnSelection, byte[]> map = Multimaps.newSortedSetMultimap(
                Maps.<ColumnSelection, Collection<byte[]>>newHashMap(),
                new Supplier<SortedSet<byte[]>>() {
                    @Override
                    public TreeSet<byte[]> get() {
                        return Sets.newTreeSet(UnsignedBytes.lexicographicalComparator());
                    }
                });
        for (RowRead r : rowsRead.get(table)) {
            map.putAll(r.cols, r.rows);
        }
        for (final ColumnSelection cols : map.keySet()) {
            for (List<byte[]> batch : Iterables.partition(map.get(cols), 1000)) {
                SortedMap<byte[], RowResult<byte[]>> currentRows = ro.getRows(table, batch, cols);
                for (byte[] row : batch) {
                    RowResult<byte[]> currentRow = currentRows.get(row);
                    Map<Cell, byte[]> originalReads = readsForTable
                            .tailMap(Cells.createSmallestCellForRow(row), true)
                            .headMap(Cells.createLargestCellForRow(row), true);

                    // We want to filter out all our reads to just the set that matches our column selection.
                    originalReads = Maps.filterKeys(originalReads, new Predicate<Cell>() {
                        @Override
                        public boolean apply(Cell input) {
                            return cols.contains(input.getColumnName());
                        }
                    });

                    if (writesByTable.get(table) != null) {
                        // We don't want to verify any reads that we wrote to, because we would just read our
                        // own values.
                        // NB: We filter our write set out here because our normal SI checking handles this
                        // case to ensure the value hasn't changed.
                        originalReads = Maps.filterKeys(originalReads,
                                Predicates.not(Predicates.in(writesByTable.get(table).keySet())));
                    }

                    if (currentRow == null && originalReads.isEmpty()) {
                        continue;
                    }

                    if (currentRow == null) {
                        throw TransactionSerializableConflictException.create(table, getTimestamp(),
                                System.currentTimeMillis() - timeCreated);
                    }

                    Map<Cell, byte[]> currentCells = Maps2.fromEntries(currentRow.getCells());
                    if (writesByTable.get(table) != null) {
                        // We don't want to verify any reads that we wrote to, because we would just read our
                        // own values.
                        // NB: We filter our write set out here because our normal SI checking handles this
                        // case to ensure the value hasn't changed.
                        currentCells = Maps.filterKeys(currentCells,
                                Predicates.not(Predicates.in(writesByTable.get(table).keySet())));
                    }
                    if (!areMapsEqual(originalReads, currentCells)) {
                        throw TransactionSerializableConflictException.create(table, getTimestamp(),
                                System.currentTimeMillis() - timeCreated);
                    }
                }
            }
        }
    }
}
From source file:co.cask.cdap.internal.app.runtime.distributed.AbstractProgramTwillRunnable.java
/**
 * Creates program options. It contains program and user arguments as passed from the distributed
 * program runner. Extra program arguments are inserted based on the environment information
 * (e.g. host, instance id). All configs available through the TwillRunnable configs are also
 * available through program arguments.
 */
private ProgramOptions createProgramOptions(CommandLine cmdLine, TwillContext context,
        Map<String, String> configs) {
    ProgramOptions original = GSON.fromJson(cmdLine.getOptionValue(RunnableOptions.PROGRAM_OPTIONS),
            ProgramOptions.class);

    // Overwrite them with environmental information
    Map<String, String> arguments = Maps.newHashMap(original.getArguments().asMap());
    arguments.put(ProgramOptionConstants.INSTANCE_ID, Integer.toString(context.getInstanceId()));
    arguments.put(ProgramOptionConstants.INSTANCES, Integer.toString(context.getInstanceCount()));
    arguments.put(ProgramOptionConstants.RUN_ID,
            original.getArguments().getOption(ProgramOptionConstants.RUN_ID));
    arguments.put(ProgramOptionConstants.TWILL_RUN_ID, context.getApplicationRunId().getId());
    arguments.put(ProgramOptionConstants.HOST, context.getHost().getCanonicalHostName());
    arguments.putAll(Maps.filterKeys(configs,
            Predicates.not(Predicates.in(ImmutableSet.of("hConf", "cConf")))));

    return new SimpleProgramOptions(context.getSpecification().getName(), new BasicArguments(arguments),
            original.getUserArguments(), original.isDebug());
}
From source file:edu.mit.streamjit.impl.compiler2.Compiler2BlobHost.java
/**
 * Extracts elements from storage and puts them in a DrainData for an
 * interpreter blob.
 * @param reads read instructions whose load() completed (thus requiring
 * unload())
 * @param drains drain instructions, if we're in the steady-state, or an
 * empty list if we didn't complete init
 */
private void doDrain(List<ReadInstruction> reads, List<DrainInstruction> drains) {
    Stopwatch drainTime = null;
    if (collectTimings)
        drainTime = Stopwatch.createStarted();

    List<Map<Token, Object[]>> data = new ArrayList<>(reads.size() + drains.size());
    for (ReadInstruction i : reads)
        data.add(i.unload());
    for (DrainInstruction i : drains)
        data.add(i.call());
    ImmutableMap<Token, List<Object>> mergedData = CollectionUtils.union((key, value) -> {
        int size = 0;
        for (Object[] v : value)
            size += v.length;
        List<Object> data1 = new ArrayList<>(size);
        for (Object[] v : value)
            data1.addAll(Arrays.asList(v));
        return data1;
    }, data);

    //Try once to write data on output edges, then let the interpreter handle it.
    Predicate<Token> isOutput = Predicates.in(getOutputs());
    for (Map.Entry<Token, List<Object>> e : Maps.filterKeys(mergedData, isOutput).entrySet()) {
        final Buffer b = buffers.get(e.getKey());
        final Object[] d = e.getValue().toArray();
        int written = b.write(d, 0, d.length);
        //Remove the data we wrote.
        e.getValue().subList(0, written).clear();
    }

    DrainData forInterp = new DrainData(mergedData,
            //We put state back in the workers via StateHolders, which are
            //DrainInstructions, so no state in the DrainData. (It will be
            //in the DrainData produced by the interpreter blob, so
            //distributed will still see it.)
            ImmutableTable.<Integer, String, Object>of());

    Interpreter.InterpreterBlobFactory interpFactory = new Interpreter.InterpreterBlobFactory();
    Blob interp = interpFactory.makeBlob(workers, interpFactory.getDefaultConfiguration(workers), 1,
            forInterp);
    interp.installBuffers(buffers);
    Runnable interpCode = interp.getCoreCode(0);
    final AtomicBoolean interpFinished = new AtomicBoolean();
    interp.drain(() -> interpFinished.set(true));
    while (!interpFinished.get())
        interpCode.run();
    this.drainData = interp.getDrainData();

    SwitchPoint.invalidateAll(new SwitchPoint[] { sp1, sp2 });
    drainCallback.run();

    if (collectTimings) {
        drainTime.stop();
        System.out.println("total adjust time: " + adjustTime + " over " + adjustCount + " adjusts");
        System.out.println("drain time: " + drainTime);
    }
}
From source file:org.opendaylight.netvirt.neutronvpn.ChangeUtils.java
public static <T extends DataObject> Map<InstanceIdentifier<T>, T> extractRemovedObjects(
        AsyncDataChangeEvent<InstanceIdentifier<?>, DataObject> changes, Class<T> klazz) {
    Set<InstanceIdentifier<T>> iids = extractRemoved(changes, klazz);
    return Maps.filterKeys(extractOriginal(changes, klazz), Predicates.in(iids));
}
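The same membership predicate also works on map views: Maps.filterKeys with Predicates.in restricts a map to a given key set, which is exactly what extractRemovedObjects does above. A minimal sketch with made-up keys and values:

import com.google.common.base.Predicates;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;

import java.util.Map;
import java.util.Set;

public class FilterKeysExample {
    public static void main(String[] args) {
        Map<String, Integer> scores = ImmutableMap.of("a", 1, "b", 2, "c", 3);
        Set<String> wanted = ImmutableSet.of("a", "c");

        // A live view containing only the entries whose key is in 'wanted': prints {a=1, c=3}.
        Map<String, Integer> filtered = Maps.filterKeys(scores, Predicates.in(wanted));
        System.out.println(filtered);
    }
}

Note that the result is a view, not a copy: changes to the backing map (or to the key collection) show through, so copy it if a stable snapshot is needed.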