Usage examples for com.google.common.collect.SetMultimap#asMap.
@Override Map<K, Collection<V>> asMap();
Note: The returned map's values are guaranteed to be of type Set.
From source file: com.google.protoeditor.validation.ProtoValidator.java
/**
 * Checks every enum definition in the given message body and reports problems through
 * {@code annotationHolder}:
 * <ul>
 *   <li>warning when several constants of one enum share the same numeric value, with a
 *       quick fix that reassigns the value;</li>
 *   <li>error when several constants of one enum share the same name;</li>
 *   <li>error when several enums in the message share the same name.</li>
 * </ul>
 */
public void checkEnums(ProtoDefinitionBody protoMessageDefinition, AnnotationHolder annotationHolder) {
    // enum name -> all definitions using that name; duplicates are reported at the end.
    SetMultimap<String, ProtoEnumDefinition> enumNames = HashMultimap.create();
    for (ProtoEnumDefinition enm : protoMessageDefinition.getEnumerations()) {
        // constant name -> constants of this enum using that name; populated by the
        // getUsedEnumConstantValues() call below.
        SetMultimap<String, ProtoEnumConstant> enumConstantNames = HashMultimap.create();
        String enumName = enm.getName();
        enumNames.put(enumName, enm);
        // numeric value -> constants declared with that value, sorted by value so that
        // lastKey() is the highest value currently in use.
        final SortedMap<Long, Set<ProtoEnumConstant>> enumNumbers = getUsedEnumConstantValues(enm, enumConstantNames);
        for (Map.Entry<Long, Set<ProtoEnumConstant>> entry : enumNumbers.entrySet()) {
            Set<ProtoEnumConstant> consts = entry.getValue();
            if (consts.size() > 1) {
                // More than one constant uses this numeric value: warn on each value literal.
                for (ProtoEnumConstant constant : consts) {
                    ProtoEnumValue enumValue = constant.getEnumValue();
                    if (enumValue == null) {
                        continue; // constant has no value element to annotate
                    }
                    final ProtoAbstractIntegerLiteral valueLiteral = enumValue.getValueLiteral();
                    if (valueLiteral == null) {
                        continue; // value present but no integer literal to annotate
                    }
                    Annotation anno = annotationHolder.createWarningAnnotation(valueLiteral,
                            "multiple constants in " + enm.getName() + " have value " + entry.getKey());
                    // Quick fix: bump this literal to one past the highest value in use.
                    // NOTE(review): enumNumbers is captured from validation time; applying the
                    // fix to several duplicate constants in the same pass would assign them all
                    // the same new value (lastKey() + 1) — confirm this is acceptable.
                    anno.registerFix(new IntentionAction() {
                        @Override
                        public String getText() {
                            return "Reassign enum constant value";
                        }

                        @Override
                        public String getFamilyName() {
                            return "ReassignEnumValue";
                        }

                        @Override
                        public boolean isAvailable(Project project, Editor editor, PsiFile file) {
                            return true; // always applicable once offered
                        }

                        @Override
                        public void invoke(Project project, Editor editor, PsiFile file) {
                            valueLiteral.setIntValue(enumNumbers.lastKey() + 1);
                        }

                        @Override
                        public boolean startInWriteAction() {
                            return true; // mutates the PSI, so a write action is required
                        }
                    });
                }
            }
        }
        // Duplicate constant names within this enum are hard errors.
        for (Map.Entry<String, Collection<ProtoEnumConstant>> entry : enumConstantNames.asMap().entrySet()) {
            Collection<ProtoEnumConstant> consts = entry.getValue();
            if (consts.size() > 1) {
                for (ProtoEnumConstant constant : consts) {
                    annotationHolder.createErrorAnnotation(constant.getNameElement(),
                            "multiple constants in " + enm.getName() + " have name " + entry.getKey());
                }
            }
        }
    }
    // Duplicate enum names within the message body are hard errors.
    for (Map.Entry<String, Collection<ProtoEnumDefinition>> entry : enumNames.asMap().entrySet()) {
        Collection<ProtoEnumDefinition> enums = entry.getValue();
        if (enums.size() > 1) {
            for (ProtoEnumDefinition enm : enums) {
                annotationHolder.createErrorAnnotation(enm.getNameElement(),
                        "multiple definitions " + "of enum " + entry.getKey());
            }
        }
    }
}
From source file: org.apache.gobblin.data.management.copy.CopySource.java
/** * <ul>//from w w w . j a v a 2 s . c o m * Does the following: * <li>Instantiate a {@link DatasetsFinder}. * <li>Find all {@link Dataset} using {@link DatasetsFinder}. * <li>For each {@link CopyableDataset} get all {@link CopyEntity}s. * <li>Create a {@link WorkUnit} per {@link CopyEntity}. * </ul> * * <p> * In this implementation, one workunit is created for every {@link CopyEntity} found. But the extractor/converters * and writers are built to support multiple {@link CopyEntity}s per workunit * </p> * * @param state see {@link org.apache.gobblin.configuration.SourceState} * @return Work units for copying files. */ @Override public List<WorkUnit> getWorkunits(final SourceState state) { this.metricContext = Instrumented.getMetricContext(state, CopySource.class); this.lineageInfo = LineageInfo.getLineageInfo(state.getBroker()); try { DeprecationUtils.renameDeprecatedKeys(state, CopyConfiguration.MAX_COPY_PREFIX + "." + CopyResourcePool.ENTITIES_KEY, Lists.newArrayList(MAX_FILES_COPIED_KEY)); final FileSystem sourceFs = HadoopUtils.getSourceFileSystem(state); final FileSystem targetFs = HadoopUtils.getWriterFileSystem(state, 1, 0); state.setProp(SlaEventKeys.SOURCE_URI, sourceFs.getUri()); state.setProp(SlaEventKeys.DESTINATION_URI, targetFs.getUri()); log.info("Identified source file system at {} and target file system at {}.", sourceFs.getUri(), targetFs.getUri()); long maxSizePerBin = state.getPropAsLong(MAX_SIZE_MULTI_WORKUNITS, 0); long maxWorkUnitsPerMultiWorkUnit = state.getPropAsLong(MAX_WORK_UNITS_PER_BIN, 50); final long minWorkUnitWeight = Math.max(1, maxSizePerBin / maxWorkUnitsPerMultiWorkUnit); final Optional<CopyableFileWatermarkGenerator> watermarkGenerator = CopyableFileWatermarkHelper .getCopyableFileWatermarkGenerator(state); int maxThreads = state.getPropAsInt(MAX_CONCURRENT_LISTING_SERVICES, DEFAULT_MAX_CONCURRENT_LISTING_SERVICES); final CopyConfiguration copyConfiguration = CopyConfiguration.builder(targetFs, state.getProperties()) 
.build(); this.eventSubmitter = new EventSubmitter.Builder(this.metricContext, CopyConfiguration.COPY_PREFIX) .build(); DatasetsFinder<CopyableDatasetBase> datasetFinder = DatasetUtils.instantiateDatasetFinder( state.getProperties(), sourceFs, DEFAULT_DATASET_PROFILE_CLASS_KEY, this.eventSubmitter, state); IterableDatasetFinder<CopyableDatasetBase> iterableDatasetFinder = datasetFinder instanceof IterableDatasetFinder ? (IterableDatasetFinder<CopyableDatasetBase>) datasetFinder : new IterableDatasetFinderImpl<>(datasetFinder); Iterator<CopyableDatasetRequestor> requestorIteratorWithNulls = Iterators.transform( iterableDatasetFinder.getDatasetsIterator(), new CopyableDatasetRequestor.Factory(targetFs, copyConfiguration, log)); Iterator<CopyableDatasetRequestor> requestorIterator = Iterators.filter(requestorIteratorWithNulls, Predicates.<CopyableDatasetRequestor>notNull()); final SetMultimap<FileSet<CopyEntity>, WorkUnit> workUnitsMap = Multimaps .<FileSet<CopyEntity>, WorkUnit>synchronizedSetMultimap( HashMultimap.<FileSet<CopyEntity>, WorkUnit>create()); RequestAllocator<FileSet<CopyEntity>> allocator = createRequestAllocator(copyConfiguration, maxThreads); Iterator<FileSet<CopyEntity>> prioritizedFileSets = allocator.allocateRequests(requestorIterator, copyConfiguration.getMaxToCopy()); //Submit alertable events for unfulfilled requests submitUnfulfilledRequestEvents(allocator); String filesetWuGeneratorAlias = state.getProp(ConfigurationKeys.COPY_SOURCE_FILESET_WU_GENERATOR_CLASS, FileSetWorkUnitGenerator.class.getName()); Iterator<Callable<Void>> callableIterator = Iterators.transform(prioritizedFileSets, new Function<FileSet<CopyEntity>, Callable<Void>>() { @Nullable @Override public Callable<Void> apply(FileSet<CopyEntity> input) { try { return GobblinConstructorUtils.<FileSetWorkUnitGenerator>invokeLongestConstructor( new ClassAliasResolver(FileSetWorkUnitGenerator.class) .resolveClass(filesetWuGeneratorAlias), input.getDataset(), input, state, workUnitsMap, 
watermarkGenerator, minWorkUnitWeight, lineageInfo); } catch (Exception e) { throw new RuntimeException("Cannot create workunits generator", e); } } }); try { List<Future<Void>> futures = new IteratorExecutor<>(callableIterator, maxThreads, ExecutorsUtils .newDaemonThreadFactory(Optional.of(log), Optional.of("Copy-file-listing-pool-%d"))) .execute(); for (Future<Void> future : futures) { try { future.get(); } catch (ExecutionException exc) { log.error("Failed to get work units for dataset.", exc.getCause()); } } } catch (InterruptedException ie) { log.error("Retrieval of work units was interrupted. Aborting."); return Lists.newArrayList(); } log.info(String.format("Created %s workunits ", workUnitsMap.size())); copyConfiguration.getCopyContext().logCacheStatistics(); if (state.contains(SIMULATE) && state.getPropAsBoolean(SIMULATE)) { log.info("Simulate mode enabled. Will not execute the copy."); for (Map.Entry<FileSet<CopyEntity>, Collection<WorkUnit>> entry : workUnitsMap.asMap().entrySet()) { log.info(String.format("Actions for dataset %s file set %s.", entry.getKey().getDataset().datasetURN(), entry.getKey().getName())); for (WorkUnit workUnit : entry.getValue()) { try { CopyEntity copyEntity = deserializeCopyEntity(workUnit); log.info(copyEntity.explain()); } catch (Exception e) { log.info("Cannot deserialize CopyEntity from wu : {}", workUnit.toString()); } } } return Lists.newArrayList(); } List<? extends WorkUnit> workUnits = new WorstFitDecreasingBinPacking(maxSizePerBin) .pack(Lists.newArrayList(workUnitsMap.values()), this.weighter); log.info(String.format( "Bin packed work units. Initial work units: %d, packed work units: %d, max weight per bin: %d, " + "max work units per bin: %d.", workUnitsMap.size(), workUnits.size(), maxSizePerBin, maxWorkUnitsPerMultiWorkUnit)); return ImmutableList.copyOf(workUnits); } catch (IOException e) { throw new RuntimeException(e); } }
From source file: dagger.internal.codegen.ComponentValidator.java
/**
 * Validates the given component subject. Also validates any referenced subcomponents that aren't
 * already included in the {@code validatedSubcomponents} set.
 *
 * @param subject the component type being validated
 * @param validatedSubcomponents subcomponents already validated in this pass; skipped here
 * @param validatedSubcomponentBuilders subcomponent builders already validated in this pass
 * @return the validation report plus every subcomponent reachable from {@code subject}
 */
public ComponentValidationReport validate(final TypeElement subject,
        Set<? extends Element> validatedSubcomponents, Set<? extends Element> validatedSubcomponentBuilders) {
    ValidationReport.Builder<TypeElement> builder = ValidationReport.about(subject);

    ComponentDescriptor.Kind componentKind = ComponentDescriptor.Kind.forAnnotatedElement(subject).get();
    // A component must be an interface or an abstract class.
    if (!subject.getKind().equals(INTERFACE)
            && !(subject.getKind().equals(CLASS) && subject.getModifiers().contains(ABSTRACT))) {
        builder.addError(String.format("@%s may only be applied to an interface or abstract class",
                componentKind.annotationType().getSimpleName()), subject);
    }

    // Enforce exactly one nested builder type.
    ImmutableList<DeclaredType> builders = enclosedBuilders(subject, componentKind.builderAnnotationType());
    if (builders.isEmpty()) {
        final String subjectName = subject.getQualifiedName().toString();
        builder.addError(
                String.format(ErrorMessages.builderMsgsFor(componentKind).noBuilderPresent(), subjectName));
    }
    if (builders.size() > 1) {
        builder.addError(String.format(ErrorMessages.builderMsgsFor(componentKind).moreThanOne(), builders),
                subject);
    }

    // @Reusable is a scope for bindings, not for component types.
    Optional<AnnotationMirror> reusableAnnotation = getAnnotationMirror(subject, Reusable.class);
    if (reusableAnnotation.isPresent()) {
        builder.addError(COMPONENT_ANNOTATED_REUSABLE, subject, reusableAnnotation.get());
    }

    DeclaredType subjectType = MoreTypes.asDeclared(subject.asType());

    // subcomponent element -> component methods that reference it (to catch multiple refs).
    SetMultimap<Element, ExecutableElement> referencedSubcomponents = LinkedHashMultimap.create();
    getLocalAndInheritedMethods(subject, types, elements).stream()
            .filter(method -> method.getModifiers().contains(ABSTRACT)).forEachOrdered(method -> {
                // Resolve generics against the concrete component type.
                ExecutableType resolvedMethod = asExecutable(types.asMemberOf(subjectType, method));
                List<? extends TypeMirror> parameterTypes = resolvedMethod.getParameterTypes();
                List<? extends VariableElement> parameters = method.getParameters();
                TypeMirror returnType = resolvedMethod.getReturnType();

                // abstract methods are ones we have to implement, so they each need to be validated
                // first, check the return type. if it's a subcomponent, validate that method as such.
                Optional<AnnotationMirror> subcomponentAnnotation = checkForAnnotations(returnType,
                        FluentIterable.from(componentKind.subcomponentKinds()).transform(Kind::annotationType)
                                .toSet());
                Optional<AnnotationMirror> subcomponentBuilderAnnotation = checkForAnnotations(returnType,
                        FluentIterable.from(componentKind.subcomponentKinds())
                                .transform(Kind::builderAnnotationType).toSet());
                if (subcomponentAnnotation.isPresent()) {
                    referencedSubcomponents.put(MoreTypes.asElement(returnType), method);
                    validateSubcomponentMethod(builder,
                            ComponentDescriptor.Kind.forAnnotatedElement(MoreTypes.asTypeElement(returnType))
                                    .get(),
                            method, parameters, parameterTypes, returnType, subcomponentAnnotation);
                } else if (subcomponentBuilderAnnotation.isPresent()) {
                    // A builder's enclosing element is the subcomponent it builds.
                    referencedSubcomponents.put(MoreTypes.asElement(returnType).getEnclosingElement(), method);
                    validateSubcomponentBuilderMethod(builder, method, parameters, returnType,
                            validatedSubcomponentBuilders);
                } else {
                    // if it's not a subcomponent...
                    switch (parameters.size()) {
                    case 0:
                        // no parameters means that it is a provision method
                        // basically, there are no restrictions here. \o/
                        break;
                    case 1:
                        // one parameter means that it's a members injection method
                        TypeMirror onlyParameter = Iterables.getOnlyElement(parameterTypes);
                        if (!(returnType.getKind().equals(VOID)
                                || types.isSameType(returnType, onlyParameter))) {
                            builder.addError(
                                    "Members injection methods may only return the injected type or void.",
                                    method);
                        }
                        break;
                    default:
                        // this isn't any method that we know how to implement...
                        builder.addError(
                                "This method isn't a valid provision method, members injection method or "
                                        + "subcomponent factory method. Dagger cannot implement this method",
                                method);
                        break;
                    }
                }
            });

    // Each subcomponent may be referenced by at most one factory/builder method.
    Maps.filterValues(referencedSubcomponents.asMap(), methods -> methods.size() > 1)
            .forEach((subcomponent, methods) -> builder.addError(String.format(
                    ErrorMessages.SubcomponentBuilderMessages.INSTANCE.moreThanOneRefToSubcomponent(),
                    subcomponent, methods), subject));

    AnnotationMirror componentMirror = getAnnotationMirror(subject, componentKind.annotationType()).get();
    if (componentKind.isTopLevel()) {
        validateComponentDependencies(builder, getComponentDependencies(componentMirror));
    }
    builder.addSubreport(
            moduleValidator.validateReferencedModules(subject, componentMirror, componentKind.moduleKinds()));

    // Make sure we validate any subcomponents we're referencing, unless we know we validated
    // them already in this pass.
    // TODO(sameb): If subcomponents refer to each other and both aren't in
    // 'validatedSubcomponents' (e.g, both aren't compiled in this pass),
    // then this can loop forever.
    ImmutableSet.Builder<Element> allSubcomponents = ImmutableSet.<Element>builder()
            .addAll(referencedSubcomponents.keySet());
    for (Element subcomponent : Sets.difference(referencedSubcomponents.keySet(), validatedSubcomponents)) {
        ComponentValidationReport subreport = subcomponentValidator.validate(MoreElements.asType(subcomponent),
                validatedSubcomponents, validatedSubcomponentBuilders);
        builder.addItems(subreport.report().items());
        allSubcomponents.addAll(subreport.referencedSubcomponents());
    }

    return new AutoValue_ComponentValidator_ComponentValidationReport(allSubcomponents.build(),
            builder.build());
}
From source file: ome.services.graphs.GraphTraversal.java
/**
 * Prepare to remove links between the targeted model objects and the remainder of the model
 * object graph.
 *
 * @param isUnlinkIncludeFromExclude if {@link Action#EXCLUDE} objects must be unlinked from
 *     {@link Action#INCLUDE} objects and vice versa
 * @return the actual unlinker for the targeted model objects, to be used by the caller
 * @throws GraphException if the user does not have permission to unlink the targets
 */
public PlanExecutor unlinkTargets(boolean isUnlinkIncludeFromExclude) throws GraphException {
    if (!progress.contains(Milestone.PLANNED)) {
        throw new IllegalStateException("operation not yet planned");
    }
    /* accumulate plan for unlinking included/deleted from others */
    // link property -> ids of owning objects whose property must be set to null
    final SetMultimap<CP, Long> toNullByCP = HashMultimap.create();
    // collection link property -> owner id -> (class name, id) entries to remove from that collection
    final Map<CP, SetMultimap<Long, Entry<String, Long>>> linkerToIdToLinked = new HashMap<CP, SetMultimap<Long, Entry<String, Long>>>();
    // Pass 1: for every INCLUDEd object, sever its forward links to DELETE (and, if requested,
    // EXCLUDE) objects, and optionally the backward links from EXCLUDE objects to it.
    for (final CI object : planning.included) {
        for (final String superclassName : model.getSuperclassesOfReflexive(object.className)) {
            for (final Entry<String, String> forwardLink : model.getLinkedTo(superclassName)) {
                final CP linkProperty = new CP(superclassName, forwardLink.getValue());
                final boolean isCollection = model.getPropertyKind(linkProperty.className,
                        linkProperty.propertyName) == PropertyKind.COLLECTION;
                final CPI linkSource = linkProperty.toCPI(object.id);
                for (final CI linked : planning.forwardLinksCached.get(linkSource)) {
                    final Action linkedAction = getAction(linked);
                    if (linkedAction == Action.DELETE
                            || isUnlinkIncludeFromExclude && linkedAction == Action.EXCLUDE) {
                        /* INCLUDE is linked to EXCLUDE or DELETE, so unlink */
                        if (isCollection) {
                            addRemoval(linkerToIdToLinked, linkProperty.toCPI(object.id), linked);
                        } else {
                            toNullByCP.put(linkProperty, object.id);
                        }
                    }
                }
            }
            if (isUnlinkIncludeFromExclude) {
                for (final Entry<String, String> backwardLink : model.getLinkedBy(superclassName)) {
                    final CP linkProperty = new CP(backwardLink.getKey(), backwardLink.getValue());
                    final boolean isCollection = model.getPropertyKind(linkProperty.className,
                            linkProperty.propertyName) == PropertyKind.COLLECTION;
                    final CPI linkTarget = linkProperty.toCPI(object.id);
                    for (final CI linker : planning.backwardLinksCached.get(linkTarget)) {
                        final Action linkerAction = getAction(linker);
                        if (linkerAction == Action.EXCLUDE) {
                            /* EXCLUDE is linked to INCLUDE, so unlink */
                            if (isCollection) {
                                addRemoval(linkerToIdToLinked, linkProperty.toCPI(linker.id), object);
                            } else {
                                toNullByCP.put(linkProperty, linker.id);
                            }
                        }
                    }
                }
            }
        }
    }
    // Pass 2: for every DELETEd object, sever the backward links from any object that is not
    // itself being deleted.
    for (final CI object : planning.deleted) {
        for (final String superclassName : model.getSuperclassesOfReflexive(object.className)) {
            for (final Entry<String, String> backwardLink : model.getLinkedBy(superclassName)) {
                final CP linkProperty = new CP(backwardLink.getKey(), backwardLink.getValue());
                final boolean isCollection = model.getPropertyKind(linkProperty.className,
                        linkProperty.propertyName) == PropertyKind.COLLECTION;
                final CPI linkTarget = linkProperty.toCPI(object.id);
                for (final CI linker : planning.backwardLinksCached.get(linkTarget)) {
                    final Action linkerAction = getAction(linker);
                    if (linkerAction != Action.DELETE) {
                        /* EXCLUDE, INCLUDE or OUTSIDE is linked to DELETE, so unlink */
                        if (isCollection) {
                            addRemoval(linkerToIdToLinked, linkProperty.toCPI(linker.id), object);
                        } else {
                            toNullByCP.put(linkProperty, linker.id);
                        }
                    }
                }
            }
        }
    }
    /* note unlink included/deleted by nulling properties */
    // Validate the nulling plan up front: the property must be nullable and the user must be
    // permitted to update every owning object.
    final Map<CP, Collection<Long>> eachToNullByCP = toNullByCP.asMap();
    for (final Entry<CP, Collection<Long>> nullCurr : eachToNullByCP.entrySet()) {
        final CP linker = nullCurr.getKey();
        if (unnullable.get(linker.className).contains(linker.propertyName)
                || model.getPropertyKind(linker.className, linker.propertyName) == PropertyKind.REQUIRED) {
            throw new GraphException("cannot null " + linker);
        }
        final Collection<Long> allIds = nullCurr.getValue();
        assertMayBeUpdated(linker.className, allIds);
    }
    /* note unlink included/deleted by removing from collections */
    // NOTE(review): this throws unconditionally for the first planned collection removal, i.e.
    // removing elements from collection properties appears unsupported here — confirm intentional.
    for (final Entry<CP, SetMultimap<Long, Entry<String, Long>>> removeCurr : linkerToIdToLinked.entrySet()) {
        final CP linker = removeCurr.getKey();
        final Collection<Long> allIds = removeCurr.getValue().keySet();
        assertMayBeUpdated(linker.className, allIds);
        throw new GraphException("cannot remove elements from collection " + linker);
    }
    // The returned executor performs the planned nulling in batches when invoked.
    return new PlanExecutor() {
        @Override
        public void execute() throws GraphException {
            if (progress.contains(Milestone.UNLINKED)) {
                throw new IllegalStateException("model objects already unlinked");
            }
            /* actually do the noted unlinking */
            for (final Entry<CP, Collection<Long>> nullCurr : eachToNullByCP.entrySet()) {
                final CP linker = nullCurr.getKey();
                final Collection<Long> allIds = nullCurr.getValue();
                for (final List<Long> ids : Iterables.partition(allIds, BATCH_SIZE)) {
                    processor.nullProperties(linker.className, linker.propertyName, ids);
                }
            }
            progress.add(Milestone.UNLINKED);
        }
    };
}