List of usage examples for com.google.common.collect SetMultimap put
boolean put(@Nullable K key, @Nullable V value);
From source file:brooklyn.event.feed.http.HttpFeed.java
/**
 * Builds an {@code HttpFeed} from its {@link Builder}: each configured poll is copied,
 * given the builder's default period when it has none, and registered in a multimap
 * keyed by the identifying HTTP request parameters.
 *
 * @param builder the feed configuration; {@code builder.headers} must be non-null
 */
protected HttpFeed(Builder builder) {
    setConfig(ONLY_IF_SERVICE_UP, builder.onlyIfServiceUp);
    // Headers shared by every poll; defensively copied into an immutable map.
    Map<String, String> baseHeaders = ImmutableMap.copyOf(checkNotNull(builder.headers, "headers"));
    // A SetMultimap because several poll configs may resolve to the same request identity.
    SetMultimap<HttpPollIdentifier, HttpPollConfig<?>> polls = HashMultimap
            .<HttpPollIdentifier, HttpPollConfig<?>>create();
    for (HttpPollConfig<?> config : builder.polls) {
        // Copy so defaulting the period below does not mutate the builder's config.
        @SuppressWarnings({ "unchecked", "rawtypes" })
        HttpPollConfig<?> configCopy = new HttpPollConfig(config);
        if (configCopy.getPeriod() < 0)
            configCopy.period(builder.period);
        String method = config.getMethod();
        Map<String, String> headers = config.buildHeaders(baseHeaders);
        byte[] body = config.getBody();
        Duration connectionTimeout = config.getConnectionTimeout();
        Duration socketTimeout = config.getSocketTimeout();
        Optional<Credentials> credentials = Optional.fromNullable(builder.credentials);
        Supplier<URI> baseUriProvider = builder.baseUriProvider;
        if (builder.baseUri != null) {
            // A fixed baseUri and a dynamic provider are mutually exclusive.
            if (baseUriProvider != null)
                throw new IllegalStateException("Not permitted to supply baseUri and baseUriProvider");
            Map<String, String> baseUriVars = ImmutableMap
                    .copyOf(checkNotNull(builder.baseUriVars, "baseUriVars"));
            URI uri = config.buildUri(builder.baseUri, baseUriVars);
            baseUriProvider = Suppliers.ofInstance(uri);
        } else if (!builder.baseUriVars.isEmpty()) {
            // Substitution vars only make sense with a fixed baseUri.
            throw new IllegalStateException(
                    "Not permitted to supply URI vars when using a URI provider; pass the vars to the provider instead");
        }
        checkNotNull(baseUriProvider);
        polls.put(new HttpPollIdentifier(method, baseUriProvider, headers, body, credentials, connectionTimeout,
                socketTimeout), configCopy);
    }
    setConfig(POLLS, polls);
    initUniqueTag(builder.uniqueTag, polls.values());
}
From source file:org.apache.brooklyn.feed.http.HttpFeed.java
/**
 * Builds an {@code HttpFeed} from its {@link Builder}: every enabled poll config is
 * copied, given the builder's default period when it lacks one, and stored under an
 * identifier derived from its HTTP request parameters.
 *
 * @param builder the feed configuration; {@code builder.headers} must be non-null
 */
protected HttpFeed(Builder builder) {
    setConfig(ONLY_IF_SERVICE_UP, builder.onlyIfServiceUp);
    Map<String, String> sharedHeaders = ImmutableMap.copyOf(checkNotNull(builder.headers, "headers"));
    SetMultimap<HttpPollIdentifier, HttpPollConfig<?>> pollsById = HashMultimap
            .<HttpPollIdentifier, HttpPollConfig<?>>create();
    for (HttpPollConfig<?> pollConfig : builder.polls) {
        // Disabled polls are skipped entirely.
        if (!pollConfig.isEnabled()) {
            continue;
        }
        // Work on a copy so the builder's config is never mutated.
        @SuppressWarnings({ "unchecked", "rawtypes" })
        HttpPollConfig<?> pollConfigCopy = new HttpPollConfig(pollConfig);
        if (pollConfigCopy.getPeriod() < 0) {
            pollConfigCopy.period(builder.period);
        }
        String httpMethod = pollConfig.getMethod();
        Map<String, String> resolvedHeaders = pollConfig.buildHeaders(sharedHeaders);
        byte[] requestBody = pollConfig.getBody();
        Duration connectTimeout = pollConfig.getConnectionTimeout();
        Duration readTimeout = pollConfig.getSocketTimeout();
        Optional<Credentials> creds = Optional.fromNullable(builder.credentials);
        Supplier<URI> uriSupplier = builder.baseUriProvider;
        if (builder.baseUri == null) {
            // With a dynamic provider, substitution vars must not be given here.
            if (!builder.baseUriVars.isEmpty()) {
                throw new IllegalStateException(
                        "Not permitted to supply URI vars when using a URI provider; pass the vars to the provider instead");
            }
        } else {
            // A fixed URI and a URI provider are mutually exclusive.
            if (uriSupplier != null) {
                throw new IllegalStateException("Not permitted to supply baseUri and baseUriProvider");
            }
            Map<String, String> uriVars = ImmutableMap
                    .copyOf(checkNotNull(builder.baseUriVars, "baseUriVars"));
            URI resolvedUri = pollConfig.buildUri(builder.baseUri, uriVars);
            uriSupplier = Suppliers.ofInstance(resolvedUri);
        }
        checkNotNull(uriSupplier);
        pollsById.put(new HttpPollIdentifier(httpMethod, uriSupplier, resolvedHeaders, requestBody, creds,
                connectTimeout, readTimeout), pollConfigCopy);
    }
    setConfig(POLLS, pollsById);
    initUniqueTag(builder.uniqueTag, pollsById.values());
}
From source file:ome.services.graphs.GraphTraversal.java
/**
 * Load object instances and their links into the various cache fields of {@link Planning}.
 * Link endpoints are first gathered into multimaps so that each distinct link property
 * is queried once for a whole batch of ids.
 * @param session a Hibernate session
 * @param toCache the objects to cache
 * @throws GraphException if the objects could not be converted to unloaded instances
 */
private void cache(Session session, Collection<CI> toCache) throws GraphException {
    /* note which links to query, organized for batch querying */
    final SetMultimap<CP, Long> forwardLinksWanted = HashMultimap.create();
    final SetMultimap<CP, Long> backwardLinksWanted = HashMultimap.create();
    for (final CI inclusionCandidate : toCache) {
        // Walk the superclass chain (reflexively) so inherited link properties are included.
        for (final String inclusionCandidateSuperclassName : model
                .getSuperclassesOfReflexive(inclusionCandidate.className)) {
            for (final Entry<String, String> forwardLink : model
                    .getLinkedTo(inclusionCandidateSuperclassName)) {
                final CP linkProperty = new CP(inclusionCandidateSuperclassName, forwardLink.getValue());
                forwardLinksWanted.put(linkProperty, inclusionCandidate.id);
            }
            for (final Entry<String, String> backwardLink : model
                    .getLinkedBy(inclusionCandidateSuperclassName)) {
                final CP linkProperty = new CP(backwardLink.getKey(), backwardLink.getValue());
                backwardLinksWanted.put(linkProperty, inclusionCandidate.id);
            }
        }
    }
    /* query and cache forward links */
    for (final Entry<CP, Collection<Long>> forwardLink : forwardLinksWanted.asMap().entrySet()) {
        final CP linkProperty = forwardLink.getKey();
        // HQL: identifiers only; class/property names come from the model, not user input.
        final String query = "SELECT linker.id, linked.id FROM " + linkProperty.className + " AS linker "
                + "JOIN linker." + linkProperty.propertyName + " AS linked WHERE linker.id IN (:ids)";
        for (final Entry<CI, CI> linkerLinked : getLinksToCache(linkProperty, query, forwardLink.getValue())) {
            planning.forwardLinksCached.put(linkProperty.toCPI(linkerLinked.getKey().id),
                    linkerLinked.getValue());
        }
    }
    /* query and cache backward links */
    for (final Entry<CP, Collection<Long>> backwardLink : backwardLinksWanted.asMap().entrySet()) {
        final CP linkProperty = backwardLink.getKey();
        // Same join as above but filtered on the linked side.
        final String query = "SELECT linker.id, linked.id FROM " + linkProperty.className + " AS linker "
                + "JOIN linker." + linkProperty.propertyName + " AS linked WHERE linked.id IN (:ids)";
        for (final Entry<CI, CI> linkerLinked : getLinksToCache(linkProperty, query, backwardLink.getValue())) {
            planning.backwardLinksCached.put(linkProperty.toCPI(linkerLinked.getValue().id),
                    linkerLinked.getKey());
        }
    }
    /* note cached objects for further processing */
    planning.cached.addAll(toCache);
    planning.toProcess.addAll(toCache);
}
From source file:org.opendaylight.groupbasedpolicy.renderer.ofoverlay.arp.ArpTasker.java
/**
 * Reads the CONFIGURATION datastore for all nodes and collects, per node, the external
 * interfaces (node-connector path plus hardware MAC address) declared in each node's
 * {@code OfOverlayNodeConfig} augmentation. Connectors absent from the OPERATIONAL
 * datastore or lacking a hardware address are skipped with a debug log.
 *
 * @param rTx the read transaction used for both datastore lookups
 * @return a multimap from node to (connector path, MAC) pairs; empty when no nodes exist
 */
private SetMultimap<Node, Pair<InstanceIdentifier<NodeConnector>, MacAddress>> readNodesWithExternalIfaces(
        ReadTransaction rTx) {
    Optional<Nodes> nodesFromConf = DataStoreHelper.readFromDs(LogicalDatastoreType.CONFIGURATION,
            InstanceIdentifier.builder(Nodes.class).build(), rTx);
    if (!nodesFromConf.isPresent() || nodesFromConf.get().getNode() == null) {
        return ImmutableSetMultimap.of();
    }
    List<Node> nodeList = nodesFromConf.get().getNode();
    SetMultimap<Node, Pair<InstanceIdentifier<NodeConnector>, MacAddress>> result = HashMultimap.create();
    for (Node node : nodeList) {
        OfOverlayNodeConfig overlayConfig = node.getAugmentation(OfOverlayNodeConfig.class);
        if (overlayConfig == null) {
            continue;
        }
        List<ExternalInterfaces> externalIfaces = overlayConfig.getExternalInterfaces();
        if (externalIfaces == null) {
            continue;
        }
        for (ExternalInterfaces externalIface : externalIfaces) {
            NodeConnectorId connectorId = externalIface.getNodeConnectorId();
            InstanceIdentifier<NodeConnector> connectorPath = InstanceIdentifier.builder(Nodes.class)
                    .child(Node.class, node.getKey())
                    .child(NodeConnector.class, new NodeConnectorKey(connectorId)).build();
            // The MAC lives on the OPERATIONAL view of the connector, so read it from there.
            Optional<NodeConnector> connectorFromOper = DataStoreHelper
                    .readFromDs(LogicalDatastoreType.OPERATIONAL, connectorPath, rTx);
            if (!connectorFromOper.isPresent()) {
                LOG.debug("Node connector {} does not exit in OPER DS. Node from CONF: {}",
                        connectorId.getValue(), node);
                continue;
            }
            FlowCapableNodeConnector flowCapable = connectorFromOper.get()
                    .getAugmentation(FlowCapableNodeConnector.class);
            if (flowCapable == null || flowCapable.getHardwareAddress() == null) {
                LOG.debug("Hardware address does not exist on node connector {}", connectorId.getValue());
                LOG.trace("Node connector from OPER DS {}", connectorFromOper.get());
                continue;
            }
            result.put(node, new ImmutablePair<>(connectorPath, flowCapable.getHardwareAddress()));
        }
    }
    return result;
}
From source file:gr.forth.ics.swkm.model2.importer.AbstractStore.java
/**
 * Recomputes tree labels for resources whose labels changed during an import, then
 * pushes the old-interval -> new-interval mapping to the store, batched per RDF type.
 *
 * @param context the import context holding the model and the updated-label bookkeeping
 * @throws SQLException if updating the resource labels in the store fails
 */
private void updateExistingLabels(ImportContext context) throws SQLException {
    UpdatedLabels updatedLabels = context.updatedLabels;
    Map<Interval, Interval> oldIntervalToNew = Maps.newHashMap();
    // NOTE(review): oldIntervalToType is populated below but never read in this method —
    // confirm whether it is dead bookkeeping or used via side effect elsewhere.
    Map<Interval, RdfType> oldIntervalToType = Maps.newHashMap();
    final SetMultimap<RdfType, Interval> typeToOldInterval = HashMultimap.create();
    //Two cases:
    //1) Nodes for which we have their URI.
    for (Uri oldResource : updatedLabels.getOldLabels().getResourcesWithPredefinedLabels()) {
        RdfType type = context.model.mapResource(oldResource).type();
        Label oldLabel = updatedLabels.getOldLabels().predefinedLabelOf(oldResource);
        //Do we have a new label for this URI?
        Label newLabel = updatedLabels.getNewLabels(type).get(oldResource);
        if (newLabel != null) {
            oldIntervalToNew.put(oldLabel.getTreeLabel(), newLabel.getTreeLabel());
            oldIntervalToType.put(oldLabel.getTreeLabel(), type);
            typeToOldInterval.put(type, oldLabel.getTreeLabel());
        }
    }
    //2) Nodes for which we only know their old tree interval: all of them need to be updated
    for (RdfType type : EnumSet.of(RdfType.CLASS, RdfType.METACLASS, RdfType.PROPERTY, RdfType.METAPROPERTY)) {
        Map<Interval, Label> map = updatedLabels.getUpdatedLabelsByInterval(type);
        for (Entry<Interval, Label> entry : map.entrySet()) {
            Interval oldLabel = entry.getKey();
            Label newLabel = entry.getValue();
            oldIntervalToNew.put(oldLabel, newLabel.getTreeLabel());
            oldIntervalToType.put(oldLabel, type);
            typeToOldInterval.put(type, oldLabel);
        }
    }
    // Issue one store update per RDF type, restricted to the intervals of that type.
    for (final RdfType type : EnumSet.of(RdfType.CLASS, RdfType.METACLASS, RdfType.PROPERTY,
            RdfType.METAPROPERTY)) {
        Predicate<Interval> predicate = new Predicate<Interval>() {
            public boolean apply(Interval interval) {
                return typeToOldInterval.get(type).contains(interval);
            }
        };
        Map<Interval, Interval> filtered = Maps.filterKeys(oldIntervalToNew, predicate);
        if (!filtered.isEmpty()) {
            updateResourceLabels(filtered, type);
        }
    }
}
From source file:org.jakstab.analysis.explicit.VpcTrackingAnalysis.java
/**
 * Precision-adjustment (widening) step. The {@link ExplicitPrecision} is selected by the
 * current value of the {@code vpc} variable. When a variable or store location has
 * accumulated more distinct values than its threshold allows, tracking is lowered
 * (region-only or stopped entirely) and the value in the state is widened to match.
 * The state is copied lazily, only on the first actual widening (copy-on-write via the
 * {@code changed} flag). Finally, all current values are folded back into the precision's
 * value maps for future threshold checks.
 */
@Override
public Pair<AbstractState, Precision> prec(AbstractState s, Precision precision, ReachedSet reached) {
    // This method uses the fact that there is only 1 precision per location
    VpcPrecision vprec = (VpcPrecision) precision;
    BasedNumberValuation widenedState = (BasedNumberValuation) s;
    ExplicitPrecision eprec = vprec.getPrecision(widenedState.getValue(vpc));
    // Only check value counts if we have at least enough states to reach it
    if (reached.size() > Math.min(BoundedAddressTracking.varThreshold.getValue(),
            BoundedAddressTracking.heapThreshold.getValue())) {
        boolean changed = false;
        // Check value counts for variables
        for (RTLVariable v : eprec.varMap.keySet()) {
            //BasedNumberElement currentValue = ((BasedNumberValuation)s).getValue(v);
            Set<BasedNumberElement> existingValues = eprec.varMap.get(v);
            int threshold = eprec.getThreshold(v);
            if (existingValues.size() > threshold) {
                // Lower precisions and widen the value in this state, too.
                // This avoids values accumulating at join points (where they are not
                // intercepted by the precision-aware setValue)
                if (countRegions(existingValues) > threshold) {
                    eprec.stopTracking(v);
                    if (!changed) {
                        widenedState = new BasedNumberValuation(widenedState);
                        changed = true;
                    }
                    widenedState.setValue(v, BasedNumberElement.getTop(v.getBitWidth()));
                } else {
                    eprec.trackRegionOnly(v);
                    if (!changed) {
                        widenedState = new BasedNumberValuation(widenedState);
                        changed = true;
                    }
                    logger.debug("Only tracking region of " + v + ", values were " + existingValues);
                    widenedState.setValue(v, new BasedNumberElement(widenedState.getValue(v).getRegion(),
                            NumberElement.getTop(v.getBitWidth())));
                }
            }
        }
        // Check value counts for store
        PartitionedMemory<BasedNumberElement> sStore = ((BasedNumberValuation) s).getStore();
        for (EntryIterator<MemoryRegion, Long, BasedNumberElement> entryIt = sStore.entryIterator(); entryIt
                .hasEntry(); entryIt.next()) {
            MemoryRegion region = entryIt.getLeftKey();
            Long offset = entryIt.getRightKey();
            BasedNumberElement value = entryIt.getValue();
            SetMultimap<Long, BasedNumberElement> memoryMap = eprec.regionMaps.get(region);
            if (memoryMap == null)
                continue;
            //BasedNumberElement currentValue = entry.getValue();
            Set<BasedNumberElement> existingValues = memoryMap.get(offset);
            int threshold = eprec.getStoreThreshold(region, offset);
            if (existingValues.size() > threshold) {
                // Store entries get a laxer region bound (5x) than variables before
                // tracking is stopped entirely.
                if (countRegions(existingValues) > 5 * threshold) {
                    eprec.stopTracking(region, offset);
                    if (!changed) {
                        widenedState = new BasedNumberValuation(widenedState);
                        changed = true;
                    }
                    widenedState.getStore().set(region, offset, value.getBitWidth(),
                            BasedNumberElement.getTop(value.getBitWidth()));
                } else {
                    eprec.trackRegionOnly(region, offset);
                    if (!changed) {
                        widenedState = new BasedNumberValuation(widenedState);
                        changed = true;
                    }
                    widenedState.getStore().set(region, offset, value.getBitWidth(), new BasedNumberElement(
                            value.getRegion(), NumberElement.getTop(value.getBitWidth())));
                }
            }
        }
    }
    // Collect all values for all variables
    for (Map.Entry<RTLVariable, BasedNumberElement> entry : widenedState.getVariableValuation()) {
        eprec.varMap.put(entry.getKey(), entry.getValue());
    }
    // Collect all values for all memory areas
    PartitionedMemory<BasedNumberElement> store = widenedState.getStore();
    for (EntryIterator<MemoryRegion, Long, BasedNumberElement> entryIt = store.entryIterator(); entryIt
            .hasEntry(); entryIt.next()) {
        SetMultimap<Long, BasedNumberElement> memoryMap = eprec.regionMaps.get(entryIt.getLeftKey());
        if (memoryMap == null) {
            memoryMap = HashMultimap.create();
            eprec.regionMaps.put(entryIt.getLeftKey(), memoryMap);
        }
        memoryMap.put(entryIt.getRightKey(), entryIt.getValue());
    }
    // If it was changed, widenedState is now a new state
    return Pair.create((AbstractState) widenedState, precision);
}
From source file:org.jakstab.analysis.explicit.BoundedAddressTracking.java
/**
 * Precision-adjustment (widening) step. When a variable or store location has
 * accumulated more distinct values than its threshold allows, tracking is lowered
 * (region-only or stopped entirely) and the value in the state is widened to match.
 * The state is copied lazily, only on the first actual widening (copy-on-write via
 * the {@code changed} flag). Finally, all current values are folded back into the
 * precision's value maps for future threshold checks.
 *
 * NOTE(review): this is a near-duplicate of VpcTrackingAnalysis.prec (which differs
 * only in how the ExplicitPrecision is obtained) — consider extracting shared logic.
 */
@Override
public Pair<AbstractState, Precision> prec(AbstractState s, Precision precision, ReachedSet reached) {
    // This method uses the fact that there is only 1 precision per location
    ExplicitPrecision eprec = (ExplicitPrecision) precision;
    BasedNumberValuation widenedState = (BasedNumberValuation) s;
    // Only check value counts if we have at least enough states to reach it
    if (reached.size() > Math.min(varThreshold.getValue(), heapThreshold.getValue())) {
        boolean changed = false;
        // Check value counts for variables
        for (RTLVariable v : eprec.varMap.keySet()) {
            //BasedNumberElement currentValue = ((BasedNumberValuation)s).getValue(v);
            Set<BasedNumberElement> existingValues = eprec.varMap.get(v);
            int threshold = eprec.getThreshold(v);
            if (existingValues.size() > threshold) {
                // Lower precisions and widen the value in this state, too.
                // This avoids values accumulating at join points (where they are not
                // intercepted by the precision-aware setValue)
                if (countRegions(existingValues) > threshold) {
                    eprec.stopTracking(v);
                    if (!changed) {
                        widenedState = new BasedNumberValuation(widenedState);
                        changed = true;
                    }
                    widenedState.setValue(v, BasedNumberElement.getTop(v.getBitWidth()));
                } else {
                    eprec.trackRegionOnly(v);
                    if (!changed) {
                        widenedState = new BasedNumberValuation(widenedState);
                        changed = true;
                    }
                    logger.debug("Only tracking region of " + v + ", values were " + existingValues);
                    widenedState.setValue(v, new BasedNumberElement(widenedState.getValue(v).getRegion(),
                            NumberElement.getTop(v.getBitWidth())));
                }
            }
        }
        // Check value counts for store
        PartitionedMemory<BasedNumberElement> sStore = ((BasedNumberValuation) s).getStore();
        for (EntryIterator<MemoryRegion, Long, BasedNumberElement> entryIt = sStore.entryIterator(); entryIt
                .hasEntry(); entryIt.next()) {
            MemoryRegion region = entryIt.getLeftKey();
            Long offset = entryIt.getRightKey();
            BasedNumberElement value = entryIt.getValue();
            SetMultimap<Long, BasedNumberElement> memoryMap = eprec.regionMaps.get(region);
            if (memoryMap == null)
                continue;
            //BasedNumberElement currentValue = entry.getValue();
            Set<BasedNumberElement> existingValues = memoryMap.get(offset);
            int threshold = eprec.getStoreThreshold(region, offset);
            if (existingValues.size() > threshold) {
                // Store entries get a laxer region bound (5x) than variables before
                // tracking is stopped entirely.
                if (countRegions(existingValues) > 5 * threshold) {
                    eprec.stopTracking(region, offset);
                    if (!changed) {
                        widenedState = new BasedNumberValuation(widenedState);
                        changed = true;
                    }
                    widenedState.getStore().set(region, offset, value.getBitWidth(),
                            BasedNumberElement.getTop(value.getBitWidth()));
                } else {
                    eprec.trackRegionOnly(region, offset);
                    if (!changed) {
                        widenedState = new BasedNumberValuation(widenedState);
                        changed = true;
                    }
                    widenedState.getStore().set(region, offset, value.getBitWidth(), new BasedNumberElement(
                            value.getRegion(), NumberElement.getTop(value.getBitWidth())));
                }
            }
        }
    }
    // Collect all values for all variables
    for (Map.Entry<RTLVariable, BasedNumberElement> entry : widenedState.getVariableValuation()) {
        eprec.varMap.put(entry.getKey(), entry.getValue());
    }
    // Collect all values for all memory areas
    PartitionedMemory<BasedNumberElement> store = widenedState.getStore();
    for (EntryIterator<MemoryRegion, Long, BasedNumberElement> entryIt = store.entryIterator(); entryIt
            .hasEntry(); entryIt.next()) {
        SetMultimap<Long, BasedNumberElement> memoryMap = eprec.regionMaps.get(entryIt.getLeftKey());
        if (memoryMap == null) {
            memoryMap = HashMultimap.create();
            eprec.regionMaps.put(entryIt.getLeftKey(), memoryMap);
        }
        memoryMap.put(entryIt.getRightKey(), entryIt.getValue());
    }
    // If it was changed, widenedState is now a new state
    return Pair.create((AbstractState) widenedState, precision);
}
From source file:ome.services.graphs.GraphPathReport.java
/**
 * Process the Hibernate domain object model and write a report of the mapped objects.
 * Works in three phases: (1) record superclass/subclass relationships via reflection,
 * (2) walk every mapped property (expanding component types) to note entity linkages,
 * (3) emit a reStructuredText glossary from the collected data.
 * @throws IOException if there was a problem in writing to the output file
 */
private static void report() throws IOException {
    /* note all the direct superclasses and subclasses */
    final Map<String, String> superclasses = new HashMap<String, String>();
    final SortedSetMultimap<String, String> subclasses = TreeMultimap.create();
    @SuppressWarnings("unchecked")
    final Map<String, ClassMetadata> classesMetadata = sessionFactory.getAllClassMetadata();
    for (final String className : classesMetadata.keySet()) {
        try {
            final Class<?> actualClass = Class.forName(className);
            if (IObject.class.isAssignableFrom(actualClass)) {
                @SuppressWarnings("unchecked")
                final Set<String> subclassNames = sessionFactory.getEntityPersister(className)
                        .getEntityMetamodel().getSubclassEntityNames();
                for (final String subclassName : subclassNames) {
                    if (!subclassName.equals(className)) {
                        final Class<?> actualSubclass = Class.forName(subclassName);
                        // only record direct subclasses, not the whole transitive set
                        if (actualSubclass.getSuperclass() == actualClass) {
                            superclasses.put(subclassName, className);
                            subclasses.put(getSimpleName(className), getSimpleName(subclassName));
                        }
                    }
                }
            } else {
                System.err.println("error: mapped class " + className + " is not a " + IObject.class.getName());
            }
        } catch (ClassNotFoundException e) {
            System.err.println("error: could not instantiate class: " + e);
        }
    }
    /* queue for processing all the properties of all the mapped entities: name, type, nullability */
    final Queue<PropertyDetails> propertyQueue = new LinkedList<PropertyDetails>();
    final Map<String, Set<String>> allPropertyNames = new HashMap<String, Set<String>>();
    for (final Map.Entry<String, ClassMetadata> classMetadata : classesMetadata.entrySet()) {
        final String className = classMetadata.getKey();
        final ClassMetadata metadata = classMetadata.getValue();
        final String[] propertyNames = metadata.getPropertyNames();
        final Type[] propertyTypes = metadata.getPropertyTypes();
        final boolean[] propertyNullabilities = metadata.getPropertyNullability();
        for (int i = 0; i < propertyNames.length; i++) {
            if (!ignoreProperty(propertyNames[i])) {
                final List<String> propertyPath = Collections.singletonList(propertyNames[i]);
                propertyQueue.add(new PropertyDetails(className, propertyPath, propertyTypes[i],
                        propertyNullabilities[i]));
            }
        }
        final Set<String> propertyNamesSet = new HashSet<String>(propertyNames.length);
        propertyNamesSet.addAll(Arrays.asList(propertyNames));
        allPropertyNames.put(className, propertyNamesSet);
    }
    /* for linkedBy, X -> Y, Z: class X is linked to by class Y with Y's property Z */
    final SetMultimap<String, Map.Entry<String, String>> linkedBy = HashMultimap.create();
    final SetMultimap<String, String> linkers = HashMultimap.create();
    final SortedMap<String, SortedMap<String, String>> classPropertyReports = new TreeMap<String, SortedMap<String, String>>();
    /* process each property to note entity linkages */
    while (!propertyQueue.isEmpty()) {
        final PropertyDetails property = propertyQueue.remove();
        /* if the property has a component type, queue the parts for processing */
        if (property.type instanceof ComponentType) {
            final ComponentType componentType = (ComponentType) property.type;
            final String[] componentPropertyNames = componentType.getPropertyNames();
            final Type[] componentPropertyTypes = componentType.getSubtypes();
            final boolean[] componentPropertyNullabilities = componentType.getPropertyNullability();
            for (int i = 0; i < componentPropertyNames.length; i++) {
                if (!ignoreProperty(componentPropertyNames[i])) {
                    final List<String> componentPropertyPath = new ArrayList<String>(property.path.size() + 1);
                    componentPropertyPath.addAll(property.path);
                    componentPropertyPath.add(componentPropertyNames[i]);
                    propertyQueue.add(new PropertyDetails(property.holder, componentPropertyPath,
                            componentPropertyTypes[i], componentPropertyNullabilities[i]));
                }
            }
        } else {
            /* determine if this property links to another entity */
            final boolean isAssociatedEntity;
            if (property.type instanceof CollectionType) {
                final CollectionType ct = (CollectionType) property.type;
                isAssociatedEntity = sessionFactory.getCollectionPersister(ct.getRole()).getElementType()
                        .isEntityType();
            } else {
                isAssociatedEntity = property.type instanceof AssociationType;
            }
            /* determine the class and property name for reporting */
            final String holderSimpleName = getSimpleName(property.holder);
            final String propertyPath = Joiner.on('.').join(property.path);
            /* build a report line for this property */
            final StringBuffer sb = new StringBuffer();
            final String valueClassName;
            if (isAssociatedEntity) {
                /* entity linkages by non-inherited properties are recorded */
                final String valueName = ((AssociationType) property.type)
                        .getAssociatedEntityName(sessionFactory);
                final String valueSimpleName = getSimpleName(valueName);
                final Map.Entry<String, String> classPropertyName = Maps.immutableEntry(holderSimpleName,
                        propertyPath);
                linkers.put(holderSimpleName, propertyPath);
                linkedBy.put(valueSimpleName, classPropertyName);
                valueClassName = linkTo(valueSimpleName);
            } else {
                /* find a Sphinx representation for this property value type */
                final UserType userType;
                if (property.type instanceof CustomType) {
                    userType = ((CustomType) property.type).getUserType();
                } else {
                    userType = null;
                }
                if (property.type instanceof EnumType) {
                    valueClassName = "enumeration";
                } else if (userType instanceof GenericEnumType) {
                    @SuppressWarnings("unchecked")
                    final Class<? extends Unit> unitQuantityClass = ((GenericEnumType) userType)
                            .getQuantityClass();
                    valueClassName = "enumeration of " + linkToJavadoc(unitQuantityClass.getName());
                } else if (property.type instanceof ListType || userType instanceof ListAsSQLArrayUserType) {
                    valueClassName = "list";
                } else if (property.type instanceof MapType) {
                    valueClassName = "map";
                } else {
                    valueClassName = "``" + property.type.getName() + "``";
                }
            }
            sb.append(valueClassName);
            if (property.type.isCollectionType()) {
                sb.append(" (multiple)");
            } else if (property.isNullable) {
                sb.append(" (optional)");
            }
            /* determine from which class the property is inherited, if at all */
            String superclassWithProperty = null;
            String currentClass = property.holder;
            // walk up the superclass chain, keeping the highest class declaring the property
            while (true) {
                currentClass = superclasses.get(currentClass);
                if (currentClass == null) {
                    break;
                } else if (allPropertyNames.get(currentClass).contains(property.path.get(0))) {
                    superclassWithProperty = currentClass;
                }
            }
            /* check if the property actually comes from an interface */
            final String declaringClassName = superclassWithProperty == null ? property.holder
                    : superclassWithProperty;
            final Class<? extends IObject> interfaceForProperty = getInterfaceForProperty(declaringClassName,
                    property.path.get(0));
            /* report where the property is declared */
            if (superclassWithProperty != null) {
                sb.append(" from ");
                sb.append(linkTo(getSimpleName(superclassWithProperty)));
            } else {
                if (interfaceForProperty != null) {
                    sb.append(", see ");
                    sb.append(linkToJavadoc(interfaceForProperty.getName()));
                }
            }
            SortedMap<String, String> byProperty = classPropertyReports.get(holderSimpleName);
            if (byProperty == null) {
                byProperty = new TreeMap<String, String>();
                classPropertyReports.put(holderSimpleName, byProperty);
            }
            byProperty.put(propertyPath, sb.toString());
        }
    }
    /* the information is gathered, now write the report */
    out.write("Glossary of all OMERO Model Objects\n");
    out.write("===================================\n\n");
    out.write("Overview\n");
    out.write("--------\n\n");
    out.write("Reference\n");
    out.write("---------\n\n");
    for (final Map.Entry<String, SortedMap<String, String>> byClass : classPropertyReports.entrySet()) {
        /* label the class heading */
        final String className = byClass.getKey();
        out.write(".. _" + labelFor(className) + ":\n\n");
        out.write(className + "\n");
        final char[] underline = new char[className.length()];
        for (int i = 0; i < underline.length; i++) {
            underline[i] = '"';
        }
        out.write(underline);
        out.write("\n\n");
        /* note the class' relationships */
        final SortedSet<String> superclassOf = new TreeSet<String>();
        for (final String subclass : subclasses.get(className)) {
            superclassOf.add(linkTo(subclass));
        }
        final SortedSet<String> linkerText = new TreeSet<String>();
        for (final Map.Entry<String, String> linker : linkedBy.get(className)) {
            linkerText.add(linkTo(linker.getKey(), linker.getValue()));
        }
        if (!(superclassOf.isEmpty() && linkerText.isEmpty())) {
            /* write the class' relationships */
            /* out.write("Relationships\n"); out.write("^^^^^^^^^^^^^\n\n"); */
            if (!superclassOf.isEmpty()) {
                out.write("Subclasses: " + Joiner.on(", ").join(superclassOf) + "\n\n");
            }
            if (!linkerText.isEmpty()) {
                out.write("Used by: " + Joiner.on(", ").join(linkerText) + "\n\n");
            }
        }
        /* write the class' properties */
        /* out.write("Properties\n"); out.write("^^^^^^^^^^\n\n"); */
        out.write("Properties:\n");
        for (final Map.Entry<String, String> byProperty : byClass.getValue().entrySet()) {
            final String propertyName = byProperty.getKey();
            // if (linkers.containsEntry(className, propertyName)) {
            //     /* label properties that have other entities as values */
            //     out.write(".. _" + labelFor(className, propertyName) + ":\n\n");
            // }
            out.write(" | " + propertyName + ": " + byProperty.getValue() + "\n" /* \n */);
        }
        out.write("\n");
    }
}
From source file:omero.cmd.graphs.SkipHeadPolicy.java
/**
 * Adjust an existing graph traversal policy so that orphaned model objects will always or never be included,
 * according to their type. The returned policy delegates everything to {@code graphPolicy} except
 * {@code review}, which short-circuits for qualifying start objects.
 * @param graphPolicy the graph policy to adjust
 * @param graphPathBean the graph path bean, for converting class names to the actual classes
 * @param startFrom the model object types to from which to start inclusion, may not be empty or {@code null}
 * @param startAction the action associated with nodes qualifying as start objects
 * @param permissionsOverrides where to note for which {@code startFrom} objects permissions are not to be checked
 * @return the adjusted graph policy
 * @throws GraphException if no start classes are named
 */
public static GraphPolicy getSkipHeadPolicySkip(final GraphPolicy graphPolicy,
        final GraphPathBean graphPathBean, Collection<String> startFrom, final GraphPolicy.Action startAction,
        final SetMultimap<String, Long> permissionsOverrides) throws GraphException {
    if (CollectionUtils.isEmpty(startFrom)) {
        throw new GraphException(SkipHead.class.getSimpleName() + " requires the start classes to be named");
    }
    /* convert the class names to actual classes */
    final Function<String, Class<? extends IObject>> getClassFromName = new Function<String, Class<? extends IObject>>() {
        @Override
        public Class<? extends IObject> apply(String className) {
            // strip any package prefix: lookup is by simple name
            final int lastDot = className.lastIndexOf('.');
            if (lastDot > 0) {
                className = className.substring(lastDot + 1);
            }
            return graphPathBean.getClassForSimpleName(className);
        }
    };
    final ImmutableSet<Class<? extends IObject>> startFromClasses = ImmutableSet
            .copyOf(Collections2.transform(startFrom, getClassFromName));
    // true when the subject is an instance of any of the start-from classes (subclasses included)
    final Predicate<IObject> isStartFrom = new Predicate<IObject>() {
        @Override
        public boolean apply(IObject subject) {
            final Class<? extends IObject> subjectClass = subject.getClass();
            for (final Class<? extends IObject> startFromClass : startFromClasses) {
                if (startFromClass.isAssignableFrom(subjectClass)) {
                    return true;
                }
            }
            return false;
        }
    };
    /* construct the function corresponding to the model graph descent truncation */
    return new GraphPolicy() {
        @Override
        public void registerPredicate(GraphPolicyRulePredicate predicate) {
            graphPolicy.registerPredicate(predicate);
        }

        @Override
        public GraphPolicy getCleanInstance() {
            throw new IllegalStateException("not expecting to provide a clean instance");
        }

        @Override
        public void setCondition(String name) {
            graphPolicy.setCondition(name);
        }

        @Override
        public boolean isCondition(String name) {
            return graphPolicy.isCondition(name);
        }

        @Override
        public void noteDetails(Session session, IObject object, String realClass, long id) {
            graphPolicy.noteDetails(session, object, realClass, id);
        }

        @Override
        public final Set<Details> review(Map<String, Set<Details>> linkedFrom, Details rootObject,
                Map<String, Set<Details>> linkedTo, Set<String> notNullable, boolean isErrorRules)
                throws GraphException {
            if (rootObject.action == startAction && isStartFrom.apply(rootObject.subject)) {
                if (LOGGER.isDebugEnabled()) {
                    LOGGER.debug("deferring review of " + rootObject);
                }
                /* note which permissions overrides to start from */
                final String className = rootObject.subject.getClass().getName();
                final Long id = rootObject.subject.getId();
                if (rootObject.isCheckPermissions) {
                    permissionsOverrides.remove(className, id);
                } else {
                    permissionsOverrides.put(className, id);
                }
                /* skip the review, start from this object in a later request */
                return Collections.emptySet();
            } else {
                /* do the review */
                return graphPolicy.review(linkedFrom, rootObject, linkedTo, notNullable, isErrorRules);
            }
        }
    };
}
From source file:org.opendaylight.groupbasedpolicy.renderer.ofoverlay.flow.DestinationMapper.java
/**
 * Programs destination-mapper flows for the given node: a default drop flow, per-endpoint
 * flows for every (source endpoint, policy peer endpoint) pair, gateway ARP flows per
 * tenant subnet, broadcast flows per flood domain, and L3-prefix flows.
 *
 * @param nodeId the switch to program
 * @param ofWriter sink for the generated flows
 * @throws Exception propagated from flow creation/ordinal lookup
 */
@Override
public void sync(NodeId nodeId, OfWriter ofWriter) throws Exception {
    // NOTE(review): currentTenant stays null if the node has no endpoints and
    // subnetsByTenant is empty; it is then passed to getEndpointsL3PrefixForTenant
    // below — confirm that a null tenant is acceptable there.
    TenantId currentTenant = null;
    ofWriter.writeFlow(nodeId, TABLE_ID, dropFlow(Integer.valueOf(1), null, TABLE_ID));
    // Tracks (src, peer) pairs already synced so each pair is processed once.
    SetMultimap<EpKey, EpKey> visitedEps = HashMultimap.create();
    Set<EndpointFwdCtxOrdinals> epOrdSet = new HashSet<>();
    for (Endpoint srcEp : ctx.getEndpointManager().getEndpointsForNode(nodeId)) {
        // An endpoint may carry a single EPG and/or a list of EPGs; union them.
        Set<EndpointGroupId> srcEpgIds = new HashSet<>();
        if (srcEp.getEndpointGroup() != null)
            srcEpgIds.add(srcEp.getEndpointGroup());
        if (srcEp.getEndpointGroups() != null)
            srcEpgIds.addAll(srcEp.getEndpointGroups());
        for (EndpointGroupId epgId : srcEpgIds) {
            EgKey epg = new EgKey(srcEp.getTenant(), epgId);
            // Peers include the EPG itself so intra-group traffic is covered.
            Set<EgKey> peers = Sets.union(Collections.singleton(epg), ctx.getCurrentPolicy().getPeers(epg));
            for (EgKey peer : peers) {
                for (Endpoint peerEp : ctx.getEndpointManager().getEndpointsForGroup(peer)) {
                    currentTenant = peerEp.getTenant();
                    subnetsByTenant.put(currentTenant, getSubnets(currentTenant));
                    EpKey srcEpKey = new EpKey(srcEp.getL2Context(), srcEp.getMacAddress());
                    EpKey peerEpKey = new EpKey(peerEp.getL2Context(), peerEp.getMacAddress());
                    if (visitedEps.get(srcEpKey) != null && visitedEps.get(srcEpKey).contains(peerEpKey)) {
                        continue;
                    }
                    syncEP(ofWriter, nodeId, srcEp, peerEp);
                    visitedEps.put(srcEpKey, peerEpKey);
                    // Process subnets and flood-domains for epPeer
                    EndpointFwdCtxOrdinals epOrds = OrdinalFactory.getEndpointFwdCtxOrdinals(ctx, peerEp);
                    if (epOrds == null) {
                        LOG.debug("getEndpointFwdCtxOrdinals is null for EP {}", peerEp);
                        continue;
                    }
                    epOrdSet.add(epOrds);
                }
            }
        }
    }
    // Gateway ARP flows, one per subnet with a virtual router IP.
    for (Entry<TenantId, HashSet<Subnet>> subnetEntry : subnetsByTenant.entrySet()) {
        if (subnetEntry.getValue() == null) {
            LOG.trace("Tenant: {} has empty subnet entry.", subnetEntry.getKey());
            continue;
        }
        currentTenant = subnetEntry.getKey();
        for (Subnet sn : subnetEntry.getValue()) {
            L3Context l3c = getL3ContextForSubnet(currentTenant, sn);
            Flow arpFlow = createRouterArpFlow(currentTenant, nodeId, sn,
                    OrdinalFactory.getContextOrdinal(currentTenant, l3c.getId()));
            if (arpFlow != null) {
                ofWriter.writeFlow(nodeId, TABLE_ID, arpFlow);
            } else {
                LOG.debug(
                        "Gateway ARP flow is not created, because virtual router IP has not been set for subnet {} .",
                        sn.getIpPrefix().getValue());
            }
        }
    }
    // Write broadcast flows per flood domain.
    for (EndpointFwdCtxOrdinals epOrd : epOrdSet) {
        if (ofWriter.groupExists(nodeId, Integer.valueOf(epOrd.getFdId()).longValue())) {
            ofWriter.writeFlow(nodeId, TABLE_ID, createBroadcastFlow(epOrd));
        }
    }
    // L3 Prefix Endpoint handling
    Collection<EndpointL3Prefix> prefixEps = ctx.getEndpointManager()
            .getEndpointsL3PrefixForTenant(currentTenant);
    if (prefixEps != null) {
        LOG.trace("DestinationMapper - Processing L3PrefixEndpoints");
        for (EndpointL3Prefix prefixEp : prefixEps) {
            List<Subnet> localSubnets = getLocalSubnets(nodeId);
            if (localSubnets == null) {
                continue;
            }
            for (Subnet localSubnet : localSubnets) {
                Flow prefixFlow = createL3PrefixFlow(prefixEp, nodeId, localSubnet);
                if (prefixFlow != null) {
                    ofWriter.writeFlow(nodeId, TABLE_ID, prefixFlow);
                    LOG.trace("Wrote L3Prefix flow");
                }
            }
        }
    }
}