Example usage for com.google.common.collect Multimap size

Introduction

This page lists example usages of com.google.common.collect.Multimap#size(), drawn from open-source projects.

Prototype

int size();

Document

Returns the number of key-value pairs in this multimap.
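
As a quick illustration of what size() actually counts: it is the total number of key-value pairs, not the number of distinct keys (that would be keySet().size() or asMap().size()). A minimal sketch using ArrayListMultimap (the key and value literals are made up):

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;

public class MultimapSizeDemo {
    public static void main(String[] args) {
        Multimap<String, Integer> ports = ArrayListMultimap.create();
        ports.put("broker-1", 6650);
        ports.put("broker-1", 8080); // second value under the same key
        ports.put("broker-2", 6650);

        System.out.println(ports.size());          // 3 key-value pairs
        System.out.println(ports.keySet().size()); // 2 distinct keys
        System.out.println(ports.isEmpty());       // false
    }
}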

Usage

From source file: org.apache.pulsar.broker.loadbalance.impl.SimpleLoadManagerImpl.java

private synchronized ResourceUnit getLeastLoadedBroker(ServiceUnitId serviceUnit,
        Map<Long, Set<ResourceUnit>> availableBrokers) {
    ResourceUnit selectedBroker = null;
    // If the broker is already assigned, return that candidate.
    for (final Map.Entry<ResourceUnit, ResourceUnitRanking> entry : resourceUnitRankings.entrySet()) {
        final ResourceUnit resourceUnit = entry.getKey();
        final ResourceUnitRanking ranking = entry.getValue();
        if (ranking.isServiceUnitPreAllocated(serviceUnit.toString())) {
            return resourceUnit;
        }
    }
    Multimap<Long, ResourceUnit> finalCandidates = getFinalCandidates(serviceUnit, availableBrokers);
    // Remove candidates that point to inactive brokers
    Set<String> activeBrokers = Collections.emptySet();
    try {
        activeBrokers = availableActiveBrokers.get();
        // Need to use an explicit Iterator object to prevent concurrent modification exceptions
        Iterator<Map.Entry<Long, ResourceUnit>> candidateIterator = finalCandidates.entries().iterator();
        while (candidateIterator.hasNext()) {
            Map.Entry<Long, ResourceUnit> candidate = candidateIterator.next();
            String candidateBrokerName = candidate.getValue().getResourceId().replace("http://", "");
            if (!activeBrokers.contains(candidateBrokerName)) {
                candidateIterator.remove(); // Current candidate points to an inactive broker, so remove it
            }
        }
    } catch (Exception e) {
        log.warn("Error during attempt to remove inactive brokers while searching for least active broker", e);
    }

    if (finalCandidates.size() > 0) {
        if (this.getLoadBalancerPlacementStrategy().equals(LOADBALANCER_STRATEGY_LLS)
                || this.getLoadBalancerPlacementStrategy().equals(LOADBALANCER_STRATEGY_LEAST_MSG)) {
            selectedBroker = findBrokerForPlacement(finalCandidates, serviceUnit);
        } else {
            selectedBroker = placementStrategy.findBrokerForPlacement(finalCandidates);
        }
        log.info("Selected : [{}] for ServiceUnit : [{}]", selectedBroker.getResourceId(),
                serviceUnit.toString());
        return selectedBroker;
    } else {
        // No available broker found
        log.warn("No broker available to acquire service unit: [{}]", serviceUnit);
        return null;
    }
}
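
The method above relies on entries() being a live view of the multimap: removing candidates through its iterator shrinks finalCandidates, which is what the later size() check observes. A minimal, self-contained sketch of the same filtering pattern (the broker names and keys are invented for illustration):

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Multimap;

import java.util.Iterator;
import java.util.Map;
import java.util.Set;

public class CandidateFilterSketch {
    public static void main(String[] args) {
        Multimap<Long, String> candidates = ArrayListMultimap.create();
        candidates.put(10L, "broker-a:8080");
        candidates.put(10L, "broker-b:8080");
        candidates.put(20L, "broker-c:8080");

        Set<String> activeBrokers = ImmutableSet.of("broker-a:8080");

        // Removing through the entries() iterator writes through to the multimap itself.
        Iterator<Map.Entry<Long, String>> it = candidates.entries().iterator();
        while (it.hasNext()) {
            if (!activeBrokers.contains(it.next().getValue())) {
                it.remove();
            }
        }

        System.out.println(candidates.size()); // 1 -- only the active candidate remains
    }
}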

From source file: org.jclouds.rest.internal.RestAnnotationProcessor.java

@Override
public GeneratedHttpRequest apply(Invocation invocation) {
    checkNotNull(invocation, "invocation");
    inputParamValidator.validateMethodParametersOrThrow(invocation);

    Optional<URI> endpoint = Optional.absent();
    HttpRequest r = findOrNull(invocation.getArgs(), HttpRequest.class);
    if (r != null) {
        endpoint = Optional.fromNullable(r.getEndpoint());
        if (endpoint.isPresent())
            logger.trace("using endpoint %s from invocation.getArgs() for %s", endpoint, invocation);
    } else if (caller != null) {
        endpoint = getEndpointFor(caller);
        if (endpoint.isPresent())
            logger.trace("using endpoint %s from caller %s for %s", endpoint, caller, invocation);
        else
            endpoint = findEndpoint(invocation);
    } else {
        endpoint = findEndpoint(invocation);
    }

    if (!endpoint.isPresent())
        throw new NoSuchElementException(format("no endpoint found for %s", invocation));
    GeneratedHttpRequest.Builder requestBuilder = GeneratedHttpRequest.builder().invocation(invocation)
            .caller(caller);
    String requestMethod = null;
    if (r != null) {
        requestMethod = r.getMethod();
        requestBuilder.fromHttpRequest(r);
    } else {
        requestMethod = tryFindHttpMethod(invocation.getInvokable()).get();
        requestBuilder.method(requestMethod);
    }

    requestBuilder.filters(getFiltersIfAnnotated(invocation));

    Multimap<String, Object> tokenValues = LinkedHashMultimap.create();

    tokenValues.put(Constants.PROPERTY_API_VERSION, apiVersion);
    tokenValues.put(Constants.PROPERTY_BUILD_VERSION, buildVersion);
    // URI template in rfc6570 form
    UriBuilder uriBuilder = uriBuilder(endpoint.get().toString());

    overridePathEncoding(uriBuilder, invocation);

    if (caller != null)
        tokenValues.putAll(addPathAndGetTokens(caller, uriBuilder));
    tokenValues.putAll(addPathAndGetTokens(invocation, uriBuilder));
    Multimap<String, Object> formParams;
    if (caller != null) {
        formParams = addFormParams(tokenValues, caller);
        formParams.putAll(addFormParams(tokenValues, invocation));
    } else {
        formParams = addFormParams(tokenValues, invocation);
    }
    Multimap<String, Object> queryParams = addQueryParams(tokenValues, invocation);
    Multimap<String, String> headers = buildHeaders(tokenValues, invocation);

    if (r != null)
        headers.putAll(r.getHeaders());

    if (shouldAddHostHeader(invocation)) {
        StringBuilder hostHeader = new StringBuilder(endpoint.get().getHost());
        if (endpoint.get().getPort() != -1)
            hostHeader.append(":").append(endpoint.get().getPort());
        headers.put(HOST, hostHeader.toString());
    }

    Payload payload = null;
    for (HttpRequestOptions options : findOptionsIn(invocation)) {
        injector.injectMembers(options);// TODO test case
        for (Entry<String, String> header : options.buildRequestHeaders().entries()) {
            headers.put(header.getKey(), replaceTokens(header.getValue(), tokenValues));
        }
        for (Entry<String, String> query : options.buildQueryParameters().entries()) {
            queryParams.put(query.getKey(), replaceTokens(query.getValue(), tokenValues));
        }
        for (Entry<String, String> form : options.buildFormParameters().entries()) {
            formParams.put(form.getKey(), replaceTokens(form.getValue(), tokenValues));
        }

        String pathSuffix = options.buildPathSuffix();
        if (pathSuffix != null) {
            uriBuilder.appendPath(pathSuffix);
        }
        String stringPayload = options.buildStringPayload();
        if (stringPayload != null)
            payload = Payloads.newStringPayload(stringPayload);
    }

    if (queryParams.size() > 0) {
        uriBuilder.query(queryParams);
    }

    requestBuilder.headers(filterOutContentHeaders(headers));

    requestBuilder.endpoint(uriBuilder.build(convertUnsafe(tokenValues)));

    if (payload == null) {
        PayloadEnclosing payloadEnclosing = findOrNull(invocation.getArgs(), PayloadEnclosing.class);
        payload = (payloadEnclosing != null) ? payloadEnclosing.getPayload()
                : findOrNull(invocation.getArgs(), Payload.class);
    }

    List<? extends Part> parts = getParts(invocation,
            ImmutableMultimap.<String, Object>builder().putAll(tokenValues).putAll(formParams).build());

    if (parts.size() > 0) {
        if (formParams.size() > 0) {
            parts = newLinkedList(concat(transform(formParams.entries(), ENTRY_TO_PART), parts));
        }
        payload = new MultipartForm(MultipartForm.BOUNDARY, parts);
    } else if (formParams.size() > 0) {
        payload = Payloads
                .newUrlEncodedFormPayload(transformValues(formParams, NullableToStringFunction.INSTANCE));
    } else if (headers.containsKey(CONTENT_TYPE) && !HttpRequest.NON_PAYLOAD_METHODS.contains(requestMethod)) {
        if (payload == null)
            payload = Payloads.newByteArrayPayload(new byte[] {});
        payload.getContentMetadata().setContentType(get(headers.get(CONTENT_TYPE), 0));
    }
    if (payload != null) {
        requestBuilder.payload(payload);
    }
    GeneratedHttpRequest request = requestBuilder.build();

    org.jclouds.rest.MapBinder mapBinder = getMapPayloadBinderOrNull(invocation);
    if (mapBinder != null) {
        Map<String, Object> mapParams;
        if (caller != null) {
            mapParams = buildPayloadParams(caller);
            mapParams.putAll(buildPayloadParams(invocation));
        } else {
            mapParams = buildPayloadParams(invocation);
        }
        if (invocation.getInvokable().isAnnotationPresent(PayloadParams.class)) {
            PayloadParams params = invocation.getInvokable().getAnnotation(PayloadParams.class);
            addMapPayload(mapParams, params, headers);
        }
        request = mapBinder.bindToRequest(request, mapParams);
    } else {
        request = decorateRequest(request);
    }

    if (request.getPayload() != null) {
        contentMetadataCodec.fromHeaders(request.getPayload().getContentMetadata(), headers);
    }
    utils.checkRequestHasRequiredProperties(request);
    return request;
}
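
Several branches above (query string, multipart vs. url-encoded form, Content-Type handling) hinge on whether a parameter multimap ended up empty, tested with size() > 0. A stripped-down, framework-free sketch of that decision logic; the parameter names are illustrative and not jclouds API:

import com.google.common.collect.LinkedHashMultimap;
import com.google.common.collect.Multimap;

public class ParamDecisionSketch {
    public static void main(String[] args) {
        Multimap<String, Object> queryParams = LinkedHashMultimap.create();
        Multimap<String, Object> formParams = LinkedHashMultimap.create();
        formParams.put("grant_type", "client_credentials");

        if (queryParams.size() > 0) { // equivalent to !queryParams.isEmpty()
            System.out.println("append a query string with " + queryParams.size() + " parameters");
        }
        if (formParams.size() > 0) {
            System.out.println("build a url-encoded payload from " + formParams.size() + " parameters");
        }
    }
}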

From source file: com.trebogeer.jcql.JCQLMain.java

/**
 * Generates the Java model (POJOs) from an existing Cassandra CQL schema.
 * // TODO - segregate mappers from POJOs and make them separately configurable via options. The whole stack of generated code might not always be needed.
 *
 * @param beans         UDT definitions
 * @param tables        table definitions
 * @param partitionKeys partition keys from table metadata
 */
private void generateModelCode(Multimap<String, Pair<String, DataType>> beans,
        Multimap<String, Pair<String, ColumnMetadata>> tables,
        ArrayListMultimap<String, String> partitionKeys) {
    JDefinedClass rowMapper;
    JDefinedClass toUDTMapper = null;
    JDefinedClass binder = null;
    String commonsPackage = (cfg.cpackage != null && !"".equals(cfg.cpackage)) ? cfg.cpackage : cfg.jpackage;
    try {
        rowMapper = model._class(PUBLIC, commonsPackage + ".RowMapper", INTERFACE);
        rowMapper._extends(model.ref(Serializable.class));
        JTypeVar jtv = rowMapper.generify("T");
        JTypeVar jtvRow = rowMapper.generify("R").bound(model.ref(com.datastax.driver.core.GettableData.class));
        rowMapper.method(NONE, jtv, "map").param(jtvRow, "data");
    } catch (Exception e) {
        throw new RuntimeException("Failed to generate mapper interface.", e);
    }

    if (tables != null && !tables.isEmpty()) {
        try {
            binder = model._class(PUBLIC, commonsPackage + ".TableBindMapper", INTERFACE);
            binder._extends(model.ref(Serializable.class));
            JTypeVar jtv = binder.generify("T");
            JMethod jm = binder.method(NONE, model.VOID, "bind");
            jm.param(jtv, "data");
            jm.param(model.ref(BoundStatement.class), "st");
            jm.param(model.ref(Session.class), "session");
        } catch (Exception e) {
            throw new RuntimeException("Failed to generate table bind interface.", e);
        }
    }

    if (beans != null && beans.size() > 0) {
        try {
            toUDTMapper = model._class(PUBLIC, commonsPackage + ".BeanToUDTMapper", INTERFACE);
            toUDTMapper._extends(model.ref(Serializable.class));
            JTypeVar jtv = toUDTMapper.generify("T");
            JMethod toUDT = toUDTMapper.method(NONE, model.ref(UDTValue.class), "toUDT");
            JVar toUDTArg0 = toUDT.param(jtv, "data");
            JVar toUDTArg1 = toUDT.param(Session.class, "session");
        } catch (JClassAlreadyExistsException e) {
            throw new RuntimeException("Failed to generate UDT mapper interface.", e);
        }
    }
    if (beans != null) {
        for (String cl : beans.keySet()) {
            try {
                String camName = camelize(cl);
                JDefinedClass clazz = JCQLUtils.getBeanClass(cfg.jpackage, camName, model);
                clazz.field(PRIVATE | STATIC | FINAL, model.LONG, "serialVersionUID",
                        JExpr.lit((long) ((cfg.jpackage + "." + camName).hashCode())));

                // row mapper
                rowMapperCode(clazz, rowMapper, beans.get(cl),
                        model.ref(com.datastax.driver.core.GettableData.class));

                // pojo to UDT mapper
                toUDTMapperCode(clazz, toUDTMapper, beans.get(cl), cl);

                // fields/getters/setters/annotations
                clazz.annotate(UDT.class).param("keyspace", cfg.keysapce).param("name", cl);
                // JExpr.newArray(codeModel.ref(String.class)).add(ID).add(CODE).add(NAME)
                for (Pair<String, DataType> field : beans.get(cl)) {
                    javaBeanFieldWithGetterSetter(clazz, field.getValue1(), field.getValue0(), -1,
                            com.datastax.driver.mapping.annotations.Field.class);

                }
            } catch (JClassAlreadyExistsException e) {
                logger.warn("Class '{}' already exists for UDT, skipping ", cl);
            }

        }
    }
    if (tables != null && !tables.isEmpty()) {
        for (String table : tables.keySet()) {
            try {
                String camName = camelize(table);
                JDefinedClass clazz = JCQLUtils.getBeanClass(cfg.jpackage, camName, model);
                clazz.field(PRIVATE | STATIC | FINAL, model.LONG, "serialVersionUID",
                        JExpr.lit((long) ((cfg.jpackage + "." + camName).hashCode())));

                Collection<Pair<String, DataType>> dataTypes = Collections2
                        .filter(Collections2.transform(tables.get(table),
                                new Function<Pair<String, ColumnMetadata>, Pair<String, DataType>>() {
                                    @Override
                                    public Pair<String, DataType> apply(Pair<String, ColumnMetadata> input) {
                                        return Pair.with(input.getValue0(), input.getValue1().getType());
                                    }
                                }),
                                input -> input != null && !"solr_query".equalsIgnoreCase(input.getValue0()));

                // row mapper
                rowMapperCode(clazz, rowMapper, dataTypes, model.ref(com.datastax.driver.core.Row.class));

                // bind to statement code

                binderToStatemet(clazz, binder, dataTypes);

                // fields/getters/setters/annotations
                clazz.annotate(Table.class).param("keyspace", cfg.keysapce).param("name", table);
                List<String> pkList = partitionKeys.get(table);
                Set<String> pks = new HashSet<>(pkList);

                for (Pair<String, ColumnMetadata> field : tables.get(table)) {
                    String fieldName = field.getValue0();
                    int order = -1;
                    if (pks.contains(fieldName)) {
                        order = 0;
                        if (pks.size() > 1) {
                            order = pkList.indexOf(field.getValue0());
                        }
                    }
                    javaBeanFieldWithGetterSetter(clazz, field.getValue1().getType(), fieldName, order,
                            Column.class);

                }
            } catch (JClassAlreadyExistsException ex) {
                logger.warn("Class '{}' already exists for table, skipping ", table);
            }
        }
    }
}
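
Note that the method mixes two equivalent emptiness checks, !tables.isEmpty() and beans.size() > 0. For any Multimap these are interchangeable; isEmpty() simply states the intent more directly. A one-line sketch of the equivalence:

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;

public class EmptinessCheckSketch {
    public static void main(String[] args) {
        Multimap<String, String> beans = ArrayListMultimap.create();
        System.out.println((beans.size() > 0) == !beans.isEmpty()); // true while empty...
        beans.put("address", "street");
        System.out.println((beans.size() > 0) == !beans.isEmpty()); // ...and true once populated
    }
}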

From source file: org.apache.pulsar.broker.loadbalance.impl.SimpleLoadManagerImpl.java

/**
 * Assigns an owner for the specified ServiceUnit from the given candidates, following these principles: 1) Optimum
 * distribution: fill up one broker until its load reaches the optimum level (defined by the underload threshold)
 * before pulling another idle broker in; 2) Even distribution: once all brokers' loads are above the optimum level,
 * keep the load even across brokers; 3) Set the underload threshold to a small value (like 1) for pure even
 * distribution, and to a high value (like 80) for pure optimum distribution.
 *
 * Strategy to select a broker: 1) The first choice is the least loaded broker which is underloaded but not idle; 2)
 * The second choice is an idle broker (if there is any); 3) Otherwise simply select the least loaded broker if it is
 * NOT overloaded; 4) If all brokers are overloaded, select the broker with maximum available capacity (since brokers
 * can have different hardware configurations, this usually means selecting the broker with more hardware resources).
 *
 * Broker's load level: 1) Load ranking (triggered by a LoadReport update) estimates the load level according to the
 * resource usage and namespace bundles already loaded by each broker; 2) When the leader broker decides the owner for
 * a new namespace bundle, it may take time for the real owner to actually load the bundle and refresh its LoadReport;
 * the leader broker therefore stores the bundle in a list called preAllocatedBundles, and the quota of all
 * preAllocatedBundles in preAllocatedQuotas, and re-estimates the broker's load level by putting the
 * preAllocatedQuota into the calculation; 3) Everything (preAllocatedBundles and preAllocatedQuotas) gets reset in
 * load ranking.
 */
private synchronized ResourceUnit findBrokerForPlacement(Multimap<Long, ResourceUnit> candidates,
        ServiceUnitId serviceUnit) {
    long underloadThreshold = this.getLoadBalancerBrokerUnderloadedThresholdPercentage();
    long overloadThreshold = this.getLoadBalancerBrokerOverloadedThresholdPercentage();
    ResourceQuota defaultQuota = pulsar.getLocalZkCacheService().getResourceQuotaCache().getDefaultQuota();

    double minLoadPercentage = 101.0;
    long maxAvailability = -1;
    ResourceUnit idleRU = null;
    ResourceUnit maxAvailableRU = null;
    ResourceUnit randomRU = null;
    ResourceUnit selectedRU = null;

    ResourceUnitRanking selectedRanking = null;
    String serviceUnitId = serviceUnit.toString();
    // If the ranking is expected to be in the range [0,100] (which is the case for LOADBALANCER_STRATEGY_LLS),
    // the ranks are bounded. Otherwise (as is the case for LOADBALANCER_STRATEGY_LEAST_MSG), the ranks are simply
    // the total message rate, which is in the range [0,Infinity), so they are unbounded. This "boundedness"
    // affects how two ranks are compared to see which one is better.
    boolean unboundedRanks = getLoadBalancerPlacementStrategy().equals(LOADBALANCER_STRATEGY_LEAST_MSG);
    long randomBrokerIndex = (candidates.size() > 0) ? (this.brokerRotationCursor % candidates.size()) : 0;

    // find the least loaded & not-idle broker
    for (Map.Entry<Long, ResourceUnit> candidateOwner : candidates.entries()) {
        ResourceUnit candidate = candidateOwner.getValue();
        randomBrokerIndex--;

        // skip brokers which are not ranked; this should never happen except in unit tests
        if (!resourceUnitRankings.containsKey(candidate)) {
            continue;
        }

        String resourceUnitId = candidate.getResourceId();
        ResourceUnitRanking ranking = resourceUnitRankings.get(candidate);

        // check if this ServiceUnit is already loaded
        if (ranking.isServiceUnitLoaded(serviceUnitId)) {
            ranking.removeLoadedServiceUnit(serviceUnitId, this.getResourceQuota(serviceUnitId));
        }

        // record a random broker
        if (randomBrokerIndex < 0 && randomRU == null) {
            randomRU = candidate;
        }

        // check the available capacity
        double loadPercentage = ranking.getEstimatedLoadPercentage();
        double availablePercentage = Math.max(0, (100 - loadPercentage) / 100);
        long availability = (long) (ranking.estimateMaxCapacity(defaultQuota) * availablePercentage);
        if (availability > maxAvailability) {
            maxAvailability = availability;
            maxAvailableRU = candidate;
        }

        // check the load percentage
        if (ranking.isIdle()) {
            if (idleRU == null) {
                idleRU = candidate;
            }
        } else {
            if (selectedRU == null) {
                selectedRU = candidate;
                selectedRanking = ranking;
                minLoadPercentage = loadPercentage;
            } else {
                if ((unboundedRanks ? ranking.compareMessageRateTo(selectedRanking)
                        : ranking.compareTo(selectedRanking)) < 0) {
                    minLoadPercentage = loadPercentage;
                    selectedRU = candidate;
                    selectedRanking = ranking;
                }
            }
        }
    }

    if ((minLoadPercentage > underloadThreshold && idleRU != null) || selectedRU == null) {
        // assign to an idle broker if the least loaded broker already has optimum load (i.e. is NOT
        // underloaded), or if all brokers are idle
        selectedRU = idleRU;
    } else if (minLoadPercentage >= 100.0 && randomRU != null && !unboundedRanks) {
        // all brokers are full, assign to a random one
        selectedRU = randomRU;
    } else if (minLoadPercentage > overloadThreshold && !unboundedRanks) {
        // assign to the broker with maximum available capacity if all brokers are overloaded
        selectedRU = maxAvailableRU;
    }

    // re-calculate load level for selected broker
    if (selectedRU != null) {
        this.brokerRotationCursor = (this.brokerRotationCursor + 1) % 1000000;
        ResourceUnitRanking ranking = resourceUnitRankings.get(selectedRU);
        String loadPercentageDesc = ranking.getEstimatedLoadPercentageString();
        log.info("Assign {} to {} with ({}).", serviceUnitId, selectedRU.getResourceId(), loadPercentageDesc);
        if (!ranking.isServiceUnitPreAllocated(serviceUnitId)) {
            final String namespaceName = LoadManagerShared.getNamespaceNameFromBundleName(serviceUnitId);
            final String bundleRange = LoadManagerShared.getBundleRangeFromBundleName(serviceUnitId);
            ResourceQuota quota = this.getResourceQuota(serviceUnitId);
            // Add preallocated bundle range so incoming bundles from the same namespace are not assigned to the
            // same broker.
            brokerToNamespaceToBundleRange
                    .computeIfAbsent(selectedRU.getResourceId().replace("http://", ""), k -> new HashMap<>())
                    .computeIfAbsent(namespaceName, k -> new HashSet<>()).add(bundleRange);
            ranking.addPreAllocatedServiceUnit(serviceUnitId, quota);
            resourceUnitRankings.put(selectedRU, ranking);
        }
    }
    return selectedRU;
}
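
The rotation trick above (brokerRotationCursor % candidates.size()) only needs the total number of key-value pairs, so size() is used directly rather than counting keys. A stripped-down sketch of picking the N-th entry of a multimap the same way (the cursor handling is simplified and the broker names are invented):

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;

import java.util.Map;

public class RotationSketch {

    static <K, V> V pickNth(Multimap<K, V> candidates, long cursor) {
        if (candidates.isEmpty()) {
            return null;
        }
        long index = cursor % candidates.size();
        for (Map.Entry<K, V> entry : candidates.entries()) {
            if (index-- == 0) {
                return entry.getValue();
            }
        }
        return null; // unreachable for a non-empty multimap
    }

    public static void main(String[] args) {
        Multimap<Long, String> candidates = ArrayListMultimap.create();
        candidates.put(1L, "broker-a");
        candidates.put(1L, "broker-b");
        candidates.put(2L, "broker-c");
        System.out.println(pickNth(candidates, 4)); // 4 % 3 == 1 -> "broker-b"
    }
}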

From source file: io.mapzone.arena.analytics.graph.OrganisationPersonGraphFunction.java

@Override
public void generate(MdToolkit tk, IProgressMonitor monitor, Graph graph) throws Exception {
    if (!tk.isClosed()) {
        tk.createSnackbar(Appearance.FadeIn, "Analysis started - stay tuned...");
    }

    final Map<String, Node> organisations = Maps.newHashMap();
    final Map<String, Node> persons = Maps.newHashMap();
    final Multimap<Node, Node> organisation2Persons = ArrayListMultimap.create();
    final Multimap<Node, Node> person2Organisations = ArrayListMultimap.create();

    // iterate on features
    // create Node for each organisation
    // increase weight for each entry per organisation
    FeatureIterator iterator = featureSource.getFeatures().features();
    int i = 0;
    while (iterator.hasNext() && i < 5000) {
        i++;
        SimpleFeature feature = (SimpleFeature) iterator.next();
        String organisationKey = (String) feature.getAttribute("Organisation");
        Node organisationFeature = organisations.get(organisationKey);
        if (organisationFeature == null) {
            organisationFeature = new Node(Node.Type.virtual, "o:" + feature.getID(), featureSource, feature,
                    organisationKey, 1);
            organisations.put(organisationKey, organisationFeature);
            graph.addOrUpdateNode(organisationFeature);
        } else {
            // add weight
            int size = organisation2Persons.get(organisationFeature).size() + 1;
            if (size <= 15) {
                organisationFeature.increaseWeight();
            }
            graph.addOrUpdateNode(organisationFeature);
        }
        String personKey = (String) feature.getAttribute("Name") + " "
                + (String) feature.getAttribute("Vorname");
        Node personFeature = persons.get(personKey);
        if (personFeature == null) {
            personFeature = new Node(Node.Type.real, "p:" + feature.getID(), featureSource, feature, personKey,
                    1);
            persons.put(personKey, personFeature);
            graph.addOrUpdateNode(personFeature);
        } else {
            int size = person2Organisations.get(personFeature).size() + 1;
            if (size <= 15) {
                personFeature.increaseWeight();
            }
            graph.addOrUpdateNode(personFeature);
        }
        // also add the person to the organisation (and vice versa)
        organisation2Persons.put(organisationFeature, personFeature);
        person2Organisations.put(personFeature, organisationFeature);

        graph.addOrUpdateEdge(organisationFeature, personFeature);

        if (i % 100 == 0) {
            log.info("added " + i);
        }
    }
    if (!tk.isClosed()) {
        tk.createSnackbar(Appearance.FadeIn, organisations.size() + " organisations, " + persons.size()
                + " persons and " + organisation2Persons.size() + " relations analysed");
    }
    organisations.clear();
    persons.clear();
    organisation2Persons.clear();
    person2Organisations.clear();
    graph.layout();
}
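
Because organisation2Persons and person2Organisations are ArrayListMultimaps, size() counts every put, i.e. every organisation/person relation, even if the same pair were added twice; a HashMultimap would collapse duplicate pairs. A small sketch of that difference (the names are invented):

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Multimap;

public class RelationCountSketch {
    public static void main(String[] args) {
        Multimap<String, String> listBacked = ArrayListMultimap.create();
        Multimap<String, String> setBacked = HashMultimap.create();

        for (Multimap<String, String> m : ImmutableList.of(listBacked, setBacked)) {
            m.put("ACME", "Alice");
            m.put("ACME", "Alice"); // duplicate key-value pair
            m.put("ACME", "Bob");
        }

        System.out.println(listBacked.size()); // 3 -- every put counts as a separate pair
        System.out.println(setBacked.size());  // 2 -- the duplicate pair is collapsed
    }
}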

From source file: org.eclipse.xtend.core.validation.XtendValidator.java

protected <Executable extends IResolvedExecutable> void doCheckDuplicateExecutables(JvmGenericType inferredType,
        List<Executable> declaredOperations, Function<String, List<Executable>> bySignature,
        Set<EObject> flaggedOperations) {
    Set<Executable> processed = Sets.newHashSet();
    for (Executable declaredExecutable : declaredOperations) {
        if (!processed.contains(declaredExecutable)) {
            List<Executable> sameErasure = bySignature.apply(declaredExecutable.getResolvedErasureSignature());
            if (sameErasure.size() > 1) {
                Multimap<String, Executable> perSignature = HashMultimap.create(sameErasure.size(), 2);
                outer: for (Executable executable : sameErasure) {
                    for (LightweightTypeReference parameterType : executable.getResolvedParameterTypes()) {
                        if (parameterType.isUnknown())
                            continue outer;
                    }
                    perSignature.put(executable.getResolvedSignature(), executable);
                }
                if (perSignature.size() > 1) {
                    for (Collection<Executable> sameSignature : perSignature.asMap().values()) {
                        for (Executable operationWithSameSignature : sameSignature) {
                            JvmExecutable executable = operationWithSameSignature.getDeclaration();
                            EObject otherSource = associations.getPrimarySourceElement(executable);
                            if (flaggedOperations.add(otherSource)) {
                                if (sameSignature.size() > 1) {
                                    error("Duplicate " + typeLabel(executable) + " "
                                            + operationWithSameSignature.getSimpleSignature() + " in type "
                                            + inferredType.getSimpleName(), otherSource,
                                            nameFeature(otherSource), DUPLICATE_METHOD);
                                } else {
                                    error("The " + typeLabel(executable) + " "
                                            + operationWithSameSignature.getSimpleSignature()
                                            + " has the same erasure "
                                            + operationWithSameSignature.getResolvedErasureSignature()
                                            + " as another " + typeLabel(executable) + " in type "
                                            + inferredType.getSimpleName(), otherSource,
                                            nameFeature(otherSource), DUPLICATE_METHOD);
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}
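
The duplicate check above works because perSignature.size() counts every executable put into the multimap, while each distinct resolved signature contributes a single key. A similar grouping can be built with Multimaps.index, and comparing size() against keySet().size() then reveals whether any group has more than one member. A small sketch of that idea (the strings stand in for resolved signatures):

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.Multimaps;

import java.util.List;

public class DuplicateSignatureSketch {
    public static void main(String[] args) {
        List<String> signatures = ImmutableList.of("foo(java.util.List)", "foo(java.util.List)", "bar()");

        // Group the elements by a key function -- here the signature string itself.
        ImmutableListMultimap<String, String> bySignature = Multimaps.index(signatures, s -> s);

        // More key-value pairs than keys means at least one signature occurs more than once.
        boolean hasDuplicates = bySignature.size() > bySignature.keySet().size();
        System.out.println(hasDuplicates); // true
    }
}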

From source file: com.android.tools.idea.rendering.LayoutlibCallbackImpl.java

/**
 * Searches for cycles in the {@code <include>} tag graph of the layout files we've
 * been asked to provide parsers for.
 */
private boolean findCycles() {
    Map<File, String> fileToLayout = Maps.newHashMap();
    Map<String, File> layoutToFile = Maps.newHashMap();
    Multimap<String, String> includeMap = ArrayListMultimap.create();
    for (File file : myParserFiles) {
        String layoutName = LintUtils.getLayoutName(file);
        layoutToFile.put(layoutName, file);
        fileToLayout.put(file, layoutName);
        try {
            String xml = Files.toString(file, Charsets.UTF_8);
            Document document = XmlUtils.parseDocumentSilently(xml, true);
            if (document != null) {
                NodeList includeNodeList = document.getElementsByTagName(VIEW_INCLUDE);
                for (int i = 0, n = includeNodeList.getLength(); i < n; i++) {
                    Element include = (Element) includeNodeList.item(i);
                    String included = include.getAttribute(ATTR_LAYOUT);
                    if (included.startsWith(LAYOUT_RESOURCE_PREFIX)) {
                        String resource = included.substring(LAYOUT_RESOURCE_PREFIX.length());
                        includeMap.put(layoutName, resource);
                    }
                }

                // Deals with tools:layout attribute from fragments
                NodeList fragmentNodeList = document.getElementsByTagName(VIEW_FRAGMENT);
                for (int i = 0, n = fragmentNodeList.getLength(); i < n; i++) {
                    Element fragment = (Element) fragmentNodeList.item(i);
                    String included = fragment.getAttributeNS(TOOLS_URI, ATTR_LAYOUT);
                    if (included.startsWith(LAYOUT_RESOURCE_PREFIX)) {
                        String resource = included.substring(LAYOUT_RESOURCE_PREFIX.length());
                        includeMap.put(layoutName, resource);
                    }
                }
            }
        } catch (IOException e) {
            LOG.warn("Could not check file " + file + " for cyclic dependencies", e);
        }
    }

    // We now have a DAG over the include dependencies in the layouts
    // Do a DFS to detect cycles

    // Perform DFS on the include graph and look for a cycle; if we find one, produce
    // a chain of includes on the way back to show to the user
    if (includeMap.size() > 0) {
        for (String from : includeMap.keySet()) {
            Set<String> visiting = Sets.newHashSetWithExpectedSize(includeMap.size());
            List<String> chain = dfs(from, visiting, includeMap);
            if (chain != null) {
                if (myLogger != null) {
                    RenderProblem.Html problem = RenderProblem.create(WARNING);
                    HtmlBuilder builder = problem.getHtmlBuilder();
                    builder.add("Found cyclical <include> chain: ");
                    boolean first = true;
                    Collections.reverse(chain);
                    for (String layout : chain) {
                        if (first) {
                            first = false;
                        } else {
                            builder.add(" includes ");
                        }
                        File file = layoutToFile.get(layout);
                        if (file != null) {
                            try {
                                String url = SdkUtils.fileToUrlString(file);
                                builder.addLink(layout, url);
                            } catch (MalformedURLException e) {
                                builder.add(layout);
                            }
                        } else {
                            builder.add(layout);
                        }
                    }

                    myLogger.addMessage(problem);
                }
                return true;
            }
        }
    }

    return false;
}
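
includeMap.size() also serves as a capacity hint: the DFS above pre-sizes its visited set with Sets.newHashSetWithExpectedSize(includeMap.size()). The same kind of hint exists on the multimap side via the HashMultimap.create(expectedKeys, expectedValuesPerKey) overload used elsewhere on this page. A tiny sketch, with made-up layout names:

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import com.google.common.collect.Sets;

import java.util.Set;

public class CapacityHintSketch {
    public static void main(String[] args) {
        // Pre-size the multimap when the rough shape of the data is known.
        Multimap<String, String> includeMap = HashMultimap.create(16, 2);
        includeMap.put("activity_main", "toolbar");
        includeMap.put("activity_main", "content_main");

        // Use size() to pre-size auxiliary collections that track the entries.
        Set<String> visiting = Sets.newHashSetWithExpectedSize(includeMap.size());
        System.out.println(includeMap.size() + " entries, visited set starts empty: " + visiting.isEmpty());
    }
}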

From source file: de.hzi.helmholtz.Compare.PathwayComparisonWithModules.java

public Multimap<Double, String> SubsetIdentification(PathwayWithModules firstPathway,
        PathwayWithModules secondPathway, BiMap<Integer, Integer> newSourceGeneIdToPositionMap,
        BiMap<Integer, Integer> newTargetGeneIdToPositionMap, int Yes) {
    Multimap<Double, String> result = TreeMultimap.create(Ordering.natural().reverse(), Ordering.natural());

    Iterator<ModuleGene> sourceGeneIt = firstPathway.moduleGeneIterator();
    int currentQueryGene = 0;
    while (sourceGeneIt.hasNext()) {
        currentQueryGene++;
        ModuleGene queryGene = sourceGeneIt.next();
        Multimap<Integer, String> resultr = TreeMultimap.create(Ordering.natural(), Ordering.natural());
        int currentTargetGene = 0;
        Multiset<String> qfunction = LinkedHashMultiset.create();
        List<String> qfunctionList = new ArrayList<String>();
        List<String> qactivity = new ArrayList<String>();
        List<Set<String>> qsubstrate = new ArrayList<Set<String>>();
        for (Module m : queryGene.getModule()) {
            for (Domain d : m.getDomains()) {
                qfunction.add(d.getDomainFunctionString());
                qfunctionList.add(d.getDomainFunctionString());
                qactivity.add(d.getStatus().toString());
                qsubstrate.add(d.getSubstrates());
            }
        }
        List<String> TargenesSelected = new ArrayList<String>();
        Iterator<ModuleGene> targetGeneIt = secondPathway.moduleGeneIterator();
        while (targetGeneIt.hasNext()) {
            currentTargetGene++;
            ModuleGene targetGene = targetGeneIt.next();
            Multiset<String> tfunction = LinkedHashMultiset.create();
            List<String> tactivity = new ArrayList<String>();
            List<Set<String>> tsubstrate = new ArrayList<Set<String>>();
            List<String> tfunctionList = new ArrayList<String>();
            Iterator<Module> mIter = targetGene.moduleIterator();
            while (mIter.hasNext()) {
                Module m = mIter.next();
                Iterator<Domain> dIter = m.domainIterator();
                while (dIter.hasNext()) {
                    Domain d = dIter.next();
                    tfunction.add(d.getDomainFunctionString());
                    tfunctionList.add(d.getDomainFunctionString());
                    tactivity.add(d.getStatus().toString());
                    tsubstrate.add(d.getSubstrates());
                }
            }
            Multiset<String> DomainsCovered = Multisets.intersection(qfunction, tfunction);
            int Differences = Math.max(Math.abs(DomainsCovered.size() - tfunction.size()),
                    Math.abs(DomainsCovered.size() - qfunction.size()));
            if (DomainsCovered.size() == tfunction.size() && tfunction.size() > 4) {
                TargenesSelected.add(Integer.toString(currentTargetGene));
            } else {
                resultr.put(Differences, Integer.toString(currentTargetGene));
            }

        }
        int count = 0;
        if (resultr.size() > 0) {
            while (TargenesSelected.size() < 2) {
                Multiset<String> k = LinkedHashMultiset.create(resultr.values());
                Multiset<String> t = LinkedHashMultiset.create(TargenesSelected);
                Multiset<String> Covered = Multisets.intersection(k, t);
                if (Covered.size() == k.size()) {
                    break;
                }

                try {
                    TargenesSelected.addAll(
                            resultr.get(Integer.parseInt(resultr.keySet().toArray()[count].toString())));
                } catch (Exception ds) {
                }
                count = count + 1;
            }
        }
        // //System.out.println(TargenesSelected);
        //  Permutation perm = new Permutation();
        //  List<String> perms = perm.run(TargenesSelected);
        CombinationGenerator c = new CombinationGenerator(10, 10);
        List<String> perms = c.GenerateAllPossibleCombinations(TargenesSelected);
        myFunction sim = new myFunction();
        double score = 0;
        String targetIdentified = "";
        List<ModuleGene> targetGenesList = secondPathway.getModulegenes();
        for (String permu : perms) {
            String[] values = permu.replace("[", "").replace("]", "").split(",");
            List<String> mergedTargetgenes = new ArrayList<String>();
            List<Integer> ToRemove = new ArrayList<Integer>();
            List<String> tactivity = new ArrayList<String>();
            List<Set<String>> tsubstrate = new ArrayList<Set<String>>();
            for (String j : values) {
                ToRemove.add(Integer.parseInt(j.trim()));
                for (Module m : targetGenesList.get(Integer.parseInt(j.trim()) - 1).getModule()) {
                    for (Domain i : m.getDomains()) {
                        mergedTargetgenes.add(i.getDomainFunctionString());
                        tactivity.add(i.getStatus().toString());
                        tsubstrate.add(i.getSubstrates());
                    }
                }
            }
            Multimap<Double, Multimap<String, Integer>> FunctionScores = sim.calculate(qfunctionList,
                    mergedTargetgenes);
            Multimap<Double, Multimap<String, Integer>> activityscores = myFunction.calculate(qactivity,
                    tactivity);
            Multimap<Double, Multimap<String, Integer>> substratescores = myFunction
                    .calculate(getSubstrateList(qsubstrate), getSubstrateList(tsubstrate));
            Object FunctionScore = FunctionScores.asMap().keySet().toArray()[0];
            Object activityScore = activityscores.asMap().keySet().toArray()[0];
            Object substrateScore = substratescores.asMap().keySet().toArray()[0];

            double finalScore = Math
                    .round((((2.9 * Double.parseDouble(FunctionScore.toString().trim()))
                            + (0.05 * Double.parseDouble(activityScore.toString().trim()))
                            + (0.05 * Double.parseDouble(substrateScore.toString().trim()))) / 3) * 100.0)
                    / 100.0;
            targetIdentified = permu.replace(",", "+");
            String ConvertedGeneIDs = "";
            if (Yes == 0) {
                ConvertedGeneIDs = reconstructWithGeneId(Integer.toString(currentQueryGene),
                        newSourceGeneIdToPositionMap) + "->"
                        + reconstructWithGeneId(targetIdentified.replace("[", "").replace("]", ""),
                                newTargetGeneIdToPositionMap);
            } else {
                ConvertedGeneIDs = reconstructWithGeneId(targetIdentified.replace("[", "").replace("]", ""),
                        newTargetGeneIdToPositionMap) + "->"
                        + reconstructWithGeneId(Integer.toString(currentQueryGene),
                                newSourceGeneIdToPositionMap);
            }
            // String ConvertedGeneIDs = reconstructWithGeneId(Integer.toString(currentQueryGene), newSourceGeneIdToPositionMap) + "->" + reconstructWithGeneId(targetIdentified.replace("[", "").replace("]", ""), newTargetGeneIdToPositionMap);

            result.put(finalScore, ConvertedGeneIDs);

            ScoreFunctionMatchMisMatch.putAll(ConvertedGeneIDs, FunctionScores.values());
            ScoreStatusMatchMisMatch.putAll(ConvertedGeneIDs, activityscores.values());
            ScoreSubstrateMatchMisMatch.putAll(ConvertedGeneIDs, substratescores.values());

        }

    }
    return result;
}
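
The scores are collected in a TreeMultimap whose key comparator is reversed, so the best score sorts first, and size() grows by one for every (score, mapping) pair even when several mappings share a score. A compact sketch of that pattern (the scores and labels are invented):

import com.google.common.collect.Multimap;
import com.google.common.collect.Ordering;
import com.google.common.collect.TreeMultimap;

public class ScoreCollectionSketch {
    public static void main(String[] args) {
        Multimap<Double, String> result =
                TreeMultimap.create(Ordering.natural().reverse(), Ordering.natural());
        result.put(0.97, "g1->t3");
        result.put(0.97, "g2->t5"); // same score, different mapping
        result.put(0.42, "g4->t1");

        System.out.println(result.size());                      // 3 pairs in total
        System.out.println(result.entries().iterator().next()); // highest score first: 0.97=g1->t3
    }
}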

From source file: de.hzi.helmholtz.Compare.PathwayComparisonUsingModules.java

public Multimap<Double, String> SubsetIdentification(PathwayUsingModules firstPathway,
        PathwayUsingModules secondPathway, BiMap<String, Integer> newSourceGeneIdToPositionMap,
        BiMap<String, Integer> newTargetGeneIdToPositionMap, int Yes) {
    Multimap<Double, String> result = TreeMultimap.create(Ordering.natural().reverse(), Ordering.natural());

    Iterator<Module> sourceGeneIt = firstPathway.geneIterator();
    int currentQueryGene = 0;
    while (sourceGeneIt.hasNext()) {
        currentQueryGene++;
        Module queryGene = sourceGeneIt.next();
        Multimap<Integer, String> resultr = TreeMultimap.create(Ordering.natural(), Ordering.natural());
        int currentTargetGene = 0;
        Multiset<String> qfunction = LinkedHashMultiset.create();
        List<String> qfunctionList = new ArrayList<String>();
        List<String> qactivity = new ArrayList<String>();
        List<Set<String>> qsubstrate = new ArrayList<Set<String>>();
        for (Domain d : queryGene.getDomains()) {
            qfunction.add(d.getDomainFunctionString());
            qfunctionList.add(d.getDomainFunctionString());
            qactivity.add(d.getStatus().toString());
            qsubstrate.add(d.getSubstrates());
        }
        List<String> TargenesSelected = new ArrayList<String>();
        Iterator<Module> targetGeneIt = secondPathway.geneIterator();
        while (targetGeneIt.hasNext()) {
            currentTargetGene++;
            Module targetGene = targetGeneIt.next();
            Multiset<String> tfunction = LinkedHashMultiset.create();
            List<String> tactivity = new ArrayList<String>();
            List<Set<String>> tsubstrate = new ArrayList<Set<String>>();
            List<String> tfunctionList = new ArrayList<String>();
            Iterator<Domain> dIter = targetGene.domainIterator();
            while (dIter.hasNext()) {
                Domain d = dIter.next();
                tfunction.add(d.getDomainFunctionString());
                tfunctionList.add(d.getDomainFunctionString());
                tactivity.add(d.getStatus().toString());
                tsubstrate.add(d.getSubstrates());
            }
            Multiset<String> DomainsCovered = Multisets.intersection(qfunction, tfunction);
            int Differences = Math.max(Math.abs(DomainsCovered.size() - tfunction.size()),
                    Math.abs(DomainsCovered.size() - qfunction.size()));
            if (DomainsCovered.size() == tfunction.size() && tfunction.size() > 4) {
                TargenesSelected.add(Integer.toString(currentTargetGene));
            } else {
                resultr.put(Differences, Integer.toString(currentTargetGene));
            }

        }
        int count = 0;
        if (resultr.size() > 0) {
            int tsize = 0;
            if ((firstPathway.size() > 8 && firstPathway.size() < 10)
                    || (secondPathway.size() > 8 && secondPathway.size() < 10)) {
                tsize = 2;
            } else if ((firstPathway.size() > 2 && firstPathway.size() < 8)
                    && (secondPathway.size() > 2 && secondPathway.size() < 8)) {
                tsize = 4;
            } else {
                tsize = 1;
            }
            while (TargenesSelected.size() < tsize) {
                Multiset<String> k = LinkedHashMultiset.create(resultr.values());
                Multiset<String> t = LinkedHashMultiset.create(TargenesSelected);
                Multiset<String> Covered = Multisets.intersection(k, t);
                if (Covered.size() == k.size()) {
                    break;
                }

                try {
                    TargenesSelected.addAll(
                            resultr.get(Integer.parseInt(resultr.keySet().toArray()[count].toString())));
                } catch (Exception ds) {
                }
                count = count + 1;
            }
        }
        // ////System.out.println(TargenesSelected);
        //  Permutation perm = new Permutation();
        //  List<String> perms = perm.run(TargenesSelected);
        CombinationGenerator c = new CombinationGenerator(10, 10);
        List<String> perms = c.GenerateAllPossibleCombinations(TargenesSelected);
        myFunction sim = new myFunction();
        double score = 0;
        String targetIdentified = "";
        List<Module> targetGenesList = secondPathway.getModules();
        for (String permu : perms) {
            String[] values = permu.replace("[", "").replace("]", "").split(",");
            List<String> mergedTargetgenes = new ArrayList<String>();
            List<Integer> ToRemove = new ArrayList<Integer>();
            List<String> tactivity = new ArrayList<String>();
            List<Set<String>> tsubstrate = new ArrayList<Set<String>>();
            for (String j : values) {
                ToRemove.add(Integer.parseInt(j.trim()));
                for (Domain i : targetGenesList.get(Integer.parseInt(j.trim()) - 1).getDomains()) {

                    mergedTargetgenes.add(i.getDomainFunctionString());
                    tactivity.add(i.getStatus().toString());
                    tsubstrate.add(i.getSubstrates());
                }
            }
            Multimap<Double, Multimap<String, Integer>> FunctionScores = sim.calculate(qfunctionList,
                    mergedTargetgenes);
            Multimap<Double, Multimap<String, Integer>> activityscores = myFunction.calculate(qactivity,
                    tactivity);
            Multimap<Double, Multimap<String, Integer>> substratescores = myFunction
                    .calculate(getSubstrateList(qsubstrate), getSubstrateList(tsubstrate));
            Object FunctionScore = FunctionScores.asMap().keySet().toArray()[0];
            Object activityScore = activityscores.asMap().keySet().toArray()[0];
            Object substrateScore = substratescores.asMap().keySet().toArray()[0];

            double finalScore = Math
                    .round((((2.9 * Double.parseDouble(FunctionScore.toString().trim()))
                            + (0.05 * Double.parseDouble(activityScore.toString().trim()))
                            + (0.05 * Double.parseDouble(substrateScore.toString().trim()))) / 3) * 100.0)
                    / 100.0;
            targetIdentified = permu.replace(",", "+");
            String ConvertedGeneIDs = "";
            if (Yes == 0) {
                ConvertedGeneIDs = reconstructWithGeneId(Integer.toString(currentQueryGene),
                        newSourceGeneIdToPositionMap) + "->"
                        + reconstructWithGeneId(targetIdentified.replace("[", "").replace("]", ""),
                                newTargetGeneIdToPositionMap);
            } else {
                ConvertedGeneIDs = reconstructWithGeneId(targetIdentified.replace("[", "").replace("]", ""),
                        newTargetGeneIdToPositionMap) + "->"
                        + reconstructWithGeneId(Integer.toString(currentQueryGene),
                                newSourceGeneIdToPositionMap);
            }
            // String ConvertedGeneIDs = reconstructWithGeneId(Integer.toString(currentQueryGene), newSourceGeneIdToPositionMap) + "->" + reconstructWithGeneId(targetIdentified.replace("[", "").replace("]", ""), newTargetGeneIdToPositionMap);

            result.put(finalScore, ConvertedGeneIDs);

            ScoreFunctionMatchMisMatch.putAll(ConvertedGeneIDs, FunctionScores.values());
            ScoreStatusMatchMisMatch.putAll(ConvertedGeneIDs, activityscores.values());
            ScoreSubstrateMatchMisMatch.putAll(ConvertedGeneIDs, substratescores.values());

        }

    }
    return result;
}

From source file: com.flexive.core.storage.genericSQL.GenericTreeStorageSpreaded.java

/**
 * {@inheritDoc}
 */
@Override
public void checkTree(Connection con, FxTreeMode mode) throws FxApplicationException {
    PreparedStatement stmt = null;
    try {
        // 1 - ID, 2 - LFT, 3 - RGT, 4 - CHILDCOUNT, 5 - DEPTH, 6 - PARENT
        final String sql = "SELECT t.id, t.LFT, t.RGT, t.CHILDCOUNT, t.DEPTH, t.PARENT " + "FROM "
                + getTable(mode) + " t";
        stmt = con.prepareStatement(sql);
        stmt.setFetchSize(10000);
        final ResultSet rs = stmt.executeQuery();

        // collect nodes, build lookup tables
        final Map<Long, CheckedNodeInfo> nodeMap = Maps.newHashMap(); // node ID -> node info
        final Multimap<Long, CheckedNodeInfo> childMap = HashMultimap.create(); // node ID -> children
        final Multimap<BigInteger, CheckedNodeInfo> leftNodeInfos = HashMultimap.create(1000, 1);
        final Multimap<BigInteger, CheckedNodeInfo> rightNodeInfos = HashMultimap.create(1000, 1);
        while (rs.next()) {
            final CheckedNodeInfo info = new CheckedNodeInfo(rs.getLong(1), rs.getLong(6), getNodeBounds(rs, 2),
                    getNodeBounds(rs, 3), rs.getInt(4), rs.getInt(5));
            nodeMap.put(info.id, info);
            childMap.put(info.parentId, info);
            leftNodeInfos.put(info.left, info);
            rightNodeInfos.put(info.right, info);
        }

        // process all nodes
        for (CheckedNodeInfo node : nodeMap.values()) {

            // check node boundaries
            if (node.left.compareTo(node.right) > 0) {
                throw new FxTreeException(LOG, "ex.tree.check.failed", mode,
                        "#" + node.id + ": left boundary greater than right.");
            }

            // check node bounds of children
            BigInteger min = MAX_RIGHT;
            BigInteger max = BigInteger.ZERO;
            final Collection<CheckedNodeInfo> children = childMap.get(node.id);
            for (CheckedNodeInfo child : children) {
                if (child.left.compareTo(min) < 0) {
                    min = child.left;
                }
                if (child.right.compareTo(max) > 0) {
                    max = child.right;
                }
            }
            if (max.compareTo(node.right) > 0) {
                throw new FxTreeException(LOG, "ex.tree.check.failed", mode,
                        "#" + node.id + " out of bounds (right)");
            }
            if (min.compareTo(node.left) < 0) {
                throw new FxTreeException(LOG, "ex.tree.check.failed", mode,
                        "#" + node.id + " out of bounds (left)");
            }

            // Check stored child count
            if (node.directChildCount != children.size()) {
                throw new FxTreeException(LOG, "ex.tree.check.failed", mode,
                        "#" + node.id + " invalid direct child count [" + node.directChildCount + "!="
                                + children.size() + "]");
            }

            // Check depth
            if (node.id != FxTreeNode.ROOT_NODE && node.depth != nodeMap.get(node.parentId).depth + 1) {
                throw new FxTreeException(LOG, "ex.tree.check.failed", mode, "#" + node.id + " invalid depth: "
                        + node.depth + ", parent depth=" + nodeMap.get(node.parentId).depth);
            }
        }

        checkUniqueBounds(mode, leftNodeInfos, "left");
        checkUniqueBounds(mode, rightNodeInfos, "right");

        if (LOG.isDebugEnabled())
            LOG.debug(
                    "Successfully checked [" + childMap.size() + "] tree nodes in mode [" + mode.name() + "]!");
    } catch (SQLException e) {
        throw new FxTreeException(LOG, e, "ex.tree.check.failed", mode, e.getMessage());
    } finally {
        Database.closeObjects(GenericTreeStorageSpreaded.class, stmt);
    }
}
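
In the tree check above every node is added to childMap exactly once, keyed by its parent, so the childMap.size() reported in the debug message equals the total number of nodes scanned. A minimal sketch of that invariant (the node IDs are made up):

import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Multimap;

import java.util.Map;

public class ChildMapSizeSketch {
    public static void main(String[] args) {
        // node ID -> parent ID; a fabricated tree where node 1 is the root (parent 0).
        Map<Long, Long> parentOf = ImmutableMap.of(1L, 0L, 2L, 1L, 3L, 1L, 4L, 2L);

        Multimap<Long, Long> childMap = HashMultimap.create();
        parentOf.forEach((node, parent) -> childMap.put(parent, node));

        // One key-value pair per node, regardless of how many distinct parents exist.
        System.out.println(childMap.size() == parentOf.size()); // true
    }
}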