Example usage for com.google.common.collect Multimap asMap

Introduction

This page collects usage examples for com.google.common.collect Multimap asMap, drawn from open-source projects.

Prototype

Map<K, Collection<V>> asMap();

Documentation

Returns a view of this multimap as a Map from each distinct key to the nonempty collection of that key's associated values.
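
The examples below all share the same basic pattern: populate a Multimap, then iterate its asMap() view as Map.Entry<K, Collection<V>> pairs. Here is a minimal, self-contained sketch of that pattern (the class and variable names are illustrative only, not taken from any of the sources below):

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;

import java.util.Collection;
import java.util.Map;

public class MultimapAsMapDemo {
    public static void main(String[] args) {
        Multimap<String, Integer> multimap = ArrayListMultimap.create();
        multimap.put("even", 2);
        multimap.put("even", 4);
        multimap.put("odd", 1);

        // asMap() returns a live view, not a copy
        Map<String, Collection<Integer>> map = multimap.asMap();
        for (Map.Entry<String, Collection<Integer>> entry : map.entrySet()) {
            // e.g. "even -> [2, 4]" and "odd -> [1]" (iteration order not guaranteed)
            System.out.println(entry.getKey() + " -> " + entry.getValue());
        }
    }
}

Because the returned map is a view, changes to the underlying multimap are reflected in it, and a key with no remaining values never appears (hence the "nonempty collection" wording above).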

Usage

From source file: org.apache.giraph.comm.netty.NettyWorkerServer.java

/**
 * Resolve mutation requests.
 *
 * @param graphState Graph state
 */
private void resolveMutations(GraphState<I, V, E, M> graphState) {
    Multimap<Integer, I> resolveVertexIndices = HashMultimap
            .create(service.getPartitionStore().getNumPartitions(), 100);
    // Add any mutated vertex indices to be resolved
    for (Entry<I, VertexMutations<I, V, E, M>> e : serverData.getVertexMutations().entrySet()) {
        I vertexId = e.getKey();
        Integer partitionId = service.getPartitionId(vertexId);
        if (!resolveVertexIndices.put(partitionId, vertexId)) {
            throw new IllegalStateException(
                    "resolveMutations: Already has missing vertex on this " + "worker for " + vertexId);
        }
    }
    // Keep track of the vertices which are not here but have received messages
    for (Integer partitionId : service.getPartitionStore().getPartitionIds()) {
        Iterable<I> destinations = serverData.getCurrentMessageStore()
                .getPartitionDestinationVertices(partitionId);
        if (!Iterables.isEmpty(destinations)) {
            Partition<I, V, E, M> partition = service.getPartitionStore().getPartition(partitionId);
            for (I vertexId : destinations) {
                if (partition.getVertex(vertexId) == null) {
                    if (!resolveVertexIndices.put(partitionId, vertexId)) {
                        throw new IllegalStateException("resolveMutations: Already has missing vertex on this "
                                + "worker for " + vertexId);
                    }
                }
            }
            service.getPartitionStore().putPartition(partition);
        }
    }
    // Resolve all graph mutations
    VertexResolver<I, V, E, M> vertexResolver = conf.createVertexResolver(graphState);
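    // asMap() groups the collected vertex ids by partition, so each partition is fetched and resolved once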
    for (Entry<Integer, Collection<I>> e : resolveVertexIndices.asMap().entrySet()) {
        Partition<I, V, E, M> partition = service.getPartitionStore().getPartition(e.getKey());
        for (I vertexIndex : e.getValue()) {
            Vertex<I, V, E, M> originalVertex = partition.getVertex(vertexIndex);

            VertexMutations<I, V, E, M> mutations = null;
            VertexMutations<I, V, E, M> vertexMutations = serverData.getVertexMutations().get(vertexIndex);
            if (vertexMutations != null) {
                synchronized (vertexMutations) {
                    mutations = vertexMutations.copy();
                }
                serverData.getVertexMutations().remove(vertexIndex);
            }
            Vertex<I, V, E, M> vertex = vertexResolver.resolve(vertexIndex, originalVertex, mutations,
                    serverData.getCurrentMessageStore().hasMessagesForVertex(vertexIndex));
            graphState.getContext().progress();

            if (LOG.isDebugEnabled()) {
                LOG.debug("resolveMutations: Resolved vertex index " + vertexIndex + " with original vertex "
                        + originalVertex + ", returned vertex " + vertex + " on superstep "
                        + service.getSuperstep() + " with mutations " + mutations);
            }
            if (vertex != null) {
                partition.putVertex(vertex);
            } else if (originalVertex != null) {
                partition.removeVertex(originalVertex.getId());
            }
        }
        service.getPartitionStore().putPartition(partition);
    }
    if (!serverData.getVertexMutations().isEmpty()) {
        throw new IllegalStateException("resolveMutations: Illegally " + "still has "
                + serverData.getVertexMutations().size() + " mutations left.");
    }
}

From source file: com.puppetlabs.geppetto.validation.impl.DirectoryValidatorImpl.java

private Multimap<ModuleName, MetadataInfo> collectModuleData(List<Entry<File, Resource>> modulesToValidate,
        final SubMonitor ticker) {
    Set<URI> resourcesOnPath = Sets.newHashSet();
    Set<File> filesOnPath = Sets.newHashSet();
    final PPSearchPath searchPath = ppRunner.getDefaultSearchPath();
    for (File f : metadataFiles) {
        f = f.getAbsoluteFile();
        String absPath = f.getPath();
        if (isOnPath(pathToFile(absPath), searchPath)) {
            filesOnPath.add(f);
            if (Forge.MODULEFILE_NAME.equals(f.getName()))
                absPath = new File(f.getParentFile(), Forge.METADATA_JSON_NAME).getPath();
            resourcesOnPath.add(URI.createFileURI(absPath));
        }
    }

    // Ensure that all modules visible on the path are loaded but remember those that
    // were found using the searchPath (as opposed to the ones found at the root)
    Set<File> nonRootsOnPath = Sets.newHashSet();
    List<File> resolvedPath = searchPath.getResolvedPath();
    for (File f : resolvedPath) {
        File dir = null;
        if ("*".equals(f.getName()))
            dir = f.getParentFile();
        else if (f.isDirectory())
            dir = f;

        if (dir != null) {
            File metadata = new File(dir, Forge.METADATA_JSON_NAME);
            if (!metadata.isFile()) {
                metadata = new File(dir, Forge.MODULEFILE_NAME);
                if (!metadata.isFile())
                    continue;
            }
            f = metadata;
        } else {
            String n = f.getName();
            if (!((Forge.METADATA_JSON_NAME.equals(n) || Forge.MODULEFILE_NAME.equals(n)) && f.isFile()))
                continue;
        }

        f = f.getAbsoluteFile();
        if (filesOnPath.add(f)) {
            nonRootsOnPath.add(f);
            resourcesOnPath.add(URI.createFileURI(Forge.MODULEFILE_NAME.equals(f.getName())
                    ? new File(f.getParentFile(), Forge.METADATA_JSON_NAME).getPath()
                    : f.getPath()));
        }
    }

    Map<File, Resource> mdResources = loadModuleResources(filesOnPath, ticker);
    if (options.isCheckModuleSemantics())
        for (Entry<File, Resource> r : mdResources.entrySet()) {
            File f = r.getKey();
            if (!nonRootsOnPath.contains(f) && options.isValidationCandidate(f))
                modulesToValidate.add(r);
        }

    final IPath nodeRootPath = new Path(root.getAbsolutePath()).append(NAME_OF_DIR_WITH_RESTRICTED_SCOPE);
    final Multimap<ModuleName, MetadataInfo> moduleData = ArrayListMultimap.create();
    ModuleUtil mdUtil = ppRunner.getModuleUtil();
    for (File f : filesOnPath) {
        // load and remember all that loaded ok

        Resource mdResource = mdResources.get(f);
        if (mdResource == null)
            continue;

        EList<EObject> contents = mdResource.getContents();
        if (contents.isEmpty())
            continue;

        EObject first = contents.get(0);
        if (first instanceof JsonMetadata) {
            JsonMetadata m = (JsonMetadata) first;
            ModuleName moduleName = mdUtil.createModuleName(mdUtil.getName(m));
            if (moduleName != null)
                // remember the metadata and where it came from
                // and if it represents a NODE as opposed to a regular MODULE
                moduleData.put(moduleName, new MetadataInfo(mdUtil.getApiMetadata(m), f,
                        nodeRootPath.isPrefixOf(new Path(f.getAbsolutePath()))));
        }
    }

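    // Check dependencies per module name: asMap() groups all metadata entries that share a ModuleName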
    if (options.isCheckModuleSemantics())
        for (Map.Entry<ModuleName, Collection<MetadataInfo>> entry : moduleData.asMap().entrySet()) {
            Collection<MetadataInfo> mis = entry.getValue();
            boolean redefined = mis.size() > 1;
            for (MetadataInfo mi : mis) {
                for (Dependency dep : mi.getMetadata().getDependencies()) {
                    boolean resolved = false;
                    Collection<MetadataInfo> targetMis = moduleData.get(dep.getName());
                    for (MetadataInfo targetMi : targetMis) {
                        VersionRange vr = dep.getVersionRequirement();
                        if (vr == null || vr.isIncluded(targetMi.getMetadata().getVersion())) {
                            mi.addResolvedDependency(dep, targetMi);
                            resolved = true;
                        }
                    }
                    if (!resolved)
                        mi.addUnresolvedDependency(dep);
                }
                if (redefined)
                    addFileDiagnostic(options.getModuleValidationAdvisor().getModuleRedefinition(),
                            ValidationService.MODULE, mi.getFile(),
                            "Redefinition - equally named module already exists",
                            ModuleDiagnostics.ISSUE__MODULE_REDEFINITION);
            }
        }

    return moduleData;
}

From source file: org.vclipse.vcml2idoc.transformation.VCML2IDocSwitch.java

/**
 * @param material the material whose classifications are transformed
 * @return the created CLFMAS IDocs
 */
private List<IDoc> getIDocsCLFMAS(final Material material) {
    final List<IDoc> result = new ArrayList<IDoc>();
    if (generateIDocsFor(IVCML2IDocPreferences.CLFMAS)) {
        // group classes by type
        final Multimap<Integer, Classification> classesByType = ArrayListMultimap.create();
        for (final Classification classification : material.getClassifications()) {
            Class cls = classification.getCls();
            classesByType.put(VcmlUtils.getClassType(toUpperCase(cls.getName())), classification);
        }
        // create CLFMAS IDoc for each class
        for (final Entry<Integer, Collection<Classification>> entry : classesByType.asMap().entrySet()) {
            final int classType = entry.getKey();
            final IDoc iDoc = createIDocRootSegment("CLFMAS02", "CLFMAS");
            // Master Object Classification
            final Segment segmentE1OCLFM = addChildSegment(iDoc, "E1OCLFM");
            setValue(segmentE1OCLFM, "MSGFN", "004");
            setValue(segmentE1OCLFM, "OBTAB", "MARA");
            setValue(segmentE1OCLFM, "OBJEK", toUpperCase(material.getName()));
            setValue(segmentE1OCLFM, "KLART", classType);
            setValue(segmentE1OCLFM, "MAFID", "O");
            setValue(segmentE1OCLFM, "OBJECT_TABLE", "MARA");
            addSegmentE1DATEM(segmentE1OCLFM, material);
            // Distribution Classification: Object Class Assignment
            for (final Classification classification : entry.getValue()) {
                Class cls = classification.getCls();
                final Segment segmentE1KSSKM = addChildSegment(segmentE1OCLFM, "E1KSSKM");
                setValue(segmentE1KSSKM, "MSGFN", "004");
                setValue(segmentE1KSSKM, "CLASS", VcmlUtils.getClassName(toUpperCase(cls.getName())));
                setValue(segmentE1KSSKM, "DATUV", "00000000");
                setValue(segmentE1KSSKM, "STATU", VcmlUtils.createIntFromStatus(cls.getStatus())); // TODO is status neccessary here?
            }
            // Distribution of classification: assigned char. values
            for (final Classification classification : entry.getValue()) {
                for (final ValueAssignment va : classification.getValueAssignments()) {
                    for (final Literal literal : va.getValues()) {
                        final Segment segmentE1AUSPM = addChildSegment(segmentE1OCLFM, "E1AUSPM");
                        setValue(segmentE1AUSPM, "MSGFN", "004");
                        Characteristic characteristic = va.getCharacteristic();
                        setValue(segmentE1AUSPM, "ATNAM", characteristic.getName());
                        // AENNR: Change number
                        if (literal instanceof NumericLiteral) {
                            BigDecimal value = new BigDecimal(((NumericLiteral) literal).getValue());
                            setValue(segmentE1AUSPM, "ATFLV", value.toString());
                            setValue(segmentE1AUSPM, "ATFLB", value.toString());
                        } else if (literal instanceof SymbolicLiteral) {
                            setValue(segmentE1AUSPM, "ATWRT", ((SymbolicLiteral) literal).getValue());
                        } else {
                            throw new IllegalArgumentException(literal.toString());
                        }
                        setValue(segmentE1AUSPM, "ATCOD", 1);
                        setValue(segmentE1AUSPM, "ATTLV", 0);
                        setValue(segmentE1AUSPM, "ATTLB", 0);
                        setValue(segmentE1AUSPM, "ATINC", 0);
                    }
                }
            }
            addSegmentE1UPSLINK(iDoc, "MARA", VcmlUtils.DEFAULT_VALIDITY_START);
            addSegmentE1UPSITM(
                    iDoc, "CLFMAS", "CLF", String.format("%1$-30s%2$-30s%3$24s", "MARA",
                            toUpperCase(material.getName()), classType + "*"),
                    HIELEV_CLFMAS, inslev_CLFMAS++, 1);
            result.add(iDoc);
        }
    }
    return result;
}

From source file: org.elasticsearch.cassandra.cluster.routing.LocalFirstSearchStrategy.java

@Override
public AbstractSearchStrategy.Result topology(String ksName, Collection<InetAddress> staredShard) {
    Keyspace.open(ksName);

    Set<InetAddress> liveNodes = Gossiper.instance.getLiveTokenOwners();
    InetAddress localAddress = FBUtilities.getBroadcastAddress();
    Map<Range<Token>, List<InetAddress>> allRanges = StorageService.instance
            .getRangeToAddressMapInLocalDC(ksName);

    Multimap<InetAddress, Range<Token>> topo = ArrayListMultimap.create();
    boolean consistent = true;

    Collection<Range<Token>> localRanges = new ArrayList<Range<Token>>();
    for (Entry<Range<Token>, List<InetAddress>> entry : allRanges.entrySet()) {
        List<InetAddress> addrList = entry.getValue();
        if (addrList.contains(localAddress)) {
            localRanges.add(entry.getKey());
            entry.getValue().remove(localAddress);
        }
    }
    logger.debug("{} localRanges for keyspace {} on address {} = {}", localRanges.size(), ksName,
            FBUtilities.getBroadcastAddress(), localRanges);

    topo.putAll(localAddress, localRanges);

    // remove localRanges from allRanges.
    for (Range<Token> range : localRanges) {
        allRanges.remove(range);
    }

    // remove dead nodes from allRanges values.
    for (Entry<Range<Token>, List<InetAddress>> entry : allRanges.entrySet()) {
        List<InetAddress> addrList = entry.getValue();
        for (Iterator<InetAddress> i = addrList.iterator(); i.hasNext();) {
            InetAddress addr = i.next();
            if (!liveNodes.contains(addr)) {
                i.remove();
            }
        }
        if (addrList.size() == 0) {
            consistent = false;
            logger.warn("Inconsistent search for keyspace {}, no alive node for range {}", ksName,
                    entry.getKey());
        }
    }

    // pick up a random address for non-local ranges
    Random rnd = new Random();
    for (Entry<Range<Token>, List<InetAddress>> entry : allRanges.entrySet()) {
        List<InetAddress> addrList = entry.getValue();
        InetAddress addr = addrList.get(rnd.nextInt(addrList.size()));
        topo.put(addr, entry.getKey());
    }
    if (logger.isDebugEnabled()) {
        logger.debug("topology for keyspace {} = {}", ksName, topo.asMap());
    }
    return new AbstractSearchStrategy.Result(topo.asMap(), consistent,
            Gossiper.instance.getUnreachableTokenOwners().size());
}

From source file: uk.ac.manchester.cs.owl.owlapi.OWLOntologyManagerImpl.java

protected void fixIllegalPunnings(OWLOntology o) {
    Collection<IRI> illegals = o.determineIllegalPunnings(true);
    Multimap<IRI, OWLDeclarationAxiom> illegalDeclarations = HashMultimap.create();
    o.axioms(AxiomType.DECLARATION, Imports.INCLUDED).filter(d -> illegals.contains(d.getEntity().getIRI()))
            .forEach(d -> illegalDeclarations.put(d.getEntity().getIRI(), d));
    Map<OWLEntity, OWLEntity> replacementMap = new HashMap<>();
    for (Map.Entry<IRI, Collection<OWLDeclarationAxiom>> e : illegalDeclarations.asMap().entrySet()) {
        if (e.getValue().size() == 1) {
            // One declaration only: illegal punning comes from use or from
            // defaulting of types
            OWLDeclarationAxiom correctDeclaration = e.getValue().iterator().next();
            // currently we only know how to fix the incorrect defaulting of
            // properties to annotation properties
            OWLEntity entity = correctDeclaration.getEntity();
            if (entity.isOWLDataProperty() || entity.isOWLObjectProperty()) {
                OWLAnnotationProperty wrongProperty = dataFactory.getOWLAnnotationProperty(entity.getIRI());
                replacementMap.put(wrongProperty, entity);
            }
        } else {
            // Multiple declarations: bad data. Cannot be repaired automatically.
            String errorMessage = "Illegal redeclarations of entities: reuse of entity {} in punning not allowed {}";
            LOGGER.warn(errorMessage, e.getKey(), e.getValue());
        }
    }
    OWLAnnotationPropertyTransformer changer = new OWLAnnotationPropertyTransformer(replacementMap,
            dataFactory);
    List<OWLAxiomChange> list = new ArrayList<>();
    o.importsClosure().forEach(ont -> {
        for (OWLEntity e : replacementMap.keySet()) {
            // all axioms referring the annotation property
            // must be rebuilt.
            ont.referencingAxioms(e).forEach(ax -> {
                list.add(new RemoveAxiom(ont, ax));
                list.add(new AddAxiom(ont, changer.transformObject(ax)));
            });
        }
    });
    o.getOWLOntologyManager().applyChanges(list);
}

From source file: edu.byu.nlp.crowdsourcing.measurements.classification.ClassificationMeasurementModelExpectations.java

public void initialize(Dataset dataset, Map<String, Integer> instanceIndices, double[][] logNuY) {

    Counter<String> expectationTypes = new HashCounter<String>();
    if (measurementsForAnnotator == null) {

        // multimaps
        Multimap<Integer, MeasurementExpectation<Integer>> perDocIndex = ArrayListMultimap.create();
        Multimap<Integer, MeasurementExpectation<Integer>> perAnnotator = ArrayListMultimap.create();
        Multimap<Pair<Integer, Integer>, MeasurementExpectation<Integer>> perAnnotatorAndDocIndex = ArrayListMultimap
                .create();
        Multimap<Triple<Integer, Integer, Integer>, MeasurementExpectation<Integer>> perAnnotatorDocIndexAndLabel = ArrayListMultimap
                .create();
        measurementLookup = Maps.newHashMap();

        // initialize each measurement expectation with the data (and index it for easy lookup)
        for (Measurement measurement : dataset.getMeasurements()) {
            int label = ((ClassificationMeasurement) measurement).getLabel();
            MeasurementExpectation<Integer> expectation = ClassificationMeasurementExpectations
                    .fromMeasurement(measurement, dataset, instanceIndices, logNuY);
            measurementLookup.put(measurement, expectation);
            if (expectation.getDependentIndices().size() == 0) {
                // ignore measurements that don't apply to any documents
                expectationTypes.incrementCount("Ineffective+" + measurement.getClass().getSimpleName(), 1);
            } else {
                expectationTypes.incrementCount(measurement.getClass().getSimpleName(), 1);
                perAnnotator.put(measurement.getAnnotator(), expectation);
                for (Integer docIndex : expectation.getDependentIndices()) {
                    perDocIndex.put(docIndex, expectation);
                    perAnnotatorAndDocIndex.put(Pair.of(measurement.getAnnotator(), docIndex), expectation);
                    perAnnotatorDocIndexAndLabel.put(Triple.of(measurement.getAnnotator(), docIndex, label),
                            expectation);
                }
            }
        }
        logger.info("Measurement Types: " + expectationTypes);

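        // asMap() exposes each multimap as a live Map<K, Collection<V>> view; no data is copied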
        measurementsForAnnotator = perAnnotator.asMap();
        measurementsForAnnotatorAndDocIndex = perAnnotatorAndDocIndex.asMap();
        measurementsForAnnotatorDocIndexAndLabel = perAnnotatorDocIndexAndLabel.asMap();
        measurementsForDocIndex = perDocIndex.asMap();
    }

}

From source file: org.eclipse.xtext.serializer.analysis.ContextPDAProvider.java

@Override
public SerializationContextMap<Pda<ISerState, RuleCall>> getContextPDAs(Grammar grammar) {
    Builder<Pda<ISerState, RuleCall>> result = SerializationContextMap.<Pda<ISerState, RuleCall>>builder();
    SerializationContextMap<Pda<ISerState, RuleCall>> grammarPDAs = grammarPdaProvider.getGrammarPDAs(grammar);
    Multimap<Action, SerializerPDA> actionPdas = ArrayListMultimap.create();
    Multimap<Action, ISerializationContext> actionContexts = LinkedHashMultimap.create();
    Map<ParserRule, Integer> indexedRules = indexRules(grammar);
    for (SerializationContextMap.Entry<Pda<ISerState, RuleCall>> e : grammarPDAs.values()) {
        List<ISerializationContext> contexts = e.getContexts();
        Pda<ISerState, RuleCall> pda = e.getValue();
        List<ISerState> actions = Lists.newArrayList();
        for (ISerState state : nfaUtil.collect(pda)) {
            if (GrammarUtil.isAssignedAction(state.getGrammarElement())) {
                actions.add(state);
            }
        }
        if (actions.isEmpty()) {
            Pda<ISerState, RuleCall> filtered = filterUnneededUnassignedRuleCalls(pda, indexedRules);
            result.put(contexts, filtered);
        } else {
            try {
                SerializerPDA rulePda = extract(pda.getStop());
                Pda<ISerState, RuleCall> filtered = filterUnneededUnassignedRuleCalls(rulePda, indexedRules);
                result.put(contexts, filtered);
                for (ISerState state : actions) {
                    Action action = (Action) state.getGrammarElement();
                    SerializerPDA actionPda = extract(state);
                    actionPdas.put(action, actionPda);
                    actionContexts.putAll(action, contexts);
                }
            } catch (Exception x) {
                LOG.error("Error extracting PDA for action in context '" + contexts + "': " + x.getMessage(),
                        x);
            }
        }
    }
    for (Map.Entry<Action, Collection<SerializerPDA>> action : actionPdas.asMap().entrySet()) {
        SerializerPDA merged = merge(new ActionContext(null, action.getKey()), action.getValue());
        Set<Set<Parameter>> parameterPermutations = Sets.newLinkedHashSet();
        for (ISerializationContext container : actionContexts.get(action.getKey())) {
            parameterPermutations.add(container.getEnabledBooleanParameters());
        }
        // for (IContext container : actionContexts.get(action.getKey())) {
        for (Set<Parameter> parameters : parameterPermutations) {
            ISerializationContext context = new ActionContext( /* container */ null, action.getKey());
            if (!parameters.isEmpty())
                context = new SerializationContext.ParameterValueContext(context, parameters);
            Pda<ISerState, RuleCall> filtered = filterUnneededUnassignedRuleCalls(merged, indexedRules);
            result.put(context, filtered);
        }
        // }
    }
    return result.create();
}

From source file: com.cinchapi.concourse.server.storage.db.SearchRecord.java

/**
 * Return the Set of primary keys for records that match {@code query}.
 *
 * @param query the search query
 * @return the Set of PrimaryKeys
 */
public Set<PrimaryKey> search(Text query) {
    read.lock();
    try {
        Multimap<PrimaryKey, Integer> reference = HashMultimap.create();
        String[] toks = query.toString().toLowerCase()
                .split(TStrings.REGEX_GROUP_OF_ONE_OR_MORE_WHITESPACE_CHARS);
        boolean initial = true;
        int offset = 0;
        for (String tok : toks) {
            Multimap<PrimaryKey, Integer> temp = HashMultimap.create();
            if (STOPWORDS.contains(tok)) {
                // When skipping a stop word, we must record an offset to
                // correctly determine if the next term match is in the
                // correct relative position to the previous term match
                ++offset;
                continue;
            }
            Set<Position> positions = get(Text.wrap(tok));
            for (Position position : positions) {
                PrimaryKey key = position.getPrimaryKey();
                int pos = position.getIndex();
                if (initial) {
                    temp.put(key, pos);
                } else {
                    for (int current : reference.get(key)) {
                        if (pos == current + 1 + offset) {
                            temp.put(key, pos);
                        }
                    }
                }
            }
            initial = false;
            reference = temp;
            offset = 0;
        }

        // Result Scoring: Scoring is simply the number of times the query
        // appears in a document [e.g. the number of Positions mapped from
        // key: #reference.get(key).size()]. The total number of positions
        // in #reference is equal to the total number of times a document
        // appears in the corpus [e.g. reference.asMap().values().size()].
        Multimap<Integer, PrimaryKey> sorted = TreeMultimap.create(Collections.<Integer>reverseOrder(),
                PrimaryKey.Sorter.INSTANCE);
        for (Entry<PrimaryKey, Collection<Integer>> entry : reference.asMap().entrySet()) {
            sorted.put(entry.getValue().size(), entry.getKey());
        }
        return Sets.newLinkedHashSet(sorted.values());
    } finally {
        read.unlock();
    }
}

From source file: io.druid.java.util.http.client.NettyHttpClient.java

@Override
public <Intermediate, Final> ListenableFuture<Final> go(final Request request,
        final HttpResponseHandler<Intermediate, Final> handler, final Duration requestReadTimeout) {
    final HttpMethod method = request.getMethod();
    final URL url = request.getUrl();
    final Multimap<String, String> headers = request.getHeaders();

    final String requestDesc = StringUtils.format("%s %s", method, url);
    if (log.isDebugEnabled()) {
        log.debug("[%s] starting", requestDesc);
    }

    // Block while acquiring a channel from the pool, then complete the request asynchronously.
    final Channel channel;
    final String hostKey = getPoolKey(url);
    final ResourceContainer<ChannelFuture> channelResourceContainer = pool.take(hostKey);
    final ChannelFuture channelFuture = channelResourceContainer.get().awaitUninterruptibly();
    if (!channelFuture.isSuccess()) {
        channelResourceContainer.returnResource(); // Some other poor sap will have to deal with it...
        return Futures.immediateFailedFuture(
                new ChannelException("Faulty channel in resource pool", channelFuture.getCause()));
    } else {
        channel = channelFuture.getChannel();
    }

    final String urlFile = Strings.nullToEmpty(url.getFile());
    final HttpRequest httpRequest = new DefaultHttpRequest(HttpVersion.HTTP_1_1, method,
            urlFile.isEmpty() ? "/" : urlFile);

    if (!headers.containsKey(HttpHeaders.Names.HOST)) {
        httpRequest.headers().add(HttpHeaders.Names.HOST, getHost(url));
    }

    // If Accept-Encoding is set in the Request, use that. Otherwise use the default from "compressionCodec".
    if (!headers.containsKey(HttpHeaders.Names.ACCEPT_ENCODING)) {
        httpRequest.headers().set(HttpHeaders.Names.ACCEPT_ENCODING, compressionCodec.getEncodingString());
    }

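    // asMap() groups repeated header names; each value of a multi-valued header is added to the request individually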
    for (Map.Entry<String, Collection<String>> entry : headers.asMap().entrySet()) {
        String key = entry.getKey();

        for (String obj : entry.getValue()) {
            httpRequest.headers().add(key, obj);
        }
    }

    if (request.hasContent()) {
        httpRequest.setContent(request.getContent());
    }

    final long readTimeout = getReadTimeout(requestReadTimeout);
    final SettableFuture<Final> retVal = SettableFuture.create();

    if (readTimeout > 0) {
        channel.getPipeline().addLast(READ_TIMEOUT_HANDLER_NAME,
                new ReadTimeoutHandler(timer, readTimeout, TimeUnit.MILLISECONDS));
    }

    channel.getPipeline().addLast(LAST_HANDLER_NAME, new SimpleChannelUpstreamHandler() {
        private volatile ClientResponse<Intermediate> response = null;

        @Override
        public void messageReceived(ChannelHandlerContext ctx, MessageEvent e) {
            if (log.isDebugEnabled()) {
                log.debug("[%s] messageReceived: %s", requestDesc, e.getMessage());
            }
            try {
                Object msg = e.getMessage();

                if (msg instanceof HttpResponse) {
                    HttpResponse httpResponse = (HttpResponse) msg;
                    if (log.isDebugEnabled()) {
                        log.debug("[%s] Got response: %s", requestDesc, httpResponse.getStatus());
                    }

                    response = handler.handleResponse(httpResponse);
                    if (response.isFinished()) {
                        retVal.set((Final) response.getObj());
                    }

                    if (!httpResponse.isChunked()) {
                        finishRequest();
                    }
                } else if (msg instanceof HttpChunk) {
                    HttpChunk httpChunk = (HttpChunk) msg;
                    if (log.isDebugEnabled()) {
                        log.debug("[%s] Got chunk: %sB, last=%s", requestDesc,
                                httpChunk.getContent().readableBytes(), httpChunk.isLast());
                    }

                    if (httpChunk.isLast()) {
                        finishRequest();
                    } else {
                        response = handler.handleChunk(response, httpChunk);
                        if (response.isFinished() && !retVal.isDone()) {
                            retVal.set((Final) response.getObj());
                        }
                    }
                } else {
                    throw new IllegalStateException(
                            StringUtils.format("Unknown message type[%s]", msg.getClass()));
                }
            } catch (Exception ex) {
                log.warn(ex, "[%s] Exception thrown while processing message, closing channel.", requestDesc);

                if (!retVal.isDone()) {
                    retVal.set(null);
                }
                channel.close();
                channelResourceContainer.returnResource();

                throw ex;
            }
        }

        private void finishRequest() {
            ClientResponse<Final> finalResponse = handler.done(response);
            if (!finalResponse.isFinished()) {
                throw new IllegalStateException(
                        StringUtils.format("[%s] Didn't get a completed ClientResponse Object from [%s]",
                                requestDesc, handler.getClass()));
            }
            if (!retVal.isDone()) {
                retVal.set(finalResponse.getObj());
            }
            removeHandlers();
            channelResourceContainer.returnResource();
        }

        @Override
        public void exceptionCaught(ChannelHandlerContext context, ExceptionEvent event) {
            if (log.isDebugEnabled()) {
                final Throwable cause = event.getCause();
                if (cause == null) {
                    log.debug("[%s] Caught exception", requestDesc);
                } else {
                    log.debug(cause, "[%s] Caught exception", requestDesc);
                }
            }

            retVal.setException(event.getCause());
            // response is non-null if we received initial chunk and then exception occurs
            if (response != null) {
                handler.exceptionCaught(response, event.getCause());
            }
            removeHandlers();
            try {
                channel.close();
            } catch (Exception e) {
                // ignore
            } finally {
                channelResourceContainer.returnResource();
            }

            context.sendUpstream(event);
        }

        @Override
        public void channelDisconnected(ChannelHandlerContext context, ChannelStateEvent event) {
            if (log.isDebugEnabled()) {
                log.debug("[%s] Channel disconnected", requestDesc);
            }
            // response is non-null if we received initial chunk and then exception occurs
            if (response != null) {
                handler.exceptionCaught(response, new ChannelException("Channel disconnected"));
            }
            channel.close();
            channelResourceContainer.returnResource();
            if (!retVal.isDone()) {
                log.warn("[%s] Channel disconnected before response complete", requestDesc);
                retVal.setException(new ChannelException("Channel disconnected"));
            }
            context.sendUpstream(event);
        }

        private void removeHandlers() {
            if (readTimeout > 0) {
                channel.getPipeline().remove(READ_TIMEOUT_HANDLER_NAME);
            }
            channel.getPipeline().remove(LAST_HANDLER_NAME);
        }
    });

    channel.write(httpRequest).addListener(new ChannelFutureListener() {
        @Override
        public void operationComplete(ChannelFuture future) {
            if (!future.isSuccess()) {
                channel.close();
                channelResourceContainer.returnResource();
                if (!retVal.isDone()) {
                    retVal.setException(new ChannelException(
                            StringUtils.format("[%s] Failed to write request to channel", requestDesc),
                            future.getCause()));
                }
            }
        }
    });

    return retVal;
}

From source file: io.crate.analyze.relations.RelationSplitter.java

private void processOrderBy() {
    if (!querySpec.orderBy().isPresent()) {
        return;
    }
    OrderBy orderBy = querySpec.orderBy().get();
    Set<AnalyzedRelation> relations = Collections
            .newSetFromMap(new IdentityHashMap<AnalyzedRelation, Boolean>());
    Multimap<AnalyzedRelation, Integer> splits = Multimaps.newSetMultimap(
            new IdentityHashMap<AnalyzedRelation, Collection<Integer>>(specs.size()), INT_SET_SUPPLIER);

    // Detect remaining orderBy before any push down happens,
    // since if remaining orderBy is detected we need to
    // process again all pushed down orderBys and merge them
    // to the remaining OrderBy in the correct order.
    for (Symbol symbol : orderBy.orderBySymbols()) {
        relations.clear();
        RelationCounter.INSTANCE.process(symbol, relations);

        if (relations.size() > 1 ||
        // Outer Join requires post-order-by because the nested loop adds rows which affects ordering
                JoinPairs.isOuterRelation(relations.iterator().next().getQualifiedName(), joinPairs)) {
            remainingOrderBy = new RemainingOrderBy();
            break;
        }
    }

    Integer idx = 0;
    for (Symbol symbol : orderBy.orderBySymbols()) {
        relations.clear();
        RelationCounter.INSTANCE.process(symbol, relations);

        // If remainingOrderBy detected then don't push down anything but
        // merge it with remainingOrderBy since we need to re-apply this
        // sort again at the place where remainingOrderBy is applied.
        if (remainingOrderBy != null) {
            OrderBy newOrderBy = orderBy.subset(Collections.singletonList(idx));
            for (AnalyzedRelation rel : relations) {
                remainingOrderBy.addRelation(rel.getQualifiedName());
            }
            remainingOrderBy.addOrderBy(newOrderBy);
        } else { // push down
            splits.put(Iterables.getOnlyElement(relations), idx);
        }
        idx++;
    }

    // Process pushed down order by
    for (Map.Entry<AnalyzedRelation, Collection<Integer>> entry : splits.asMap().entrySet()) {
        AnalyzedRelation relation = entry.getKey();
        OrderBy newOrderBy = orderBy.subset(entry.getValue());
        QuerySpec spec = getSpec(relation);
        assert !spec.orderBy().isPresent();
        spec.orderBy(newOrderBy);
        requiredForQuery.addAll(newOrderBy.orderBySymbols());
    }
}