Example usage for com.google.common.collect BiMap get

Introduction

This page collects example usages of the com.google.common.collect.BiMap method get.

Prototype

V get(Object key);

Document

Returns the value to which the specified key is mapped, or null if this map contains no mapping for the key.
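
A minimal, self-contained sketch of both lookup directions (the hostToIp map and its contents are illustrative, not drawn from the examples below):

import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;

public class BiMapGetExample {
    public static void main(String[] args) {
        BiMap<String, String> hostToIp = HashBiMap.create();
        hostToIp.put("db.example.com", "10.0.0.5");

        // get behaves exactly like Map.get: the mapped value, or null
        System.out.println(hostToIp.get("db.example.com"));      // 10.0.0.5
        System.out.println(hostToIp.get("missing.example.com")); // null

        // the inverse view supports the value-to-key direction
        System.out.println(hostToIp.inverse().get("10.0.0.5"));  // db.example.com
    }
}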

Usage

From source file:brooklyn.entity.network.bind.BindDnsServerImpl.java

public void update() {
    Lifecycle serverState = getAttribute(Attributes.SERVICE_STATE_ACTUAL);
    if (Lifecycle.STOPPED.equals(serverState) || Lifecycle.STOPPING.equals(serverState)
            || Lifecycle.DESTROYED.equals(serverState) || !getAttribute(Attributes.SERVICE_UP)) {
        LOG.debug("Skipped update of {} when service state is {} and running is {}",
                new Object[] { this, getAttribute(Attributes.SERVICE_STATE_ACTUAL), getAttribute(SERVICE_UP) });
        return;
    }
    synchronized (this) {
        Iterable<Entity> availableEntities = FluentIterable.from(getEntities().getMembers())
                .filter(new HasHostnameAndValidLifecycle());
        LOG.debug("{} updating with entities: {}", this, Iterables.toString(availableEntities));
        ImmutableListMultimap<String, Entity> hostnameToEntity = Multimaps.index(availableEntities,
                new HostnameTransformer());

        Map<String, String> octetToName = Maps.newHashMap();
        BiMap<String, String> ipToARecord = HashBiMap.create();
        Multimap<String, String> aRecordToCnames = MultimapBuilder.hashKeys().hashSetValues().build();
        Multimap<String, String> ipToAllNames = MultimapBuilder.hashKeys().hashSetValues().build();

        for (Map.Entry<String, Entity> e : hostnameToEntity.entries()) {
            String domainName = e.getKey();
            Maybe<SshMachineLocation> location = Machines
                    .findUniqueSshMachineLocation(e.getValue().getLocations());
            if (!location.isPresent()) {
                LOG.debug("Member {} of {} does not have an SSH location so will not be configured",
                        e.getValue(), this);
                continue;
            } else if (ipToARecord.inverse().containsKey(domainName)) {
                continue;
            }

            String address = location.get().getAddress().getHostAddress();
            ipToAllNames.put(address, domainName);
            if (!ipToARecord.containsKey(address)) {
                ipToARecord.put(address, domainName);
                if (getReverseLookupNetwork().contains(new Cidr(address + "/32"))) {
                    String octet = Iterables.get(Splitter.on('.').split(address), 3);
                    if (!octetToName.containsKey(octet))
                        octetToName.put(octet, domainName);
                }
            } else {
                aRecordToCnames.put(ipToARecord.get(address), domainName);
            }
        }
        setAttribute(A_RECORDS, ImmutableMap.copyOf(ipToARecord.inverse()));
        setAttribute(PTR_RECORDS, ImmutableMap.copyOf(octetToName));
        setAttribute(CNAME_RECORDS, Multimaps.unmodifiableMultimap(aRecordToCnames));
        setAttribute(ADDRESS_MAPPINGS, Multimaps.unmodifiableMultimap(ipToAllNames));

        // Update Bind configuration files and restart the service
        getDriver().updateBindConfiguration();
    }
}

From source file:org.apache.brooklyn.entity.network.bind.BindDnsServerImpl.java

public void update() {
    Lifecycle serverState = getAttribute(Attributes.SERVICE_STATE_ACTUAL);
    if (Lifecycle.STOPPED.equals(serverState) || Lifecycle.STOPPING.equals(serverState)
            || Lifecycle.DESTROYED.equals(serverState) || !getAttribute(Attributes.SERVICE_UP)) {
        LOG.debug("Skipped update of {} when service state is {} and running is {}",
                new Object[] { this, getAttribute(Attributes.SERVICE_STATE_ACTUAL), getAttribute(SERVICE_UP) });
        return;
    }
    synchronized (this) {
        Iterable<Entity> availableEntities = FluentIterable.from(getEntities().getMembers())
                .filter(new HasHostnameAndValidLifecycle());
        LOG.debug("{} updating with entities: {}", this, Iterables.toString(availableEntities));
        ImmutableListMultimap<String, Entity> hostnameToEntity = Multimaps.index(availableEntities,
                new HostnameTransformer());

        Map<String, String> octetToName = Maps.newHashMap();
        BiMap<String, String> ipToARecord = HashBiMap.create();
        Multimap<String, String> aRecordToCnames = MultimapBuilder.hashKeys().hashSetValues().build();
        Multimap<String, String> ipToAllNames = MultimapBuilder.hashKeys().hashSetValues().build();

        for (Map.Entry<String, Entity> e : hostnameToEntity.entries()) {
            String domainName = e.getKey();
            Maybe<SshMachineLocation> location = Machines.findUniqueMachineLocation(e.getValue().getLocations(),
                    SshMachineLocation.class);
            if (!location.isPresent()) {
                LOG.debug("Member {} of {} does not have an SSH location so will not be configured",
                        e.getValue(), this);
                continue;
            } else if (ipToARecord.inverse().containsKey(domainName)) {
                continue;
            }

            String address = location.get().getAddress().getHostAddress();
            ipToAllNames.put(address, domainName);
            if (!ipToARecord.containsKey(address)) {
                ipToARecord.put(address, domainName);
                if (getReverseLookupNetwork().contains(new Cidr(address + "/32"))) {
                    String octet = Iterables.get(Splitter.on('.').split(address), 3);
                    if (!octetToName.containsKey(octet))
                        octetToName.put(octet, domainName);
                }
            } else {
                aRecordToCnames.put(ipToARecord.get(address), domainName);
            }
        }
        sensors().set(A_RECORDS, ImmutableMap.copyOf(ipToARecord.inverse()));
        sensors().set(PTR_RECORDS, ImmutableMap.copyOf(octetToName));
        sensors().set(CNAME_RECORDS, Multimaps.unmodifiableMultimap(aRecordToCnames));
        sensors().set(ADDRESS_MAPPINGS, Multimaps.unmodifiableMultimap(ipToAllNames));

        // Update Bind configuration files and restart the service
        getDriver().updateBindConfiguration();
    }
}
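
Both Brooklyn variants above rely on the same BiMap idiom: the first hostname seen for an address claims the A record, ipToARecord.get(address) resolves the canonical name for every later hostname, and the inverse view detects hostnames that already hold an A record. A stripped-down sketch of just that pattern, with hypothetical hostname data in place of the entity machinery (Guava imports as in the sketch near the top of this page, plus ImmutableMap, Multimap, and MultimapBuilder):

BiMap<String, String> ipToARecord = HashBiMap.create();
Multimap<String, String> aRecordToCnames = MultimapBuilder.hashKeys().hashSetValues().build();

Map<String, String> hostnameToIp = ImmutableMap.of(
        "web1.example.com", "10.0.0.1",
        "web2.example.com", "10.0.0.1");
for (Map.Entry<String, String> e : hostnameToIp.entrySet()) {
    String hostname = e.getKey(), address = e.getValue();
    if (ipToARecord.inverse().containsKey(hostname)) {
        continue; // hostname already holds an A record
    }
    if (!ipToARecord.containsKey(address)) {
        ipToARecord.put(address, hostname);                      // first hostname wins the A record
    } else {
        aRecordToCnames.put(ipToARecord.get(address), hostname); // later hostnames become CNAMEs
    }
}
// ipToARecord     -> {10.0.0.1=web1.example.com}
// aRecordToCnames -> {web1.example.com=[web2.example.com]}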

From source file:de.uni_potsdam.hpi.asg.logictool.mapping.SequenceBasedAndGateDecomposer.java

public boolean decomposeAND(NetlistTerm term) {

    logger.info("Decomposition of " + term.toString());

    Set<Signal> signals = netlist.getDrivenSignalsTransitive(term);
    if (signals.isEmpty()) {
        logger.warn("No signal(s) for term " + term + " found");
        return false;
    } else if (signals.size() > 1) {
        logger.warn("Term " + term + " drives more than one signal. This is not supported yet");
        return false;
    }
    Signal origsig = signals.iterator().next();
    if (!isAOC(term, origsig)) {
        logger.warn("Algorithm not applicable for non-AOC architectures");
        return false;
    }

    int startgatesize = BDDHelper.numberOfVars(term.getBdd());

    BDD bdd = term.getBdd();
    Set<Signal> origrelevant = findRelevantSigs(bdd);
    if (origrelevant == null) {
        return false;
    }

    StateGraph sg2 = sghelper.getNewStateGraph(origrelevant, origsig);
    if (sg2 == null) {
        logger.warn("Failed to generate new SG. Using the original one.");
        sg2 = origsg;
    }

    BiMap<Signal, Signal> sigmap = HashBiMap.create();
    Set<Signal> relevant = new HashSet<>();
    boolean found;
    for (Signal oldSig : origrelevant) {
        found = false;
        for (Signal newSig : sg2.getAllSignals()) {
            if (oldSig.getName().equals(newSig.getName())) {
                sigmap.put(oldSig, newSig);
                found = true;
                break;
            }
        }
        if (!found) {
            logger.error("Signal " + oldSig.getName() + " not found");
            return false;
        }
        relevant.add(sigmap.get(oldSig));
    }
    found = false;
    for (Signal newSig : sg2.getAllSignals()) {
        if (origsig.getName().equals(newSig.getName())) {
            sigmap.put(origsig, newSig);
            found = true;
            break;
        }
    }
    if (!found) {
        logger.error("Signal " + origsig.getName() + " not found");
        return false;
    }
    Signal sig = sigmap.get(origsig);

    Map<Signal, Boolean> posnegmap = getInputsPosOrNeg(term, sigmap);
    BDD newbdd = factory.one();
    for (Entry<Signal, Boolean> entry : posnegmap.entrySet()) {
        if (entry.getValue()) {
            newbdd = newbdd.andWith(getPosBDD(entry.getKey()));
        } else {
            newbdd = newbdd.andWith(getNegBDD(entry.getKey()));
        }
        if (entry.getKey() instanceof QuasiSignal) {
            relevant.add(entry.getKey());
        }
    }

    Set<State> startStates = new HashSet<>();
    for (State s : sg2.getStates()) {
        for (Entry<Transition, State> entry2 : s.getNextStates().entrySet()) {
            if (entry2.getKey().getSignal() == sig) {
                startStates.add(entry2.getValue());
            }
        }
    }

    List<List<Signal>> fallingPartitions = new ArrayList<>();
    for (Signal sig2 : relevant) {
        List<Signal> tmp = new ArrayList<>();
        tmp.add(sig2);
        fallingPartitions.add(tmp);
    }

    SortedSet<IOBehaviour> sequencesFront = new TreeSet<>(new SequenceFrontCmp());
    SortedSet<IOBehaviour> sequencesBack = new TreeSet<>(new SequenceBackCmp());
    Set<IOBehaviour> newSequences = new HashSet<>();
    Set<IOBehaviour> rmSequences = new HashSet<>();
    Deque<IOBehaviourSimulationStep> steps = new ArrayDeque<>();

    pool = new IOBehaviourSimulationStepPool(new IOBehaviourSimulationStepFactory());
    pool.setMaxTotal(-1);

    try {
        root = pool.borrowObject();
    } catch (Exception e) {
        e.printStackTrace();
        logger.error("Could not borrow object");
        return false;
    }

    IOBehaviourSimulationStep newStep;
    for (State s : startStates) {
        try {
            newStep = pool.borrowObject();
        } catch (Exception e) {
            e.printStackTrace();
            logger.error("Could not borrow object");
            return false;
        }
        root.getNextSteps().add(newStep);
        newStep.setPrevStep(root);
        newStep.setStart(s);
        newStep.setNextState(s);
        steps.add(newStep);
    }

    if (steps.isEmpty()) {
        return false;
    }

    final long checkThreshold = 100;

    long stepsEvaledTotal = 0;
    IOBehaviourSimulationStep step = null;
    while (!steps.isEmpty()) {
        step = steps.removeLast();
        //         System.out.println("#Step: " + step.toString());
        getNewSteps(step, sig, newSequences, steps, relevant);
        stepsEvaledTotal++;
        if (newSequences.size() >= checkThreshold) {
            removeCandidates(sequencesFront, sequencesBack, newSequences, rmSequences);
        }
    }
    removeCandidates(sequencesFront, sequencesBack, newSequences, rmSequences);
    logger.debug("Sequences: " + sequencesFront.size() + " - Tmp Sequences: " + newSequences.size()
            + " - Steps to evaluate: " + steps.size() + " - Steps evaluated: " + stepsEvaledTotal);
    logger.debug("Pool: " + "Created: " + pool.getCreatedCount() + ", Borrowed: " + pool.getBorrowedCount()
            + ", Returned: " + pool.getReturnedCount() + ", Active: " + pool.getNumActive() + ", Idle: "
            + pool.getNumIdle());
    logger.debug("RmSub: " + rmSub + " // RmFall: " + rmFall);

    SortedSet<IOBehaviour> sequences = new TreeSet<>(sequencesFront);
    sequencesFront.clear();
    sequencesBack.clear();
    //      System.out.println(sequences.toString());

    List<IOBehaviour> falling = new ArrayList<>();
    List<IOBehaviour> rising = new ArrayList<>();
    List<IOBehaviour> constant = new ArrayList<>();
    if (!categoriseSequences(newbdd, sequences, falling, rising, constant)) {
        return false;
    }
    //      System.out.println("Falling:");
    //      for(IOBehaviour beh : falling) {
    //         System.out.println(beh.toString());
    //      }
    //      System.out.println("Rising:");
    //      for(IOBehaviour beh : rising) {
    //         System.out.println(beh.toString());
    //      }
    //      System.out.println("Constant:");
    //      for(IOBehaviour beh : constant) {
    //         System.out.println(beh.toString());
    //      }

    fallingPartitions = getPossiblePartitionsFromFalling(falling, relevant);
    //      System.out.println("FallingPartitions: " + fallingPartitions.toString());

    Map<Integer, List<Partition>> partitions = getPartitions(relevant, startgatesize);
    if (partitions == null) {
        logger.error("There was a problem while creating partions for signal " + sig.getName());
        return false;
    }

    //      System.out.println("Init:");
    //      for(Entry<Integer, List<Partition>> entry : partitions.entrySet()) {
    //         System.out.println(entry.getKey());
    //         for(Partition p : entry.getValue()) {
    //            System.out.println("\t" + p.toString());
    //         }
    //      }

    filterPartitions(partitions, fallingPartitions);
    if (partitions.isEmpty()) {
        logger.error("No suitable partions found");
        return false;
    }

    //      System.out.println("After filter Falling:");
    //      for(Entry<Integer, List<Partition>> entry : partitions.entrySet()) {
    //         System.out.println(entry.getKey());
    //         for(Partition p : entry.getValue()) {
    //            System.out.println("\t" + p.toString());
    //         }
    //      }

    //      System.out.println("posneg: " + posnegmap.toString());

    setPartitionBDDs(partitions, posnegmap);

    if (!checkRising(rising, partitions)) {
        logger.error("Check rising failed");
        return false;
    }
    if (partitions.isEmpty()) {
        logger.error("No suitable partions found");
        return false;
    }

    //      System.out.println("After filter Rising:");
    //      for(Entry<Integer, List<Partition>> entry : partitions.entrySet()) {
    //         System.out.println(entry.getKey());
    //         for(Partition p : entry.getValue()) {
    //            System.out.println("\t" + p.toString());
    //         }
    //      }

    if (!checkConstant(constant, partitions)) {
        logger.error("Check constant failed");
        return false;
    }
    if (partitions.isEmpty()) {
        logger.error("No suitable partions found");
        return false;
    }

    //      System.out.println("After filter Constant:");
    //      for(Entry<Integer, List<Partition>> entry : partitions.entrySet()) {
    //         System.out.println(entry.getKey());
    //         for(Partition p : entry.getValue()) {
    //            System.out.println("\t" + p.toString());
    //         }
    //      }

    applyDecoResult(term, partitions, posnegmap, sigmap);
    return true;
}

From source file:org.apache.ctakes.ytex.uima.mapper.DocumentMapperServiceImpl.java

/**
 * Save the annotation properties for a given type.
 *
 * @param mapAnnoToId
 *            map of all Annotations to their annotation ids
 * @param annoIds
 *            annotation ids for a single type
 * @param listAnnoLinks
 *            annotation to annotation links to save
 */
private void saveAnnoPrimitive(final BiMap<Annotation, Integer> mapAnnoToId, final Set<Integer> annoIds,
        final List<AnnoLink> listAnnoLinks) {
    if (log.isTraceEnabled())
        log.trace("begin saveAnnoPrimitive");
    final BiMap<Integer, Annotation> mapIdToAnno = mapAnnoToId.inverse();
    // nothing to do
    if (annoIds.isEmpty())
        return;
    // convert to array for Spring batch update
    // final Integer[] annoIdArray = annoIds.toArray(new Integer[] {});
    final List<Integer> annoIdList = new ArrayList<Integer>(annoIds);
    // get mappinginfo
    // final TOP t = mapIdToAnno.get(annoIdArray[0]);
    final TOP t = mapIdToAnno.get(annoIdList.get(0));
    final Type type = t.getType();
    final AnnoMappingInfo mapInfo = this.getMapInfo(t);
    // get non primitive fields, insert them after inserting the annotation
    final Set<String> fsNames = this.tl_mapFieldInfo.get().get(type.getName());
    final ListMultimap<String, AnnoFSAttribute> mapAnnoToFS = ArrayListMultimap.create();
    // don't know how to map this annotation
    if (mapInfo == null)
        return;
    // jdbcTemplate.batchUpdate(mapInfo.getSql(),
    // new BatchPreparedStatementSetter() {
    //
    // @Override
    // public int getBatchSize() {
    // return annoIdArray.length;
    // }
    this.chunkedBatchUpdate(mapInfo.getSql(), annoIdList, new ChunkPreparedStatementSetter<Integer>() {

        @Override
        public void setValues(PreparedStatement ps, int idx, Integer annoId) throws SQLException {
            // get the entry
            // int annoId = annoIdArray[idx];
            Annotation anno = mapIdToAnno.get(annoId);
            saveAnnoBindVariables(type, mapInfo, ps, annoId, anno, mapAnnoToId);
            // pull out the composite fields for storage
            for (String fieldName : fsNames) {
                Feature feat = type.getFeatureByBaseName(fieldName);
                if (!feat.getRange().isPrimitive()) {
                    // handle arrays and lists
                    FeatureStructure fsCol = anno.getFeatureValue(feat);
                    if (fsCol != null && (fsCol instanceof FSArray || fsCol instanceof FSList)) {
                        List<FeatureStructure> fsList = extractList(fsCol);
                        int i = 0;
                        for (FeatureStructure fs : fsList) {
                            if (fs instanceof Annotation) {
                                // annotations are linked via the
                                // anno_link table
                                Integer childAnnoId = mapAnnoToId.get(fs);
                                if (childAnnoId != null) {
                                    listAnnoLinks.add(new AnnoLink(annoId, childAnnoId, feat.getShortName()));
                                }
                            } else {
                                // featureStructs that are not
                                // annotations get stored in their
                                // own tables
                                // with a many to one relationship
                                // to the annotation
                                mapAnnoToFS.put(fs.getType().getName(), new AnnoFSAttribute(annoId, fs, i++));
                            }
                        }
                    }
                } else {
                    // handle primitive attributes
                    mapAnnoToFS.put(feat.getRange().getName(),
                            new AnnoFSAttribute(annoId, anno.getFeatureValue(feat), null));
                }
            }
        }
    });
    for (String fsType : mapAnnoToFS.keySet()) {
        this.saveAnnoFS(mapAnnoToFS.get(fsType), mapAnnoToId);
    }
    if (log.isTraceEnabled())
        log.trace("end saveAnnoPrimitive");
}
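
One detail worth calling out in this example: mapAnnoToId.inverse() is not a copy. A Guava BiMap and its inverse are live views of the same entries, so the id-to-annotation lookups inside the batch callback always see the current state of the forward map. A minimal illustration with String/Integer stand-ins for the UIMA types:

BiMap<String, Integer> mapAnnoToId = HashBiMap.create();
BiMap<Integer, String> mapIdToAnno = mapAnnoToId.inverse();

mapAnnoToId.put("TokenAnnotation", 42);
System.out.println(mapIdToAnno.get(42));                   // TokenAnnotation: forward puts are visible

mapIdToAnno.put(43, "SentenceAnnotation");
System.out.println(mapAnnoToId.get("SentenceAnnotation")); // 43: writes propagate both ways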

From source file:de.uni_potsdam.hpi.asg.logictool.mapping.SequenceBasedAndGateDecomposer.java

private void applyDecoResult(NetlistTerm term, Map<Integer, List<Partition>> partitions,
        Map<Signal, Boolean> posnegmap, BiMap<Signal, Signal> sigmap) {
    BiMap<Signal, Signal> sigmapinv = sigmap.inverse();
    if (partitions.isEmpty()) {
        return;
    }
    Entry<Integer, List<Partition>> entry = partitions.entrySet().iterator().next();
    if (entry.getValue().isEmpty()) {
        return;
    }
    Partition p = entry.getValue().get(0);
    //      System.out.println(partitions);
    logger.debug("Used Partition: " + p.getPartition().toString());

    Set<NetlistVariable> firstlevelands = new HashSet<>();
    for (PartitionPart part : p.getPartition()) {
        if (part.getPart().size() == 1) {
            Signal s = part.getPart().iterator().next();
            NetlistVariable var = null;
            if (s instanceof QuasiSignal) {
                var = quasimap.inverse().get((QuasiSignal) s);
            } else {
                var = netlist.getNetlistVariableBySignal(sigmapinv.get(s));
            }
            firstlevelands.add(var);
            continue;
        }
        BDD tmpBDD = netlist.getFac().one();
        for (Signal sig : part.getPart()) {
            NetlistVariable var = null;
            if (sig instanceof QuasiSignal) {
                var = quasimap.inverse().get((QuasiSignal) sig);
            } else {
                Signal netlistsig = sigmapinv.get(sig);
                var = netlist.getNetlistVariableBySignal(netlistsig);
            }
            if (posnegmap.get(sig)) {
                tmpBDD = tmpBDD.andWith(var.toBDD());
            } else {
                tmpBDD = tmpBDD.andWith(var.toNotBDD());
            }
        }
        NetlistTerm tmpterm = netlist.getNetlistTermByBdd(tmpBDD);
        NetlistVariable tmpvar = netlist.getNewTmpVar();
        netlist.addConnection(tmpvar, tmpterm);
        firstlevelands.add(tmpterm.getDrivee());
    }
    BDD newbdd = netlist.getFac().one();
    for (NetlistVariable var : firstlevelands) {
        Signal sig = netlist.getSignalByNetlistVariable(var);
        boolean pos = true;
        if (sig != null) {
            pos = posnegmap.get(sigmap.get(sig));
        }

        if (pos) {
            newbdd = newbdd.and(var.toBDD());
        } else {
            newbdd = newbdd.and(var.toNotBDD());
        }
    }
    netlist.alterTermBDD(term, newbdd);
    //      new NetlistGraph(netlist, Arrays.asList("rD_9"), true);
}

From source file:org.eclipse.elk.alg.graphviz.dot.transform.DotExporter.java

/**
 * Transform the edges of the given parent node.
 *
 * @param parent a parent node
 * @param statements the list to which new statements are added
 * @param transData transformation data
 */
private void transformEdges(final KNode parent, final List<Statement> statements,
        final IDotTransformationData<KNode, GraphvizModel> transData) {
    boolean hierarchy = transData.getProperty(HIERARCHY);
    boolean transformEdgeLayout = transData.getProperty(TRANSFORM_EDGE_LAYOUT);
    KShapeLayout parentLayout = parent.getData(KShapeLayout.class);
    Direction direction = parentLayout.getProperty(CoreOptions.DIRECTION);
    boolean vertical = direction == Direction.DOWN || direction == Direction.UP
            || direction == Direction.UNDEFINED;
    LinkedList<KNode> nodes = new LinkedList<KNode>(parent.getChildren());
    BiMap<KGraphElement, String> nodeIds = transData.getProperty(GRAPH_ELEMS).inverse();

    while (!nodes.isEmpty()) {
        KNode source = nodes.removeFirst();
        for (KEdge edge : source.getOutgoingEdges()) {
            KNode target = edge.getTarget();
            // cross-hierarchy edges are considered only if hierarchy mode is active
            if (source.getParent() == target.getParent()
                    || hierarchy && isInsideGraph(target, transData.getSourceGraph())) {
                EdgeStatement edgeStatement = DotFactory.eINSTANCE.createEdgeStatement();
                List<Attribute> attributes = edgeStatement.getAttributes();
                // set source node or cluster
                Node sourceNode = DotFactory.eINSTANCE.createNode();
                if (hierarchy && !source.getChildren().isEmpty()) {
                    sourceNode.setName(source.getData(KShapeLayout.class).getProperty(CLUSTER_DUMMY));
                    attributes.add(createAttribute(Attributes.LTAIL, nodeIds.get(source)));
                } else {
                    sourceNode.setName(nodeIds.get(source));
                }
                edgeStatement.setSourceNode(sourceNode);
                // set target node or cluster
                EdgeTarget edgeTarget = DotFactory.eINSTANCE.createEdgeTarget();
                Node targetNode = DotFactory.eINSTANCE.createNode();
                if (hierarchy && !target.getChildren().isEmpty()) {
                    targetNode.setName(target.getData(KShapeLayout.class).getProperty(CLUSTER_DUMMY));
                    attributes.add(createAttribute(Attributes.LHEAD, nodeIds.get(target)));
                } else {
                    targetNode.setName(nodeIds.get(target));
                }
                edgeTarget.setTargetnode(targetNode);
                edgeStatement.getEdgeTargets().add(edgeTarget);

                // add edge labels at head, tail, and middle position
                setEdgeLabels(edge, attributes, vertical);

                if (transData.getProperty(USE_EDGE_IDS)) {
                    // add comment with edge identifier
                    String edgeID = getEdgeID(edge, transData);
                    attributes.add(createAttribute(Attributes.COMMENT, "\"" + edgeID + "\""));
                }

                // include edge routing for full export
                KEdgeLayout edgeLayout = edge.getData(KEdgeLayout.class);
                KPoint sourcePoint = edgeLayout.getSourcePoint();
                KPoint targetPoint = edgeLayout.getTargetPoint();
                if (transformEdgeLayout && (edgeLayout.getBendPoints().size() > 0 || sourcePoint.getX() != 0
                        || sourcePoint.getY() != 0 || targetPoint.getX() != 0 || targetPoint.getY() != 0)) {
                    KNode referenceNode = source;
                    if (!ElkUtil.isDescendant(target, source)) {
                        referenceNode = source.getParent();
                    }
                    StringBuilder pos = new StringBuilder();
                    Iterator<KVector> pointIter = edgeLayout.createVectorChain().iterator();
                    while (pointIter.hasNext()) {
                        KVector point = pointIter.next();
                        ElkUtil.toAbsolute(point, referenceNode);
                        pos.append(point.x);
                        pos.append(",");
                        pos.append(point.y);
                        if (pointIter.hasNext()) {
                            pos.append(" ");
                        }
                    }
                    attributes.add(createAttribute(Attributes.POS, "\"" + pos + "\""));
                }

                statements.add(edgeStatement);
            }
        }
        if (hierarchy) {
            nodes.addAll(source.getChildren());
        }
    }
}

From source file:com.wolvereness.overmapped.MembersSubRoutine.java

@Override
public void invoke(final OverMapped instance, final Map<String, ByteClass> classes,
        final Multimap<String, String> depends, final Multimap<String, String> rdepends,
        final BiMap<String, String> nameMaps, final BiMap<String, String> inverseNameMaps,
        final BiMap<Signature, Signature> signatureMaps, final BiMap<Signature, Signature> inverseSignatureMaps,
        final Remapper inverseMapper, final MutableSignature mutableSignature, final Set<String> searchCache,
        final Map<Signature, Integer> flags, final Map<?, ?> map)
        throws ClassCastException, NullPointerException, MojoFailureException {
    final Object memberMaps = map.get(tag);
    if (!(memberMaps instanceof Map))
        return;

    final Store store = new Store(searchCache, instance.isFindParents() ? new HashSet<String>() : null,
            instance);

    for (final Map.Entry<?, ?> memberMap : ((Map<?, ?>) memberMaps).entrySet()) {
        final Map<?, ?> maps = asType(memberMap.getValue(),
                "`%4$s' points to a %2$s `%1$s', expected a %5$s, in `%3$s'", false, memberMaps, memberMap,
                Map.class);

        if (memberMap.getKey() instanceof Collection<?> && ((Collection<?>) memberMap.getKey()).size() > 1) {
            final Iterable<String> classNames;
            {
                final ImmutableCollection.Builder<String> containingClassNames = ImmutableList.builder();
                for (final Object clazz : (Collection<?>) memberMap.getKey()) {
                    final String unresolvedClassName = asType(clazz,
                            "`%4$s' contains a %2$s `%1$s', expected a %5$s, from `%3$s'", false, memberMaps,
                            memberMap.getKey(), String.class);
                    final String className = inverseNameMaps.get(unresolvedClassName);
                    if (className == null) {
                        instance.missingAction.actMemberClass(instance.getLog(), unresolvedClassName,
                                memberMap.getKey(), inverseNameMaps);
                        continue;
                    }
                    containingClassNames.add(className);
                }
                classNames = containingClassNames.build();
            }

            for (final Map.Entry<?, ?> entry : maps.entrySet()) {
                parseMapping(store, inverseMapper, mutableSignature, maps, entry, false);
                final String newName = store.newName, oldName = store.oldName, description = store.description,
                        originalDescription = store.originalDescription;

                if (!mutableSignature.update("", "", description).isMethod())
                    throw new MojoFailureException(String
                            .format("Malformed mapping %s for %s; can only map methods.", entry, memberMap));

                for (final String className : classNames) {
                    updateMember(store, signatureMaps, inverseSignatureMaps, mutableSignature, oldName, newName,
                            description, className, nameMaps, originalDescription, nameMaps.get(className));

                    if (mutableSignature.isMethod() && !mutableSignature.isConstructor()) {
                        final Set<String> parents = store.parents;
                        if (parents != null) {
                            parents.addAll(depends.get(className));
                        }
                        for (final String inherited : rdepends.get(className)) {
                            if (!updateMember(store, signatureMaps, inverseSignatureMaps, mutableSignature,
                                    oldName, newName, description, inherited, nameMaps, originalDescription,
                                    nameMaps.get(inherited)))
                                continue;

                            if (parents != null) {
                                parents.addAll(depends.get(inherited));
                            }
                        }
                    }
                }
                performParentChecks(store, nameMaps, inverseSignatureMaps, mutableSignature, classNames,
                        newName, oldName, description, originalDescription);
                store.searchCache.clear();
            }

            continue;
        }

        if (memberMap.getKey() instanceof Collection<?> && ((Collection<?>) memberMap.getKey()).size() < 1)
            throw new MojoFailureException(
                    String.format("Malformed mapping %s -> %s", memberMap.getKey(), maps));

        final String unresolvedClassName = asType(
                memberMap.getKey() instanceof Collection<?>
                        ? ((Collection<?>) memberMap.getKey()).iterator().next()
                        : memberMap.getKey(),
                "`%4$s' points from a %2$s `%1$s', expected a %5$s, in `%3$s'", false, memberMaps, memberMap,
                String.class);
        final String className = inverseNameMaps.get(unresolvedClassName);
        if (className == null) {
            instance.missingAction.actMemberClass(instance.getLog(), unresolvedClassName, memberMap.getKey(),
                    inverseNameMaps);
            continue;
        }

        for (final Map.Entry<?, ?> entry : maps.entrySet()) {
            processSingleClassMappings(store, classes, depends, rdepends, nameMaps, signatureMaps,
                    inverseSignatureMaps, inverseMapper, mutableSignature, maps, className, unresolvedClassName,
                    entry);
        }
    }
}
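
The bidirectional name maps in this example depend on a property a plain HashMap lacks: a BiMap enforces unique values, so two obfuscated classes can never silently collapse onto one remapped name, and inverseNameMaps.get stays unambiguous. A small sketch of that guarantee, with hypothetical class names:

BiMap<String, String> nameMaps = HashBiMap.create();
nameMaps.put("a/b/ClassA", "com/example/Alpha");

try {
    nameMaps.put("a/b/ClassB", "com/example/Alpha");      // value already bound to ClassA
} catch (IllegalArgumentException expected) {
    // BiMap.put rejects duplicate values; forcePut displaces the old key instead
}

nameMaps.forcePut("a/b/ClassB", "com/example/Alpha");
System.out.println(nameMaps.get("a/b/ClassA"));                  // null: displaced by forcePut
System.out.println(nameMaps.inverse().get("com/example/Alpha")); // a/b/ClassB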

From source file:org.eclipse.elk.graphviz.dot.transform.DotExporter.java

/**
 * Transform the edges of the given parent node.
 *
 * @param parent a parent node
 * @param statements the list to which new statements are added
 * @param hierarchy whether hierarchy mode is active
 * @param transData transformation data
 */
private void transformEdges(final KNode parent, final List<Statement> statements, final boolean hierarchy,
        final IDotTransformationData<KNode, GraphvizModel> transData) {
    boolean fullExport = transData.getProperty(FULL_EXPORT);
    KShapeLayout parentLayout = parent.getData(KShapeLayout.class);
    Direction direction = parentLayout.getProperty(LayoutOptions.DIRECTION);
    boolean vertical = direction == Direction.DOWN || direction == Direction.UP
            || direction == Direction.UNDEFINED;
    LinkedList<KNode> nodes = new LinkedList<KNode>(parent.getChildren());
    BiMap<KGraphElement, String> nodeIds = transData.getProperty(GRAPH_ELEMS).inverse();

    while (!nodes.isEmpty()) {
        KNode source = nodes.removeFirst();
        for (KEdge edge : source.getOutgoingEdges()) {
            KNode target = edge.getTarget();
            // cross-hierarchy edges are considered only if hierarchy mode is active
            if (source.getParent() == target.getParent()
                    || hierarchy && isInsideGraph(target, transData.getSourceGraph())) {
                EdgeStatement edgeStatement = DotFactory.eINSTANCE.createEdgeStatement();
                List<Attribute> attributes = edgeStatement.getAttributes();
                // set source node or cluster
                Node sourceNode = DotFactory.eINSTANCE.createNode();
                if (hierarchy && !source.getChildren().isEmpty()) {
                    sourceNode.setName(source.getData(KShapeLayout.class).getProperty(CLUSTER_DUMMY));
                    attributes.add(createAttribute(Attributes.LTAIL, nodeIds.get(source)));
                } else {
                    sourceNode.setName(nodeIds.get(source));
                }
                edgeStatement.setSourceNode(sourceNode);
                // set target node or cluster
                EdgeTarget edgeTarget = DotFactory.eINSTANCE.createEdgeTarget();
                Node targetNode = DotFactory.eINSTANCE.createNode();
                if (hierarchy && !target.getChildren().isEmpty()) {
                    targetNode.setName(target.getData(KShapeLayout.class).getProperty(CLUSTER_DUMMY));
                    attributes.add(createAttribute(Attributes.LHEAD, nodeIds.get(target)));
                } else {
                    targetNode.setName(nodeIds.get(target));
                }
                edgeTarget.setTargetnode(targetNode);
                edgeStatement.getEdgeTargets().add(edgeTarget);

                // add edge labels at head, tail, and middle position
                setEdgeLabels(edge, attributes, vertical);

                if (transData.getProperty(USE_EDGE_IDS)) {
                    // add comment with edge identifier
                    String edgeID = getEdgeID(edge, transData);
                    attributes.add(createAttribute(Attributes.COMMENT, "\"" + edgeID + "\""));
                }

                // include edge routing for full export
                KEdgeLayout edgeLayout = edge.getData(KEdgeLayout.class);
                KPoint sourcePoint = edgeLayout.getSourcePoint();
                KPoint targetPoint = edgeLayout.getTargetPoint();
                if (fullExport && !edgeLayout.getProperty(LayoutOptions.NO_LAYOUT)
                        && (edgeLayout.getBendPoints().size() > 0 || sourcePoint.getX() != 0
                                || sourcePoint.getY() != 0 || targetPoint.getX() != 0
                                || targetPoint.getY() != 0)) {
                    KNode referenceNode = source;
                    if (!ElkUtil.isDescendant(target, source)) {
                        referenceNode = source.getParent();
                    }
                    StringBuilder pos = new StringBuilder();
                    Iterator<KVector> pointIter = edgeLayout.createVectorChain().iterator();
                    while (pointIter.hasNext()) {
                        KVector point = pointIter.next();
                        ElkUtil.toAbsolute(point, referenceNode);
                        pos.append(point.x);
                        pos.append(",");
                        pos.append(point.y);
                        if (pointIter.hasNext()) {
                            pos.append(" ");
                        }
                    }
                    attributes.add(createAttribute(Attributes.POS, "\"" + pos + "\""));
                }

                statements.add(edgeStatement);
            }
        }
        if (hierarchy) {
            nodes.addAll(source.getChildren());
        }
    }
}

From source file:uk.ac.susx.tag.method51.twitter.NBClassifierIGImpl.java

public IGPackage calculateIG(List<Tweet> tweets) {

    Date latest = new Date(0);
    Date earliest = new Date();

    for (Tweet tweet : tweets) {

        if (tweet.getCreated().after(latest)) {
            latest = tweet.getCreated();
        }
        if (tweet.getCreated().before(earliest)) {
            earliest = tweet.getCreated();
        }
    }

    List<ProcessedInstance> processedInstances = getProcessedInstances(tweets);
    classify(processedInstances);

    Map<String, List<String>> igFeatures = Querying
            .labelledFeatures2Strings(Querying.queryFeatures(processedInstances, null, 50), pipeline);

    String s = new Gson().toJson(igFeatures);
    System.out.println(s);

    IntSet vocab = uk.ac.susx.tag.classificationframework.Util.inferVocabulary(processedInstances);
    BiMap<Integer, Integer> features = HashBiMap.create(vocab.size());

    {
        int i = 0;
        for (int j : vocab) {
            features.put(i, j);
            ++i;
        }
    }

    int numClasses = classifier.getLabels().size();
    int numFeatures = features.size();
    int numInstances = processedInstances.size();
    int[][] classFeatureCounts = new int[numClasses][numFeatures];
    int[] featureCounts = new int[numFeatures];
    int[] classCounts = new int[numClasses];
    int[] featureClasses = new int[numFeatures];
    Arrays.fill(featureClasses, -1);

    countClassFeatures(classCounts, featureCounts, classFeatureCounts, processedInstances, features);

    final double[] IG = calcInfoGain(classCounts, featureCounts, classFeatureCounts, featureClasses, 0.1);

    final Integer[] sortedIdx = new Integer[IG.length];
    for (int i = 0; i < sortedIdx.length; ++i) {
        sortedIdx[i] = i;
    }

    Arrays.sort(sortedIdx, new Comparator<Integer>() {
        @Override
        public int compare(final Integer o1, final Integer o2) {
            return Double.compare(IG[o2], IG[o1]);
        }
    });

    int featureMass = 0;
    for (int f : featureCounts) {
        featureMass += f;
    }

    Map<String, List<Term>> out = new HashMap<>();

    int[] numIGFeaturesPerClass = new int[numClasses];

    for (int i = 0; i < classifier.getLabels().size(); i++) {

        List<Term> terms = new ArrayList<>();

        String label = pipeline.labelString(i);

        //System.out.println("---------------");
        //System.out.println(label);

        //System.out.println();

        int n = 0;
        int j = 0;
        while (n < numIGFeatures && j < IG.length) {

            int idx = sortedIdx[j];

            if (featureClasses[idx] == i) {
                String feature = pipeline.featureString(features.get(idx));

                double score = IG[idx];
                int count = featureCounts[idx];
                //int count = classFeatureCounts[i][idx];
                double PCint = (classCounts[i] - classFeatureCounts[i][idx])
                        / (double) (numInstances - featureCounts[idx]);
                double PCif = classFeatureCounts[i][idx] / (double) featureCounts[idx];
                double PfCi = classFeatureCounts[i][idx] / (double) classCounts[i];
                double Pf = featureCounts[idx] / (double) numInstances;

                //System.out.print(feature + " " + score + " " + featureClasses[idx] + " " + featureCounts[idx] + " ");
                ++n;

                Term term = new Term();
                term.setText(feature.replace("_", " "));
                term.setClassIndex(i);
                term.setInfo("score", score);
                term.setInfo("count", count);
                term.setInfo("p(c|nf)", PCint);
                term.setInfo("p(c|f)", PCif);
                term.setInfo("p(f|c)", PfCi);
                term.setInfo("p(f)", Pf);
                term.setInfo("idx", idx);

                terms.add(term);
            }
            ++j;
        }

        numIGFeaturesPerClass[i] = n;

        out.put(label, terms);

        //System.out.println();
    }

    //System.out.println();
    //System.out.println();

    if (print) {
        System.out.println(latest.toString());

        System.out.println("Num Docs " + tweets.size());
        for (int j = 0; j < numClasses; ++j) {
            System.out.println("P(C" + j + ") " + classCounts[j] / (double) numInstances);
        }

        System.out.println(String.format("%-6s %-30s %-10s %-10s %-10s %-10s %-10s %-10s %-10s", "Class",
                "Term", "IG", "Count", "p(f|c)", "p(nf|c)", "p(c|f)", "p(c|nf)", "p(f)"));

        for (int f = 0; f < numIGFeatures; ++f) {

            for (int i = 0; i < numClasses; ++i) {
                String label = pipeline.labelString(i);

                if (numIGFeaturesPerClass[i] <= f) {
                    continue;
                }
                Term term = out.get(label).get(f);

                int idx = (Integer) term.getInfo("idx");

                for (int j = 0; j < numClasses; ++j) {

                    double PCint = (classCounts[j] - classFeatureCounts[j][idx])
                            / (double) (numInstances - featureCounts[idx]);
                    double PCif = classFeatureCounts[j][idx] / (double) featureCounts[idx];

                    double PnfCi = (featureCounts[idx] - classFeatureCounts[j][idx])
                            / (double) (numInstances - classCounts[j]);
                    double PfCi = classFeatureCounts[j][idx] / (double) classCounts[j];

                    double Pf = featureCounts[idx] / (double) numInstances;
                    //int count = classFeatureCounts[i][idx];
                    int count = featureCounts[idx];

                    double score = (Double) term.getInfo("score");

                    System.out.println(
                            String.format("%-6d %-30s %-10.5f %-10d %-10.5f %-10.5f %-10.5f %-10.5f %-10.5f", j,
                                    term.getText(), score, count, PfCi, PnfCi, PCif, PCint, Pf));
                }
            }
        }

        System.out.println();
        System.out.println();
    }

    IGPackage igPackage = new IGPackage();
    igPackage.classTerms = out;
    igPackage.earliest = earliest;
    igPackage.latest = latest;

    igPackage.vol = numInstances;

    return igPackage;
}
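
The features BiMap above is a re-indexing trick: a sparse vocabulary of feature ids is packed into dense array positions for the count matrices, and features.get(idx) recovers the original feature id when the top terms are reported. The same trick in isolation, with a toy vocabulary in place of the framework's IntSet:

int[] vocab = {7, 19, 1003};                      // sparse feature ids
BiMap<Integer, Integer> features = HashBiMap.create(vocab.length);
int i = 0;
for (int j : vocab) {
    features.put(i++, j);                         // dense index -> sparse feature id
}

System.out.println(features.get(2));              // 1003: dense index back to feature id
System.out.println(features.inverse().get(1003)); // 2: feature id back to dense index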

From source file:com.google.cloud.genomics.mapreduce.PcaServlet.java

private void writePcaData(double[][] data, BiMap<Integer, String> callsetNames, PrintWriter writer) {
    int rows = data.length;
    int cols = data[0].length; // the similarity matrix is square, so rows == cols

    // Center the similarity matrix.
    double matrixSum = 0;
    double[] rowSums = new double[rows];
    for (int i = 0; i < rows; i++) {
        for (int j = 0; j < cols; j++) {
            matrixSum += data[i][j];
            rowSums[i] += data[i][j];
        }
    }
    double matrixMean = matrixSum / rows / cols;
    for (int i = 0; i < rows; i++) {
        for (int j = 0; j < cols; j++) {
            double rowMean = rowSums[i] / rows;
            double colMean = rowSums[j] / rows;
            data[i][j] = data[i][j] - rowMean - colMean + matrixMean;
        }
    }

    // Determine the eigenvectors, and scale them so that their
    // sum of squares equals their associated eigenvalue.
    Matrix matrix = new Matrix(data);
    EigenvalueDecomposition eig = matrix.eig();
    Matrix eigenvectors = eig.getV();
    double[] realEigenvalues = eig.getRealEigenvalues();

    for (int j = 0; j < eigenvectors.getColumnDimension(); j++) {
        double sumSquares = 0;
        for (int i = 0; i < eigenvectors.getRowDimension(); i++) {
            sumSquares += eigenvectors.get(i, j) * eigenvectors.get(i, j);
        }
        for (int i = 0; i < eigenvectors.getRowDimension(); i++) {
            eigenvectors.set(i, j, eigenvectors.get(i, j) * Math.sqrt(realEigenvalues[j] / sumSquares));
        }
    }

    // Find the indices of the top two eigenvalues.
    int maxIndex = -1;
    int secondIndex = -1;
    double maxEigenvalue = 0;
    double secondEigenvalue = 0;

    for (int i = 0; i < realEigenvalues.length; i++) {
        double eigenvalue = realEigenvalues[i];
        if (eigenvalue > maxEigenvalue) {
            secondEigenvalue = maxEigenvalue;
            secondIndex = maxIndex;
            maxEigenvalue = eigenvalue;
            maxIndex = i;
        } else if (eigenvalue > secondEigenvalue) {
            secondEigenvalue = eigenvalue;
            secondIndex = i;
        }
    }

    // Output projected data as json
    for (int i = 0; i < rows; i++) {
        String callsetName = callsetNames.get(i);

        String[] result = new String[] { callsetName, String.valueOf(eigenvectors.get(i, maxIndex)),
                String.valueOf(eigenvectors.get(i, secondIndex)) };

        // TODO: format as json so that this can be used to make a graph
        writer.println(Joiner.on("\t").join(result));
    }

}