Example usage for com.google.common.collect BiMap put

List of usage examples for com.google.common.collect BiMap put

Introduction

On this page you can find example usage for com.google.common.collect BiMap put.

Prototype

@Override
@Nullable
V put(@Nullable K key, @Nullable V value);
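
A quick note on semantics before the examples: put behaves like Map.put, except that a BiMap also enforces uniqueness of its values. Putting a value that is already bound to a different key throws an IllegalArgumentException; forcePut can be used to overwrite the old binding instead. The following minimal sketch (the class name BiMapPutDemo is ours, for illustration) demonstrates this contract:

import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;

public class BiMapPutDemo {
    public static void main(String[] args) {
        BiMap<String, Integer> classToIndex = HashBiMap.create();
        classToIndex.put("positive", 1);
        classToIndex.put("negative", 2);

        // lookups work in both directions
        System.out.println(classToIndex.get("positive"));  // 1
        System.out.println(classToIndex.inverse().get(2)); // negative

        // putting an already-bound value under a new key fails fast
        try {
            classToIndex.put("neutral", 1);
        } catch (IllegalArgumentException expected) {
            System.out.println("value 1 is already bound to another key");
        }

        // forcePut removes the conflicting entry instead of throwing
        classToIndex.forcePut("neutral", 1);
        System.out.println(classToIndex.inverse().get(1)); // neutral
    }
}

This value-uniqueness rule is what makes the inverse() view, used in several of the examples below, possible.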

Usage

From source file: org.apache.ctakes.ytex.kernel.BaseClassifierEvaluationParser.java
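This example reads a class.properties file and uses put to build a HashBiMap from integer class indices to class names.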

protected BiMap<Integer, String> loadClassIdMap(File dataDir, String label) throws IOException {
    BiMap<Integer, String> classIndexMap = HashBiMap.create();
    String filename = FileUtil.getScopedFileName(dataDir.getPath(), label, null, null, "class.properties");
    File f = new File(filename);
    if (f.exists()) {
        BufferedReader r = null;
        try {
            r = new BufferedReader(new FileReader(f));
            Properties props = new Properties();
            props.load(r);
            for (String key : props.stringPropertyNames()) {
                classIndexMap.put(Integer.parseInt(key), props.getProperty(key));
            }
        } finally {
            if (r != null) {
                try {
                    r.close();
                } catch (IOException e) {
                    // ignore failures on close
                }
            }
        }
    }
    return classIndexMap;
}

From source file: org.apache.ctakes.ytex.kernel.KernelUtilImpl.java
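This example assigns an index to each class name per label, calling put with the parsed numeric value when the class name is itself a number and with a sequential counter otherwise.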

/**
 * Assign numeric indices to string class names.
 *
 * @param labelToClasMap map from label to the sorted set of class names
 * @param labelToClassIndexMap map to fill with a class-name-to-index BiMap per label
 */
@Override
public void fillLabelToClassToIndexMap(Map<String, SortedSet<String>> labelToClasMap,
        Map<String, BiMap<String, Integer>> labelToClassIndexMap) {
    for (Map.Entry<String, SortedSet<String>> labelToClass : labelToClasMap.entrySet()) {
        BiMap<String, Integer> classToIndexMap = HashBiMap.create();
        labelToClassIndexMap.put(labelToClass.getKey(), classToIndexMap);
        int nIndex = 1;
        for (String className : labelToClass.getValue()) {
            Integer classNumber = null;
            try {
                classNumber = Integer.parseInt(className);
            } catch (NumberFormatException fe) {
                // class name is not numeric; a sequential index will be assigned below
            }
            if (classNumber == null) {
                classToIndexMap.put(className, nIndex++);
            } else {
                classToIndexMap.put(className, classNumber);
            }
        }
    }
}

From source file: de.ovgu.serpentchess.view.chessboard.PointCalculator.java
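This example computes the screen coordinates of each board field and uses put to record the point-to-field mapping, inserting dummy fields with unique negative identifiers for out-of-bounds positions.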

public BiMap<Point, Field> calculatePointMap(int viewWidth, int viewHeight) {
    // create a Field out of bounds
    int invalidFieldIdentifier = -1;

    BiMap<Point, Field> pointMap = HashBiMap.create();

    // offset to adjust height of points because of triangles on the upper
    // and lower border
    int offset = (int) ((double) viewHeight / 70);
    double fieldWidth = (double) viewWidth / 26;
    double fieldHeight = ((double) viewHeight - 2 * offset) / 13;

    MatrixToFieldConverter conv = new MatrixToFieldConverter();

    for (int j = 0; j < 15; j++) {
        int mod = j % 2;
        int startIndex = (mod == 0) ? 1 : 0;

        for (int i = startIndex; i < 27; i = i + 2) {
            int x = (int) Math.round(i * fieldWidth);
            int y = (int) Math.round((j - 0.5) * fieldHeight);
            Field f = conv.calculateField(i, j);
            if (f.getX() != 0) {
                pointMap.put(new Point(x, y + offset), f);
            } else {
                pointMap.put(new Point(x, y + offset), new Field(0, invalidFieldIdentifier--));
            }
        }
    }
    return pointMap;
}

From source file: de.lemo.dms.processing.questions.QCourseUserPaths.java
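This example numbers each course with put as it is first seen, then uses the inverse() view to look the course back up when emitting the JSON nodes.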

@SuppressWarnings("unchecked")
@POST
public JSONObject compute(@FormParam(MetaParam.COURSE_IDS) final List<Long> courses,
        @FormParam(MetaParam.START_TIME) final Long startTime, @FormParam(MetaParam.END_TIME) Long endTime,
        @FormParam(MetaParam.GENDER) List<Long> gender) throws JSONException {

    validateTimestamps(startTime, endTime);

    final Stopwatch stopWatch = new Stopwatch();
    stopWatch.start();

    final IDBHandler dbHandler = ServerConfiguration.getInstance().getMiningDbHandler();
    final Session session = dbHandler.getMiningSession();

    Criteria criteria;
    List<Long> users = new ArrayList<Long>(StudentHelper.getCourseStudentsAliasKeys(courses, gender).values());

    criteria = session.createCriteria(ILogMining.class, "log").add(Restrictions.in("log.course.id", courses))
            .add(Restrictions.between("log.timestamp", startTime, endTime))
            .add(Restrictions.eq("log.action", "view"));
    if (!users.isEmpty()) {
        criteria.add(Restrictions.in("log.user.id", users));
    } else {
        this.logger.debug("No users found for courses. Returning empty JSONObject.");
        return new JSONObject();
    }

    final List<ILogMining> logs = criteria.list();

    this.logger.debug("Found " + users.size() + " actions. " + +stopWatch.elapsedTime(TimeUnit.SECONDS));

    long courseCount = 0;
    final BiMap<CourseMining, Long> courseNodePositions = HashBiMap.create();
    final Map<Long/* user id */, List<Long/* course id */>> userPaths = Maps.newHashMap();

    this.logger.debug("Paths fetched: " + logs.size() + ". " + stopWatch.elapsedTime(TimeUnit.SECONDS));

    Map<Long, Long> idToAlias = StudentHelper.getCourseStudentsRealKeys(courses, gender);

    for (final ILogMining log : logs) {

        final CourseMining course = log.getCourse();
        Long nodeID = courseNodePositions.get(course);
        if (nodeID == null) {
            nodeID = courseCount++;
            courseNodePositions.put(course, nodeID);
        }

        final long userId = idToAlias.get(log.getUser().getId());

        List<Long> nodeIDs = userPaths.get(userId);
        if (nodeIDs == null) {
            nodeIDs = Lists.newArrayList();
            userPaths.put(userId, nodeIDs);
        }
        nodeIDs.add(nodeID);
    }

    this.logger.debug("userPaths: " + userPaths.size());

    final Map<Long /* node id */, List<UserPathLink>> coursePaths = Maps.newHashMap();

    for (final Entry<Long, List<Long>> userEntry : userPaths.entrySet()) {

        UserPathLink lastLink = null;

        for (final Long nodeID : userEntry.getValue()) {
            List<UserPathLink> links = coursePaths.get(nodeID);
            if (links == null) {
                links = Lists.newArrayList();
                coursePaths.put(nodeID, links);
            }
            final UserPathLink link = new UserPathLink(String.valueOf(nodeID), "0");
            links.add(link);

            if (lastLink != null) {
                lastLink.setTarget(String.valueOf(nodeID));
            }
            lastLink = link;
        }
    }
    stopWatch.stop();
    this.logger.debug("coursePaths: " + coursePaths.size());
    this.logger.debug("Total Fetched log entries: " + (logs.size() + logs.size()) + " log entries."
            + stopWatch.elapsedTime(TimeUnit.SECONDS));

    final Set<UserPathLink> links = Sets.newHashSet();

    final JSONObject result = new JSONObject();
    final JSONArray nodes = new JSONArray();
    final JSONArray edges = new JSONArray();

    for (final Entry<Long, List<UserPathLink>> courseEntry : coursePaths.entrySet()) {
        final JSONObject node = new JSONObject();
        node.put("name", courseNodePositions.inverse().get(courseEntry.getKey()).getTitle());
        node.put("value", courseEntry.getValue().size());
        node.put("group", courses.contains(courseNodePositions.inverse().get(courseEntry.getKey())) ? 1 : 2);
        nodes.put(node);

        for (final UserPathLink edge : courseEntry.getValue()) {
            if (edge.getTarget().equals(edge.getSource())) { // compare values, not references
                continue;
            }
            links.add(edge);
        }
    }

    for (final UserPathLink link : links) {
        final JSONObject edgeJSON = new JSONObject();
        edgeJSON.put("target", link.getTarget());
        edgeJSON.put("source", link.getSource());
        edges.put(edgeJSON);
    }

    this.logger.debug("Nodes: " + nodes.length() + ", Links: " + edges.length() + "   / time: "
            + stopWatch.elapsedTime(TimeUnit.SECONDS));

    result.put("nodes", nodes);
    result.put("links", edges);
    session.close();
    return result;
}

From source file: org.apache.hadoop.nfs.nfs3.IdUserGroup.java
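This example parses the output of a shell command line by line and uses put to fill a bidirectional id-to-name map for users or groups.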

/**
 * Get the whole list of users and groups and save them in the maps.
 */
private void updateMapInternal(BiMap<Integer, String> map, String name, String command, String regex)
        throws IOException {
    BufferedReader br = null;
    try {
        Process process = Runtime.getRuntime().exec(new String[] { "bash", "-c", command });
        br = new BufferedReader(new InputStreamReader(process.getInputStream()));
        String line = null;
        while ((line = br.readLine()) != null) {
            String[] nameId = line.split(regex);
            if ((nameId == null) || (nameId.length != 2)) {
                throw new IOException("Can't parse " + name + " list entry:" + line);
            }
            LOG.debug("add " + name + ":" + nameId[0] + " id:" + nameId[1]);
            map.put(Integer.valueOf(nameId[1]), nameId[0]);
        }
        LOG.info("Updated " + name + " map size:" + map.size());

    } catch (IOException e) {
        LOG.error("Can't update map " + name);
        throw e;
    } finally {
        if (br != null) {
            try {
                br.close();
            } catch (IOException e1) {
                LOG.error("Can't close BufferedReader of command result");
                e1.printStackTrace();
            }
        }
    }
}

From source file: eu.itesla_project.modules.topo.UniqueTopology.java
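This example rebuilds the bus-breaker topology of a voltage level and uses put on a Terminal-to-Equipment BiMap, relying on containsValue to avoid mapping the same equipment side twice.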

public void apply(Network network) {
    final VoltageLevel vl = network.getVoltageLevel(substationId);
    if (vl == null) {
        throw new AssertionError("Voltage level " + substationId + " not found");
    }

    // store old buses id
    List<Bus> oldBuses = new ArrayList<>();
    for (Bus bus : vl.getBusBreakerView().getBuses()) {
        oldBuses.add(bus);
    }

    Map<PossibleTopology.Equipment, String> eq2bus = new HashMap<>();
    for (PossibleTopology.Bus bus : buses) {
        String busId = getBusId(bus);
        vl.getBusBreakerView().newBus().setId(busId).add();

        for (PossibleTopology.Equipment eq : bus.getEquipments()) {
            eq2bus.put(eq, busId);
        }
    }

    // remove old switches
    vl.getBusBreakerView().removeAllSwitches();

    // map equipment of the topo history to terminal, this is tricky because of equipment connected at both sides
    // on the same substation
    BiMap<Terminal, PossibleTopology.Equipment> t2eq = HashBiMap.create();
    for (Bus bus : oldBuses) {

        final List<Terminal> terminals = new ArrayList<>();
        bus.visitConnectedOrConnectableEquipments(new TerminalTopologyVisitor() {
            @Override
            public void visitTerminal(Terminal t) {
                terminals.add(t);
            }
        });

        for (Terminal t : terminals) {
            for (int i = 0; i < 2; i++) {
                PossibleTopology.Equipment pt = new PossibleTopology.Equipment(t.getConnectable().getId(), i);
                if (!t2eq.containsValue(pt) && eq2bus.containsKey(pt)) {
                    t2eq.put(t, pt);
                    break;
                }
            }
        }

    }

    // move equipments to the new buses
    for (Map.Entry<Terminal, PossibleTopology.Equipment> e : t2eq.entrySet()) {
        Terminal t = e.getKey();
        PossibleTopology.Equipment eq = e.getValue();

        String newBusId = eq2bus.get(eq);
        if (newBusId == null) {
            throw new RuntimeException("Equipment " + eq + " not found in the history");
        }
        t.disconnect();
        t.getBusBreakerView().setConnectableBus(newBusId);
        t.connect();
        // reset state variables
        t.setP(Float.NaN).setQ(Float.NaN);
    }

    // remove old buses
    for (Bus oldBus : oldBuses) {
        vl.getBusBreakerView().removeBus(oldBus.getId());
    }

    // create new switches (open)
    for (UniqueTopology.Switch sw : switches) {
        vl.getBusBreakerView().newSwitch().setId(sw.getId()).setBus1(getBusId(sw.getBus1()))
                .setBus2(getBusId(sw.getBus2())).setOpen(true).add();
    }

}

From source file: de.uni_potsdam.hpi.asg.logictool.synthesis.helper.MonotonicCoverChecker.java
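This example uses put to associate each rising or falling region with the single Espresso cube that satisfies both cover conditions.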

private boolean createDataStructure() { // check 1st & 2nd condition
    boolean retVal = true;
    for (Signal sig : checkSignals) {
        if (sig.getType() == SignalType.internal || sig.getType() == SignalType.output) {
            Regions regions = allregions.get(sig);
            BiMap<CFRegion, EspressoTerm> risingCubes = HashBiMap.create();
            BiMap<CFRegion, EspressoTerm> fallingCubes = HashBiMap.create();
            int id = 1;
            for (CFRegion reg : regions.getRisingRegions()) {
                int num = 0;
                for (Entry<EspressoTerm, EspressoValue> entry : table.getTable().columnMap()
                        .get(sig.getName() + "__set_" + id).entrySet()) {
                    if (entry.getValue() == EspressoValue.one) {
                        if (check1st(reg, entry.getKey())) {
                            if (check2nd(reg, entry.getKey())) {
                                risingCubes.put(reg, entry.getKey());
                                num++;
                            }
                        }
                    }
                }
                if (num == 0) {
                    retVal = false;
                    logger.error("No matching cube found");
                } else if (num > 1) {
                    logger.warn("More than one matching cube found");
                }
                id++;
            }
            id = 1;
            for (CFRegion reg : regions.getFallingRegions()) {
                int num = 0;
                for (Entry<EspressoTerm, EspressoValue> entry : table.getTable().columnMap()
                        .get(sig.getName() + "__reset_" + id).entrySet()) {
                    if (entry.getValue() == EspressoValue.one) {
                        if (check1st(reg, entry.getKey())) {
                            if (check2nd(reg, entry.getKey())) {
                                fallingCubes.put(reg, entry.getKey());
                                num++;
                            }
                        }
                    }
                }
                if (num == 0) {
                    retVal = false;
                    logger.error("No matching cube found");
                } else if (num > 1) {
                    logger.warn("More than one matching cube found");
                }
                id++;
            }
            cubemap.put(sig, new Cubes(risingCubes, fallingCubes));
        }
    }
    return retVal;
}

From source file: org.apache.hadoop.hive.ql.parse.mr3.GenMR3Utils.java
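This example clones an operator tree and uses put to record the original-to-clone mapping; the inverse() view later identifies abandoned event operators.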

public void removeUnionOperators(Configuration conf, GenMR3ProcContext context, BaseWork work)
        throws SemanticException {

    List<Operator<?>> roots = new ArrayList<Operator<?>>();
    roots.addAll(work.getAllRootOperators());
    if (work.getDummyOps() != null) {
        roots.addAll(work.getDummyOps());
    }
    roots.addAll(context.eventOperatorSet);

    // need to clone the plan.
    List<Operator<?>> newRoots = Utilities.cloneOperatorTree(conf, roots);

    // we're cloning the operator plan but we're retaining the original work. That means
    // that root operators have to be replaced with the cloned ops. The replacement map
    // tells you what that mapping is.
    BiMap<Operator<?>, Operator<?>> replacementMap = HashBiMap.create();

    // there's some special handling for dummyOps required. Mapjoins won't be properly
    // initialized if their dummy parents aren't initialized. Since we cloned the plan
    // we need to replace the dummy operators in the work with the cloned ones.
    List<HashTableDummyOperator> dummyOps = new LinkedList<HashTableDummyOperator>();

    Iterator<Operator<?>> it = newRoots.iterator();
    for (Operator<?> orig : roots) {
        Operator<?> newRoot = it.next();

        replacementMap.put(orig, newRoot);

        if (newRoot instanceof HashTableDummyOperator) {
            // dummy ops need to be updated to the cloned ones.
            dummyOps.add((HashTableDummyOperator) newRoot);
            it.remove();
        } else if (newRoot instanceof AppMasterEventOperator) {
            // event operators point to table scan operators. When cloning these we
            // need to restore the original scan.
            if (newRoot.getConf() instanceof DynamicPruningEventDesc) {
                TableScanOperator ts = ((DynamicPruningEventDesc) orig.getConf()).getTableScan();
                if (ts == null) {
                    throw new AssertionError("No table scan associated with dynamic event pruning. " + orig);
                }
                ((DynamicPruningEventDesc) newRoot.getConf()).setTableScan(ts);
            }
            it.remove();
        } else {
            if (newRoot instanceof TableScanOperator) {
                if (context.tsToEventMap.containsKey(orig)) {
                    // we need to update event operators with the cloned table scan
                    for (AppMasterEventOperator event : context.tsToEventMap.get(orig)) {
                        ((DynamicPruningEventDesc) event.getConf()).setTableScan((TableScanOperator) newRoot);
                    }
                }
            }
            context.rootToWorkMap.remove(orig);
            context.rootToWorkMap.put(newRoot, work);
        }
    }

    // now we remove all the unions. we throw away any branch that's not reachable from
    // the current set of roots. The reason is that those branches will be handled in
    // different tasks.
    Deque<Operator<?>> operators = new LinkedList<Operator<?>>();
    operators.addAll(newRoots);

    Set<Operator<?>> seen = new HashSet<Operator<?>>();

    while (!operators.isEmpty()) {
        Operator<?> current = operators.pop();
        seen.add(current);

        if (current instanceof FileSinkOperator) {
            FileSinkOperator fileSink = (FileSinkOperator) current;

            // remember it for additional processing later
            context.fileSinkSet.add(fileSink);

            FileSinkDesc desc = fileSink.getConf();
            Path path = desc.getDirName();
            List<FileSinkDesc> linked;

            if (!context.linkedFileSinks.containsKey(path)) {
                linked = new ArrayList<FileSinkDesc>();
                context.linkedFileSinks.put(path, linked);
            }
            linked = context.linkedFileSinks.get(path);
            linked.add(desc);

            desc.setDirName(new Path(path, "" + linked.size()));
            desc.setLinkedFileSinkDesc(linked);
        }

        if (current instanceof AppMasterEventOperator) {
            // remember for additional processing later
            context.eventOperatorSet.add((AppMasterEventOperator) current);

            // mark the original as abandoned. Don't need it anymore.
            context.abandonedEventOperatorSet
                    .add((AppMasterEventOperator) replacementMap.inverse().get(current));
        }

        if (current instanceof UnionOperator) {
            Operator<?> parent = null;
            int count = 0;

            for (Operator<?> op : current.getParentOperators()) {
                if (seen.contains(op)) {
                    ++count;
                    parent = op;
                }
            }

            // we should have been able to reach the union from only one side.
            assert count <= 1;

            if (parent == null) {
                // root operator is union (can happen in reducers)
                replacementMap.put(current, current.getChildOperators().get(0));
            } else {
                parent.removeChildAndAdoptItsChildren(current);
            }
        }

        if (current instanceof FileSinkOperator || current instanceof ReduceSinkOperator) {
            current.setChildOperators(null);
        } else {
            operators.addAll(current.getChildOperators());
        }
    }
    work.setDummyOps(dummyOps);
    work.replaceRoots(replacementMap);
}

From source file: com.haulmont.restapi.config.RestJsonTransformations.java
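This example collects attribute renamings into a BiMap with put, so that the map drives the from-version transformer and its inverse() drives the to-version transformer.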

protected void parseStandardTransformers(String currentEntityName, String oldEntityName, String version,
        Element transformationElem) {
    BiMap<String, String> renamedAttributesMap = HashBiMap.create();

    for (Element renameAttributeElem : Dom4j.elements(transformationElem, "renameAttribute")) {
        String oldAttributeName = renameAttributeElem.attributeValue("oldName");
        if (Strings.isNullOrEmpty(oldAttributeName)) {
            log.error("oldName attribute for renamed attribute is not defined");
            continue;
        }

        String currentAttributeName = renameAttributeElem.attributeValue("currentName");
        if (Strings.isNullOrEmpty(currentAttributeName)) {
            log.error("currentName attribute for renamed attribute is not defined");
            continue;
        }
        renamedAttributesMap.put(oldAttributeName, currentAttributeName);
    }

    StandardEntityJsonTransformer fromVersionTransformer = restApiContext.getBean(
            StandardEntityJsonTransformer.class, oldEntityName, currentEntityName, version,
            JsonTransformationDirection.FROM_VERSION);
    fromVersionTransformer.setAttributesToRename(renamedAttributesMap);

    StandardEntityJsonTransformer toVersionTransformer = restApiContext.getBean(
            StandardEntityJsonTransformer.class, currentEntityName, oldEntityName, version,
            JsonTransformationDirection.TO_VERSION);
    toVersionTransformer.setAttributesToRename(renamedAttributesMap.inverse());

    Element fromVersionElem = transformationElem.element("fromVersion");
    if (fromVersionElem != null) {
        processOneDirectionConfig(fromVersionElem, fromVersionTransformer);
    }

    Element toVersionElem = transformationElem.element("toVersion");
    if (toVersionElem != null) {
        processOneDirectionConfig(toVersionElem, toVersionTransformer);
    }

    toVersionTransformers.put(currentEntityName, version, toVersionTransformer);
    fromVersionTransformers.put(oldEntityName, version, fromVersionTransformer);
}

From source file: net.librec.data.convertor.ArffDataConvertor.java
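This example parses ARFF data and uses put to assign each distinct string attribute value the next free column id.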

/**
 * Parse the @DATA part of the file.
 *
 * @param rd the reader of the input file
 * @throws IOException if a line is inconsistent with the attribute definitions
 */
private void dataReader(Reader rd) throws IOException {
    ArrayList<String> dataLine = new ArrayList<>();
    StringBuilder subString = new StringBuilder();
    boolean isInQuote = false;
    boolean isInBracket = false;

    int c = 0;
    while ((c = rd.read()) != -1) {
        char ch = (char) c;
        // read line by line
        if (ch == '\n') {
            if (dataLine.size() != 0) { // skip empty lines
                if (!dataLine.get(0).startsWith("%")) { // skip annotation lines
                    dataLine.add(subString.toString());
                    // raise an error if inconsistent with the attribute definitions
                    if (dataLine.size() != attrTypes.size()) {
                        throw new IOException("Read data error, inconsistent attribute number!");
                    }

                    // put column values into columnIds, for one-hot encoding
                    for (int i = 0; i < dataLine.size(); i++) {
                        String col = dataLine.get(i).trim();
                        String type = attrTypes.get(i);
                        BiMap<String, Integer> colId = columnIds.get(i);
                        switch (type) {
                        case "NUMERIC":
                        case "REAL":
                        case "INTEGER":
                            break;
                        case "STRING":
                            int val = colId.containsKey(col) ? colId.get(col) : colId.size();
                            colId.put(col, val);
                            break;
                        case "NOMINAL":
                            StringBuilder sb = new StringBuilder();
                            String[] ss = col.split(",");
                            for (int ns = 0; ns < ss.length; ns++) {
                                String _s = ss[ns].trim();
                                if (!colId.containsKey(_s)) {
                                    throw new IOException("Read data error, inconsistent nominal value!");
                                }
                                sb.append(_s);
                                if (ns != ss.length - 1)
                                    sb.append(",");
                            }
                            col = sb.toString();
                            break;
                        }
                        dataLine.set(i, col);
                    }

                    instances.add(new ArffInstance(dataLine));

                    subString = new StringBuilder();
                    dataLine = new ArrayList<>();
                }
            }
        } else if (ch == '[' || ch == ']') {
            isInBracket = !isInBracket;
        } else if (ch == '\r') {
            // skip '\r'
        } else if (ch == '\"') {
            isInQuote = !isInQuote;
        } else if (ch == ',' && (!isInQuote && !isInBracket)) {
            dataLine.add(subString.toString());
            subString = new StringBuilder();
        } else {
            subString.append(ch);
        }
    }
}