Example usage for com.google.common.collect BiMap inverse

Introduction

On this page you can find usage examples for com.google.common.collect.BiMap.inverse().

Prototype

BiMap<V, K> inverse();

Document

Returns the inverse view of this bimap, which maps each of this bimap's values to its associated key. The two bimaps are backed by the same data; any changes to one will appear in the other.
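
Before the full examples, here is a minimal, self-contained sketch (the class and variable names are illustrative, not taken from any of the projects below) showing that inverse() returns a live view backed by the same data:

import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;

public class BiMapInverseDemo {
    public static void main(String[] args) {
        BiMap<String, Integer> wordToNumber = HashBiMap.create();
        wordToNumber.put("one", 1);
        wordToNumber.put("two", 2);

        // Look up a key by its value through the inverse view.
        BiMap<Integer, String> numberToWord = wordToNumber.inverse();
        System.out.println(numberToWord.get(2)); // prints "two"

        // The two bimaps share their data: a write through the inverse
        // view is visible in the original bimap, and vice versa.
        numberToWord.put(3, "three");
        System.out.println(wordToNumber.get("three")); // prints 3
    }
}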

Usage

From source file:to.lean.tools.gmail.importer.gmail.Mailbox.java
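
In this Gmail import tool, labelIdToNameMap maps Gmail label IDs to label names; inverse() translates local folder names back into the label IDs that must be added to a message.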

private void syncLabels(Gmail gmailApi, BiMap<String, String> labelIdToNameMap,
        Multimap<LocalMessage, Message> localMessageToGmailMessages) throws IOException {
    BatchRequest relabelBatch = gmailApi.batch();
    for (Map.Entry<LocalMessage, Message> entry : localMessageToGmailMessages.entries()) {
        LocalMessage localMessage = entry.getKey();
        Message gmailMessage = entry.getValue();

        Set<String> gmailLabels = gmailMessage.getLabelIds() == null ? ImmutableSet.of()
                : gmailMessage.getLabelIds().stream().map(labelIdToNameMap::get).collect(Collectors.toSet());

        List<String> missingLabelIds = localMessage.getFolders().stream()
                .map(folder -> folder.equalsIgnoreCase("Inbox") ? "INBOX" : folder)
                .filter(folder -> !gmailLabels.contains(folder))
                .map(folder -> labelIdToNameMap.inverse().get(folder)).collect(Collectors.toList());

        if (localMessage.isUnread() && !gmailLabels.contains("UNREAD")) {
            missingLabelIds.add("UNREAD");
        }
        if (localMessage.isStarred() && !gmailLabels.contains("STARRED")) {
            missingLabelIds.add("STARRED");
        }

        for (String folder : localMessage.getFolders()) {
            if (!gmailLabels.contains(folder)) {
                System.out.format("Trying to add labels %s to %s\n",
                        missingLabelIds.stream().map(labelIdToNameMap::get).collect(Collectors.joining(", ")),
                        gmailMessage.getId());
                gmailApi.users().messages()
                        .modify(user.getEmailAddress(), gmailMessage.getId(),
                                new ModifyMessageRequest().setAddLabelIds(missingLabelIds))
                        .queue(relabelBatch, new JsonBatchCallback<Message>() {
                            @Override
                            public void onFailure(GoogleJsonError e, HttpHeaders responseHeaders)
                                    throws IOException {
                                System.err.format("For label ids %s, got error: %s\n", missingLabelIds,
                                        e.toPrettyString());
                            }

                            @Override
                            public void onSuccess(Message message, HttpHeaders responseHeaders)
                                    throws IOException {
                                System.out.format(
                                        "Successfully added labels %s to %s\n", missingLabelIds.stream()
                                                .map(labelIdToNameMap::get).collect(Collectors.joining(", ")),
                                        message.getId());
                            }
                        });
            }
        }
        if (relabelBatch.size() > 0) {
            relabelBatch.execute();
        }
    }
}

From source file:com.opengamma.core.historicaltimeseries.impl.NonVersionedRedisHistoricalTimeSeriesSource.java
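
Here a date-to-text bimap backs a Redis time-series update; inverse().keySet() iterates the stored date strings so stale entries can be removed from the sorted set before being re-added.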

protected void updateTimeSeries(String redisKey, LocalDateDoubleTimeSeries timeseries) {
    try (Timer.Context context = _updateSeriesTimer.time()) {
        Jedis jedis = getJedisPool().getResource();
        try {
            Map<String, String> htsMap = Maps.newHashMap();
            BiMap<Double, String> dates = HashBiMap.create();
            for (Entry<LocalDate, Double> entry : timeseries) {
                String dateAsIntText = Integer.toString(LocalDateToIntConverter.convertToInt(entry.getKey()));
                htsMap.put(dateAsIntText, Double.toString(entry.getValue()));
                dates.put(Double.valueOf(dateAsIntText), dateAsIntText);
            }

            Pipeline pipeline = jedis.pipelined();
            pipeline.multi();
            String redisHtsDatapointKey = toRedisHtsDatapointKey(redisKey);
            pipeline.hmset(redisHtsDatapointKey, htsMap);

            String redisHtsDaysKey = toRedisHtsDaysKey(redisKey);
            for (String dateAsIntText : dates.inverse().keySet()) {
                pipeline.zrem(redisHtsDaysKey, dateAsIntText);
            }

            for (Entry<Double, String> entry : dates.entrySet()) {
                pipeline.zadd(redisHtsDaysKey, entry.getKey(), entry.getValue());
            }

            pipeline.exec();
            pipeline.sync();
            getJedisPool().returnResource(jedis);
        } catch (Exception e) {
            s_logger.error("Unable to put timeseries with id: " + redisKey, e);
            getJedisPool().returnBrokenResource(jedis);
            throw new OpenGammaRuntimeException("Unable to put timeseries with id: " + redisKey, e);
        }
    }
}

From source file:org.apache.hadoop.security.ShellBasedIdMapping.java
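
When parsing id/name pairs from a shell command, inverse().get(value) performs the reverse lookup needed to report which id a duplicate name is already bound to.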

/**
 * Get the list of users or groups returned by the specified command,
 * and save them in the corresponding map.
 * @throws IOException
 */
@VisibleForTesting
public static boolean updateMapInternal(BiMap<Integer, String> map, String mapName, String command,
        String regex, Map<Integer, Integer> staticMapping) throws IOException {
    boolean updated = false;
    BufferedReader br = null;
    try {
        Process process = Runtime.getRuntime().exec(new String[] { "bash", "-c", command });
        br = new BufferedReader(new InputStreamReader(process.getInputStream(), Charset.defaultCharset()));
        String line = null;
        while ((line = br.readLine()) != null) {
            String[] nameId = line.split(regex);
            if ((nameId == null) || (nameId.length != 2)) {
                throw new IOException("Can't parse " + mapName + " list entry:" + line);
            }
            LOG.debug("add to " + mapName + "map:" + nameId[0] + " id:" + nameId[1]);
            // HDFS can't differentiate duplicate names with simple authentication
            final Integer key = staticMapping.get(parseId(nameId[1]));
            final String value = nameId[0];
            if (map.containsKey(key)) {
                final String prevValue = map.get(key);
                if (value.equals(prevValue)) {
                    // silently ignore equivalent entries
                    continue;
                }
                reportDuplicateEntry("Got multiple names associated with the same id: ", key, value, key,
                        prevValue);
                continue;
            }
            if (map.containsValue(value)) {
                final Integer prevKey = map.inverse().get(value);
                reportDuplicateEntry("Got multiple ids associated with the same name: ", key, value, prevKey,
                        value);
                continue;
            }
            map.put(key, value);
            updated = true;
        }
        LOG.debug("Updated " + mapName + " map size: " + map.size());

    } catch (IOException e) {
        LOG.error("Can't update " + mapName + " map");
        throw e;
    } finally {
        if (br != null) {
            try {
                br.close();
            } catch (IOException e1) {
                LOG.error("Can't close BufferedReader of command result", e1);
            }
        }
    }
    return updated;
}

From source file:org.eclipse.sirius.business.internal.contribution.RepresentationExtensionsFinder.java
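
A URI-to-Viewpoint bimap lets this fixpoint computation go both ways: candidates.get(uri) resolves a URI to a Viewpoint, while candidates.inverse().get(vp) recovers the URI a Viewpoint was registered under.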

/**
 * Returns the subset of the viewpoints currently selected in the session
 * which are relevant for the specified representation description. This
 * includes the representation description's parent Sirius, all the
 * Viewpoints it reuses, all the Viewpoints which extend any of those, and
 * recursively all the Viewpoints those reuse or are extended by.
 *
 * @param session
 *            the session in which the representation description is used.
 * @return all the Viewpoints which are relevant for the computation of the
 *         effective representation description.
 */
public LinkedHashSet<Viewpoint> findAllRelevantViewpoints(Session session) {
    RepresentationDescription mainRepresentationDescription = extensionTarget;
    LinkedHashSet<Viewpoint> result = Sets.newLinkedHashSet();
    Viewpoint mainVP = new RepresentationDescriptionQuery(mainRepresentationDescription).getParentViewpoint();
    if (mainVP != null) {
        BiMap<URI, Viewpoint> candidates = HashBiMap.create();
        for (Viewpoint vp : session.getSelectedViewpoints(false)) {
            Option<URI> uri = new ViewpointQuery(vp).getViewpointURI();
            if (uri.some()) {
                candidates.put(uri.get(), vp);
            }
        }
        ViewpointRelations relations = ((org.eclipse.sirius.business.internal.movida.registry.ViewpointRegistry) ViewpointRegistry
                .getInstance()).getRelations();
        /*
         * Seed the result with the representation's parent Sirius and
         * augment it with the viewpoints we reuse or are extended by until
         * we reach a fixpoint. Guaranteed to terminate as the set of
         * Viewpoints we can add is finite (at the most, all the selected
         * Viewpoints).
         */
        boolean changed = result.add(mainVP);
        while (changed) {
            changed = false;
            // Add all the Viewpoints we reuse.
            for (Viewpoint v1 : Lists.newArrayList(result)) {
                URI uri = candidates.inverse().get(v1);
                // addAll must run on every pass, so avoid the short-circuiting || here.
                changed |= Iterables.addAll(result,
                        Iterables.transform(relations.getReuse().apply(uri), Functions.forMap(candidates)));
            }
            // Add all the Viewpoints which extend any of us.
            for (Viewpoint v : session.getSelectedViewpoints(false)) {
                URI extenderUri = candidates.inverse().get(v);
                for (URI extendeeUri : relations.getCustomize().apply(extenderUri)) {
                    if (result.contains(candidates.get(extendeeUri))) {
                        changed |= result.add(v);
                    }
                }
            }
        }
    }
    return result;
}

From source file:de.hzi.helmholtz.Compare.PathwayComparisonUsingModules.java
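
A gene-id-to-position bimap is flipped with inverse() to rebuild a '+'-separated gene id string from a string of positions.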

public String reconstructWithGeneId(String positionIdStr, BiMap<String, Integer> newGeneIdToPositionMap) {
    String geneIdStr = "";
    String[] positions = positionIdStr.split("\\+");
    for (String position : positions) {
        int pos = Integer.parseInt(position.trim());
        geneIdStr += newGeneIdToPositionMap.inverse().get(pos) + "+";
    }
    geneIdStr = geneIdStr.substring(0, geneIdStr.length() - 1);
    return geneIdStr;
}

From source file:org.artifactory.storage.db.build.service.BuildStoreServiceImpl.java
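
Checksums found in the binary store go into a SHA1-to-MD5 bimap; found.inverse().get(md5) then fills in a missing SHA1 from its MD5 counterpart, while a plain get() covers the opposite direction.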

/**
 * Locates and fills in missing checksums of a build file bean
 *
 * @param buildFiles List of build files to populate
 */
private void handleBeanPopulation(List<? extends BuildFileBean> buildFiles) {
    if (buildFiles != null && !buildFiles.isEmpty()) {
        Set<String> checksums = Sets.newHashSet();
        for (BuildFileBean buildFile : buildFiles) {
            boolean sha1Exists = StringUtils.isNotBlank(buildFile.getSha1());
            boolean md5Exists = StringUtils.isNotBlank(buildFile.getMd5());

            //If the bean has both or neither of the checksums, skip it
            if ((sha1Exists && md5Exists) || ((!sha1Exists && !md5Exists))) {
                continue;
            }

            if (!sha1Exists) {
                checksums.add(buildFile.getMd5());
            } else {
                checksums.add(buildFile.getSha1());
            }
        }
        Set<BinaryInfo> binaryInfos = binaryStore.findBinaries(checksums);
        BiMap<String, String> found = HashBiMap.create(binaryInfos.size());
        for (BinaryInfo binaryInfo : binaryInfos) {
            found.put(binaryInfo.getSha1(), binaryInfo.getMd5());
        }
        for (BuildFileBean buildFile : buildFiles) {
            boolean sha1Exists = StringUtils.isNotBlank(buildFile.getSha1());
            boolean md5Exists = StringUtils.isNotBlank(buildFile.getMd5());

            //If the bean has both or neither of the checksums, skip it
            if ((sha1Exists && md5Exists) || ((!sha1Exists && !md5Exists))) {
                continue;
            }

            if (!sha1Exists) {
                String newSha1 = found.inverse().get(buildFile.getMd5());
                if (ChecksumType.sha1.isValid(newSha1)) {
                    buildFile.setSha1(newSha1);
                }
            } else {
                String newMd5 = found.get(buildFile.getSha1());
                if (ChecksumType.md5.isValid(newMd5)) {
                    buildFile.setMd5(newMd5);
                }
            }
        }
    }
}

From source file:com.mycelium.wapi.wallet.bip44.Bip44Account.java
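
An address-to-index bimap lets the account check, via inverse().containsKey(index), whether an address has already been derived for a given chain index before generating more.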

protected void ensureAddressIndexes(boolean isChangeChain, boolean full_look_ahead) {
    int index;
    BiMap<Address, Integer> addressMap;
    if (isChangeChain) {
        index = _context.getLastInternalIndexWithActivity();
        if (full_look_ahead) {
            index += INTERNAL_FULL_ADDRESS_LOOK_AHEAD_LENGTH;
        } else {
            index += INTERNAL_MINIMAL_ADDRESS_LOOK_AHEAD_LENGTH;
        }
        addressMap = _internalAddresses;
    } else {
        index = _context.getLastExternalIndexWithActivity();
        if (full_look_ahead) {
            index += EXTERNAL_FULL_ADDRESS_LOOK_AHEAD_LENGTH;
        } else {
            index += EXTERNAL_MINIMAL_ADDRESS_LOOK_AHEAD_LENGTH;
        }
        addressMap = _externalAddresses;
    }
    while (index >= 0) {
        if (addressMap.inverse().containsKey(index)) {
            return;
        }
        addressMap.put(Preconditions.checkNotNull(_keyManager.getAddress(isChangeChain, index)), index);
        index--;
    }
}

From source file:org.bimserver.ifc.IfcModel.java
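
While validating object ids, done.inverse().get(oid) retrieves the object already registered under a duplicate OID so that both conflicting objects can be reported.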

@SuppressWarnings("rawtypes")
private void checkDoubleOidsPlusReferences(BiMap<IdEObject, Long> done, IdEObject idEObject) {
    if (idEObject == null) {
        return;
    }
    if (idEObject.eClass().getEAnnotation("wrapped") != null) {
        return;
    }
    if (done.containsKey(idEObject)) {
        return;
    }
    if (done.containsValue(idEObject.getOid())) {
        showBackReferences(idEObject);
        IdEObject existing = done.inverse().get(idEObject.getOid());
        showBackReferences(existing);
        throw new RuntimeException("Double oid: " + idEObject.getOid() + " " + idEObject + ", " + existing);
    }
    done.put(idEObject, idEObject.getOid());
    for (EReference eReference : idEObject.eClass().getEAllReferences()) {
        if (eReference.isMany()) {
            List list = (List) idEObject.eGet(eReference);
            for (Object o : list) {
                checkDoubleOidsPlusReferences(done, (IdEObject) o);
            }
        } else {
            checkDoubleOidsPlusReferences(done, (IdEObject) idEObject.eGet(eReference));
        }
    }
}

From source file:com.google.cloud.genomics.mapreduce.PcaServlet.java
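
Callset names are assigned matrix indices in a bimap; the servlet passes callsetIndicies.inverse() to the writer so that matrix rows can be labeled with their callset names again.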

@Override
public void doPost(HttpServletRequest req, HttpServletResponse resp) throws IOException {
    String bucket = req.getParameter("bucket");
    String filename = req.getParameter("filename");

    GcsService gcsService = GcsServiceFactory.createGcsService(RetryParams.getDefaultInstance());

    // TODO: Use a prefetching read channel.
    // This is currently failing with 'invalid stream header'
    //  GcsInputChannel readChannel = gcsService.openPrefetchingReadChannel(
    //      new GcsFilename(bucket, filename), 0, 1024 * 1024);

    BiMap<String, Integer> callsetIndicies = HashBiMap.create();
    Map<Pair<Integer, Integer>, Integer> callsetData = Maps.newHashMap();

    // TODO: This gcs file can't be read when deployed locally
    GcsFilename fileName = new GcsFilename(bucket, filename);
    int fileSize = (int) gcsService.getMetadata(fileName).getLength();

    ByteBuffer result = ByteBuffer.allocate(fileSize);
    GcsInputChannel readChannel = gcsService.openReadChannel(fileName, 0);
    readChannel.read(result);
    readChannel.close();

    // Parse file
    String file = new String(result.array());
    for (String line : file.split(":")) {
        String[] data = line.split("-");
        int callset1 = getCallsetIndex(callsetIndicies, data[0]);
        int callset2 = getCallsetIndex(callsetIndicies, data[1]);
        Integer similarity = Integer.valueOf(data[2]);
        callsetData.put(Pair.of(callset1, callset2), similarity);
    }

    // Create matrix data
    int callsetCount = callsetIndicies.size();
    double[][] matrixData = new double[callsetCount][callsetCount];
    for (Map.Entry<Pair<Integer, Integer>, Integer> entry : callsetData.entrySet()) {
        matrixData[entry.getKey().getFirst()][entry.getKey().getSecond()] = entry.getValue();
    }

    writePcaData(matrixData, callsetIndicies.inverse(), resp.getWriter());
}

From source file:org.broadleafcommerce.core.search.dao.SolrIndexDaoImpl.java
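
A sandbox-to-original product id bimap is inverted so that original product ids returned by the query can be swapped for their sandbox counterparts before the catalog structure is populated.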

@Override
public void populateProductCatalogStructure(List<Long> productIds, CatalogStructure catalogStructure) {
    BroadleafRequestContext context = BroadleafRequestContext.getBroadleafRequestContext();
    Boolean oldIgnoreFilters = context.getInternalIgnoreFilters();
    context.setInternalIgnoreFilters(false);
    try {
        Map<Long, Set<Long>> parentCategoriesByProduct = new HashMap<Long, Set<Long>>();
        Map<Long, Set<Long>> parentCategoriesByCategory = new HashMap<Long, Set<Long>>();

        Long[] products = productIds.toArray(new Long[productIds.size()]);
        BiMap<Long, Long> sandBoxProductToOriginalMap = sandBoxHelper.getSandBoxToOriginalMap(ProductImpl.class,
                products);
        int batchSize = 800;
        int count = 0;
        int pos = 0;
        while (pos < products.length) {
            int remaining = products.length - pos;
            int mySize = remaining > batchSize ? batchSize : remaining;
            Long[] temp = new Long[mySize];
            System.arraycopy(products, pos, temp, 0, mySize);

            //context.getAdditionalProperties().put("constrainedFilterGroups", Arrays.asList("archivedFilter"));
            TypedQuery<ParentCategoryByProduct> query = em
                    .createNamedQuery("BC_READ_PARENT_CATEGORY_IDS_BY_PRODUCTS", ParentCategoryByProduct.class);
            query.setParameter("productIds", sandBoxHelper.mergeCloneIds(ProductImpl.class, temp));

            List<ParentCategoryByProduct> results = query.getResultList();
            //context.getAdditionalProperties().remove("constrainedFilterGroups");
            for (ParentCategoryByProduct item : results) {
                Long sandBoxProductVal = item.getProduct();
                BiMap<Long, Long> reverse = sandBoxProductToOriginalMap.inverse();
                if (reverse.containsKey(sandBoxProductVal)) {
                    sandBoxProductVal = reverse.get(sandBoxProductVal);
                }
                if (!catalogStructure.getParentCategoriesByProduct().containsKey(sandBoxProductVal)) {
                    if (!parentCategoriesByProduct.containsKey(sandBoxProductVal)) {
                        parentCategoriesByProduct.put(sandBoxProductVal, new HashSet<Long>());
                    }
                    //We only want the sandbox parent - if applicable
                    //Long sandBoxVal = sandBoxHelper.getCombinedSandBoxVersionId(CategoryImpl.class, item.getParent());
                    Long sandBoxVal = sandBoxHelper.getSandBoxVersionId(CategoryImpl.class, item.getParent());
                    if (sandBoxVal == null) {
                        sandBoxVal = item.getParent();
                    }
                    parentCategoriesByProduct.get(sandBoxProductVal).add(sandBoxVal);
                }
            }
            for (Map.Entry<Long, Set<Long>> entry : parentCategoriesByProduct.entrySet()) {
                for (Long categoryId : entry.getValue()) {
                    if (!catalogStructure.getParentCategoriesByCategory().containsKey(categoryId)) {
                        Set<Long> hierarchy = new HashSet<Long>();
                        parentCategoriesByCategory.put(categoryId, hierarchy);
                    }
                    if (!catalogStructure.getProductsByCategory().containsKey(categoryId)) {
                        List<ProductsByCategoryWithOrder> categoryChildren = readProductIdsByCategory(
                                categoryId);

                        // Cache the display order bigdecimals
                        BigDecimal displayOrder = new BigDecimal("1.00000");
                        for (ProductsByCategoryWithOrder child : categoryChildren) {
                            catalogStructure.getDisplayOrdersByCategoryProduct().put(
                                    categoryId + "-" + child.getProductId(),
                                    child.getDisplayOrder() == null ? displayOrder : child.getDisplayOrder());
                            if (child.getDisplayOrder() != null) {
                                displayOrder = child.displayOrder;
                            }
                            displayOrder = displayOrder.add(new BigDecimal("1.00000"));
                        }

                        //filter the list for sandbox values
                        for (Map.Entry<Long, Long> sandBoxProduct : sandBoxProductToOriginalMap.entrySet()) {
                            for (ProductsByCategoryWithOrder child : categoryChildren) {
                                if (child.getProductId().equals(sandBoxProduct.getValue())) {
                                    child.setProductId(sandBoxProduct.getKey());
                                }
                            }
                        }

                        List<Long> categoryChildProductIds = BLCCollectionUtils.collectList(categoryChildren,
                                new TypedTransformer<Long>() {
                                    @Override
                                    public Long transform(Object input) {
                                        return ((ProductsByCategoryWithOrder) input).getProductId();
                                    }
                                });
                        catalogStructure.getProductsByCategory().put(categoryId, categoryChildProductIds);
                    }
                }
            }
            count++;
            pos = (count * batchSize) < products.length ? (count * batchSize) : products.length;
        }
        readFullCategoryHierarchy(parentCategoriesByCategory, new HashSet<Long>());
        catalogStructure.getParentCategoriesByProduct().putAll(parentCategoriesByProduct);
        catalogStructure.getParentCategoriesByCategory().putAll(parentCategoriesByCategory);
    } finally {
        context.setInternalIgnoreFilters(oldIgnoreFilters);
    }
}