Example usage for com.google.common.collect HashMultimap create

Introduction

On this page you can find example usage for com.google.common.collect HashMultimap.create().

Prototype

public static <K, V> HashMultimap<K, V> create() 

Documentation

Creates a new, empty HashMultimap with the default initial capacities.
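
Before the real-world excerpts below, here is a minimal, self-contained sketch (class and variable names are illustrative) of the set semantics you get from create():

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;

public class HashMultimapCreateDemo {
    public static void main(String[] args) {
        // the type arguments K and V are inferred from the assignment target
        Multimap<String, String> tags = HashMultimap.create();
        tags.put("fruit", "apple");
        tags.put("fruit", "banana");
        tags.put("fruit", "apple"); // duplicate key-value pair is silently ignored
        System.out.println(tags.size());       // 2
        System.out.println(tags.get("fruit")); // e.g. [banana, apple]; iteration order is not guaranteed
        System.out.println(tags.asMap());      // a Map<String, Collection<String>> view, e.g. {fruit=[banana, apple]}
    }
}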

Usage

From source file:org.hupo.psi.mi.example.xml.CreateEntryPerPublication.java
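
This example reads a PSI-MI XML file, uses HashMultimap.create() to group interactions by their publication id, and then writes a new EntrySet containing one Entry per publication.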

public static void main(String[] args) throws Exception {
    // the PSI-MI XML version to parse, plus the input and output files
    final PsimiXmlVersion xmlVersion = PsimiXmlVersion.VERSION_254;
    final File inputFile = new File("d:/Downloads/imex-mpidb.xml");
    final File outputFile = new File("d:/Downloads/lala.xml");

    // action!

    // We will use a multimap (from Guava, formerly the Google Collections Library)
    // to store the interactions grouped by publication id
    Multimap<String, Interaction> publicationMap = HashMultimap.create();

    // Read the file
    PsimiXmlReader reader = new PsimiXmlReader(xmlVersion);
    EntrySet entrySet = reader.read(inputFile);

    // Iterate through the entries
    for (Entry entry : entrySet.getEntries()) {
        for (Interaction interaction : entry.getInteractions()) {
            String publicationId = findPublicationId(interaction);
            publicationMap.put(publicationId, interaction);
        }
    }

    // now create an Entry per publication
    EntrySet newEntrySet = new EntrySet(xmlVersion);

    // get first source from the original inputFile
    Source source = entrySet.getEntries().iterator().next().getSource();

    // iterating through the multimap, we get the grouped interactions
    for (Map.Entry<String, Collection<Interaction>> pubInteractions : publicationMap.asMap().entrySet()) {
        Entry entry = new Entry(pubInteractions.getValue());
        entry.setSource(source);

        newEntrySet.getEntries().add(entry);
    }

    // write the output file
    PsimiXmlWriter psimiXmlWriter = new PsimiXmlWriter(xmlVersion);
    psimiXmlWriter.write(newEntrySet, outputFile);
}

From source file:org.apache.ctakes.temporal.data.analysis.PrintInconsistentAnnotations.java
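
Here the multimap groups gold-standard events under their narrative container, so that each container's DocTimeRel values can be checked for consistency; inconsistent containers are printed with surrounding context.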

public static void main(String[] args) throws Exception {
    Options options = CliFactory.parseArguments(Options.class, args);
    int windowSize = 50;

    List<Integer> patientSets = options.getPatients().getList();
    List<Integer> trainItems = THYMEData.getPatientSets(patientSets, THYMEData.TRAIN_REMAINDERS);
    List<File> files = THYMEData.getFilesFor(trainItems, options.getRawTextDirectory());

    CollectionReader reader = UriCollectionReader.getCollectionReaderFromFiles(files);
    AggregateBuilder aggregateBuilder = new AggregateBuilder();
    aggregateBuilder.add(UriToDocumentTextAnnotator.getDescription());
    aggregateBuilder.add(AnalysisEngineFactory.createEngineDescription(XMIReader.class,
            XMIReader.PARAM_XMI_DIRECTORY, options.getXMIDirectory()));

    int totalDocTimeRels = 0;
    int totalInconsistentDocTimeRels = 0;
    for (Iterator<JCas> casIter = new JCasIterator(reader, aggregateBuilder.createAggregate()); casIter
            .hasNext();) {
        JCas jCas = casIter.next();
        String text = jCas.getDocumentText();
        JCas goldView = jCas.getView("GoldView");

        // group events by their narrative container
        Multimap<Annotation, EventMention> containers = HashMultimap.create();
        for (TemporalTextRelation relation : JCasUtil.select(goldView, TemporalTextRelation.class)) {
            if (relation.getCategory().equals("CONTAINS")) {
                Annotation arg1 = relation.getArg1().getArgument();
                Annotation arg2 = relation.getArg2().getArgument();
                if (arg2 instanceof EventMention) {
                    EventMention event = (EventMention) arg2;
                    containers.put(arg1, event);
                }
            }
        }

        // check each container for inconsistent DocTimeRels
        for (Annotation container : containers.keySet()) {
            Set<String> docTimeRels = Sets.newHashSet();
            for (EventMention event : containers.get(container)) {
                docTimeRels.add(event.getEvent().getProperties().getDocTimeRel());
            }
            totalDocTimeRels += docTimeRels.size();

            boolean inconsistentDocTimeRels;
            if (container instanceof EventMention) {
                EventMention mention = ((EventMention) container);
                String containerDocTimeRel = mention.getEvent().getProperties().getDocTimeRel();
                inconsistentDocTimeRels = false;
                for (String docTimeRel : docTimeRels) {
                    if (docTimeRel.equals(containerDocTimeRel)) {
                        continue;
                    }
                    if (containerDocTimeRel.equals("BEFORE/OVERLAP")
                            && (docTimeRel.equals("BEFORE") || docTimeRel.equals("OVERLAP"))) {
                        continue;
                    }
                    inconsistentDocTimeRels = true;
                    break;
                }
            } else {
                if (docTimeRels.size() == 1) {
                    inconsistentDocTimeRels = false;
                } else if (docTimeRels.contains("BEFORE/OVERLAP")) {
                    // mirror the event-container case above: BEFORE/OVERLAP is compatible
                    // with BEFORE and OVERLAP, but with nothing else
                    inconsistentDocTimeRels = !Sets.difference(docTimeRels,
                            Sets.newHashSet("BEFORE/OVERLAP", "BEFORE", "OVERLAP")).isEmpty();
                } else {
                    inconsistentDocTimeRels = true;
                }
            }

            // if inconsistent: print events, DocTimeRels and surrounding context
            if (inconsistentDocTimeRels) {
                totalInconsistentDocTimeRels += docTimeRels.size();

                List<Integer> offsets = Lists.newArrayList();
                offsets.add(container.getBegin());
                offsets.add(container.getEnd());
                for (EventMention event : containers.get(container)) {
                    offsets.add(event.getBegin());
                    offsets.add(event.getEnd());
                }
                Collections.sort(offsets);
                int begin = Math.max(offsets.get(0) - windowSize, 0);
                int end = Math.min(offsets.get(offsets.size() - 1) + windowSize, text.length());
                System.err.printf("Inconsistent DocTimeRels in %s, ...%s...\n",
                        new File(ViewUriUtil.getURI(jCas)).getName(),
                        text.substring(begin, end).replaceAll("([\r\n])[\r\n]+", "$1"));
                if (container instanceof EventMention) {
                    System.err.printf("Container: \"%s\" (docTimeRel=%s)\n", container.getCoveredText(),
                            ((EventMention) container).getEvent().getProperties().getDocTimeRel());
                } else {
                    System.err.printf("Container: \"%s\"\n", container.getCoveredText());
                }
                Ordering<EventMention> byBegin = Ordering.natural()
                        .onResultOf(new Function<EventMention, Integer>() {
                            @Override
                            public Integer apply(@Nullable EventMention event) {
                                return event.getBegin();
                            }
                        });
                for (EventMention event : byBegin.sortedCopy(containers.get(container))) {
                    System.err.printf("* \"%s\" (docTimeRel=%s)\n", event.getCoveredText(),
                            event.getEvent().getProperties().getDocTimeRel());
                }
                System.err.println();
            }
        }
    }

    System.err.printf("Inconsistent DocTimeRels: %.1f%% (%d/%d)\n",
            100.0 * totalInconsistentDocTimeRels / totalDocTimeRels, totalInconsistentDocTimeRels,
            totalDocTimeRels);
}

From source file:it.units.malelab.ege.MappingPropertiesExperimenter.java
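
This experiment pushes randomly generated genotypes through several genotype-phenotype mappers; the HashMultimap collects, for each resulting phenotype, the genotypes that produced it, from which invalidity, redundancy, locality and related properties are computed.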

public static void main(String[] args) throws IOException, InterruptedException, ExecutionException {
    final int n = 10000;
    final int nDist = 10000;
    //prepare problems and methods
    List<String> problems = Lists.newArrayList("bool-parity5", "bool-mopm3", "sr-keijzer6", "sr-nguyen7",
            "sr-pagie1", "sr-vladislavleva4", "other-klandscapes3", "other-klandscapes7", "other-text");
    List<String> mappers = new ArrayList<>();
    for (int gs : new int[] { 64, 128, 256, 512, 1024 }) {
        mappers.add("ge-" + gs + "-2");
        mappers.add("ge-" + gs + "-4");
        mappers.add("ge-" + gs + "-8");
        mappers.add("ge-" + gs + "-12");
        mappers.add("pige-" + gs + "-4");
        mappers.add("pige-" + gs + "-8");
        mappers.add("pige-" + gs + "-16");
        mappers.add("pige-" + gs + "-24");
        mappers.add("hge-" + gs + "-0");
        mappers.add("whge-" + gs + "-2");
        mappers.add("whge-" + gs + "-3");
        mappers.add("whge-" + gs + "-5");
    }
    mappers.add("sge-0-5");
    mappers.add("sge-0-6");
    mappers.add("sge-0-7");
    mappers.add("sge-0-8");
    mappers.clear();
    mappers.addAll(Lists.newArrayList("ge-1024-8", "pige-1024-16", "hge-1024-0", "whge-1024-3", "sge-0-6"));
    PrintStream filePrintStream = null;
    if (args.length > 0) {
        filePrintStream = new PrintStream(args[0]);
    } else {
        filePrintStream = System.out;
    }
    filePrintStream.printf("problem;mapper;genotypeSize;param;property;value%n");
    //prepare distances
    Distance<Node<String>> phenotypeDistance = new CachedDistance<>(new LeavesEdit<String>());
    Distance<Sequence> genotypeDistance = new CachedDistance<>(new Hamming());
    //iterate
    for (String problemName : problems) {
        for (String mapperName : mappers) {
            System.out.printf("%20.20s, %20.20s", problemName, mapperName);
            //build problem
            Problem<String, NumericFitness> problem = null;
            if (problemName.equals("bool-parity5")) {
                problem = new Parity(5);
            } else if (problemName.equals("bool-mopm3")) {
                problem = new MultipleOutputParallelMultiplier(3);
            } else if (problemName.equals("sr-keijzer6")) {
                problem = new HarmonicCurve();
            } else if (problemName.equals("sr-nguyen7")) {
                problem = new Nguyen7(1);
            } else if (problemName.equals("sr-pagie1")) {
                problem = new Pagie1();
            } else if (problemName.equals("sr-vladislavleva4")) {
                problem = new Vladislavleva4(1);
            } else if (problemName.equals("other-klandscapes3")) {
                problem = new KLandscapes(3);
            } else if (problemName.equals("other-klandscapes7")) {
                problem = new KLandscapes(7);
            } else if (problemName.equals("other-text")) {
                problem = new Text();
            }
            //build configuration and evolver
            Mapper mapper = null;
            int genotypeSize = Integer.parseInt(mapperName.split("-")[1]);
            int mapperMainParam = Integer.parseInt(mapperName.split("-")[2]);
            if (mapperName.split("-")[0].equals("ge")) {
                mapper = new StandardGEMapper<>(mapperMainParam, 1, problem.getGrammar());
            } else if (mapperName.split("-")[0].equals("pige")) {
                mapper = new PiGEMapper<>(mapperMainParam, 1, problem.getGrammar());
            } else if (mapperName.split("-")[0].equals("sge")) {
                mapper = new SGEMapper<>(mapperMainParam, problem.getGrammar());
            } else if (mapperName.split("-")[0].equals("hge")) {
                mapper = new HierarchicalMapper<>(problem.getGrammar());
            } else if (mapperName.split("-")[0].equals("whge")) {
                mapper = new WeightedHierarchicalMapper<>(mapperMainParam, false, true, problem.getGrammar());
            }
            //prepare things
            Random random = new Random(1);
            Set<Sequence> genotypes = new LinkedHashSet<>(n);
            //build genotypes
            if (mapperName.split("-")[0].equals("sge")) {
                SGEGenotypeFactory<String> factory = new SGEGenotypeFactory<>((SGEMapper) mapper);
                while (genotypes.size() < n) {
                    genotypes.add(factory.build(random));
                }
                genotypeSize = factory.getBitSize();
            } else {
                BitsGenotypeFactory factory = new BitsGenotypeFactory(genotypeSize);
                while (genotypes.size() < n) {
                    genotypes.add(factory.build(random));
                }
            }
            //build and fill map
            Multimap<Node<String>, Sequence> multimap = HashMultimap.create();
            int progress = 0;
            for (Sequence genotype : genotypes) {
                Node<String> phenotype;
                try {
                    if (mapperName.split("-")[0].equals("sge")) {
                        phenotype = mapper.map((SGEGenotype<String>) genotype, new HashMap<>());
                    } else {
                        phenotype = mapper.map((BitsGenotype) genotype, new HashMap<>());
                    }
                } catch (MappingException e) {
                    phenotype = Node.EMPTY_TREE;
                }
                multimap.put(phenotype, genotype);
                progress = progress + 1;
                if (progress % Math.round(n / 10) == 0) {
                    System.out.print(".");
                }
            }
            System.out.println();
            //compute distances
            List<Pair<Double, Double>> allDistances = new ArrayList<>();
            List<Pair<Double, Double>> allValidDistances = new ArrayList<>();
            Multimap<Node<String>, Double> genotypeDistances = ArrayListMultimap.create();
            for (Node<String> phenotype : multimap.keySet()) {
                for (Sequence genotype1 : multimap.get(phenotype)) {
                    for (Sequence genotype2 : multimap.get(phenotype)) {
                        double gDistance = genotypeDistance.d(genotype1, genotype2);
                        genotypeDistances.put(phenotype, gDistance);
                        if (genotypeDistances.get(phenotype).size() > nDist) {
                            break;
                        }
                    }
                    if (genotypeDistances.get(phenotype).size() > nDist) {
                        break;
                    }
                }
            }
            List<Map.Entry<Node<String>, Sequence>> entries = new ArrayList<>(multimap.entries());
            Collections.shuffle(entries, random);
            for (Map.Entry<Node<String>, Sequence> entry1 : entries) {
                for (Map.Entry<Node<String>, Sequence> entry2 : entries) {
                    double gDistance = genotypeDistance.d(entry1.getValue(), entry2.getValue());
                    double pDistance = phenotypeDistance.d(entry1.getKey(), entry2.getKey());
                    allDistances.add(new Pair<>(gDistance, pDistance));
                    if (!Node.EMPTY_TREE.equals(entry1.getKey()) && !Node.EMPTY_TREE.equals(entry2.getKey())) {
                        allValidDistances.add(new Pair<>(gDistance, pDistance));
                    }
                    if (allDistances.size() > nDist) {
                        break;
                    }
                }
                if (allDistances.size() > nDist) {
                    break;
                }
            }
            //compute properties
            double invalidity = (double) multimap.get(Node.EMPTY_TREE).size() / (double) genotypes.size();
            double redundancy = 1 - (double) multimap.keySet().size() / (double) genotypes.size();
            double validRedundancy = redundancy;
            if (multimap.keySet().contains(Node.EMPTY_TREE)) {
                validRedundancy = 1 - ((double) multimap.keySet().size() - 1d)
                        / (double) (genotypes.size() - multimap.get(Node.EMPTY_TREE).size());
            }
            double locality = Utils.pearsonCorrelation(allDistances);
            double validLocality = Utils.pearsonCorrelation(allValidDistances);
            double[] sizes = new double[multimap.keySet().size()];
            double[] meanGenotypeDistances = new double[multimap.keySet().size()];
            int invalidIndex = -1;
            int c = 0;
            for (Node<String> phenotype : multimap.keySet()) {
                if (Node.EMPTY_TREE.equals(phenotype)) {
                    invalidIndex = c;
                }
                sizes[c] = multimap.get(phenotype).size();
                double[] distances = new double[genotypeDistances.get(phenotype).size()];
                int k = 0;
                for (Double distance : genotypeDistances.get(phenotype)) {
                    distances[k] = distance;
                    k = k + 1;
                }
                meanGenotypeDistances[c] = StatUtils.mean(distances);
                c = c + 1;
            }
            double nonUniformity = Math.sqrt(StatUtils.variance(sizes)) / StatUtils.mean(sizes);
            double nonSynonymousity = StatUtils.mean(meanGenotypeDistances)
                    / StatUtils.mean(firsts(allDistances));
            double validNonUniformity = nonUniformity;
            double validNonSynonymousity = nonSynonymousity;
            if (invalidIndex != -1) {
                double[] validSizes = new double[multimap.keySet().size() - 1];
                double[] validMeanGenotypeDistances = new double[multimap.keySet().size() - 1];
                if (invalidIndex > 0) {
                    System.arraycopy(sizes, 0, validSizes, 0, invalidIndex);
                    System.arraycopy(meanGenotypeDistances, 0, validMeanGenotypeDistances, 0, invalidIndex);
                }
                System.arraycopy(sizes, invalidIndex + 1, validSizes, invalidIndex,
                        sizes.length - invalidIndex - 1);
                System.arraycopy(meanGenotypeDistances, invalidIndex + 1, validMeanGenotypeDistances,
                        invalidIndex, meanGenotypeDistances.length - invalidIndex - 1);
                validNonUniformity = Math.sqrt(StatUtils.variance(validSizes)) / StatUtils.mean(validSizes);
                validNonSynonymousity = StatUtils.mean(validMeanGenotypeDistances)
                        / StatUtils.mean(firsts(allValidDistances));
            }
            //print the computed properties
            filePrintStream.printf("%s;%s;%d;%d;invalidity;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, invalidity);
            filePrintStream.printf("%s;%s;%d;%d;redundancy;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, redundancy);
            filePrintStream.printf("%s;%s;%d;%d;validRedundancy;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, validRedundancy);
            filePrintStream.printf("%s;%s;%d;%d;locality;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, locality);
            filePrintStream.printf("%s;%s;%d;%d;validLLocality;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, validLocality);
            filePrintStream.printf("%s;%s;%d;%d;nonUniformity;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, nonUniformity);
            filePrintStream.printf("%s;%s;%d;%d;validNonUniformity;%f %n", problemName,
                    mapperName.split("-")[0], genotypeSize, mapperMainParam, validNonUniformity);
            filePrintStream.printf("%s;%s;%d;%d;nonSynonymousity;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, nonSynonymousity);
            filePrintStream.printf("%s;%s;%d;%d;validNonSynonymousity;%f %n", problemName,
                    mapperName.split("-")[0], genotypeSize, mapperMainParam, validNonSynonymousity);
        }
    }
    if (filePrintStream != null) {
        filePrintStream.close();
    }
}

From source file:eu.itesla_project.iidm.eurostag.export.BranchParallelIndexes.java
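
The multimap here groups lines, two-winding transformers and switches by the concatenation of their lexicographically ordered bus ids; any group with two or more elements receives incrementing parallel indexes.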

public static BranchParallelIndexes build(Network network, EurostagEchExportConfig config) {
    Multimap<String, Identifiable> map = HashMultimap.create();
    for (TwoTerminalsConnectable ttc : Iterables.concat(network.getLines(),
            network.getTwoWindingsTransformers())) {
        ConnectionBus bus1 = ConnectionBus.fromTerminal(ttc.getTerminal1(), config, EchUtil.FAKE_NODE_NAME1);
        ConnectionBus bus2 = ConnectionBus.fromTerminal(ttc.getTerminal2(), config, EchUtil.FAKE_NODE_NAME2);
        if (bus1.getId().compareTo(bus2.getId()) < 0) {
            map.put(bus1.getId() + bus2.getId(), ttc);
        } else {
            map.put(bus2.getId() + bus1.getId(), ttc);
        }
    }
    for (VoltageLevel vl : network.getVoltageLevels()) {
        for (Switch s : EchUtil.getSwitches(vl, config)) {
            Bus bus1 = EchUtil.getBus1(vl, s.getId(), config);
            Bus bus2 = EchUtil.getBus2(vl, s.getId(), config);
            if (bus1.getId().compareTo(bus2.getId()) < 0) {
                map.put(bus1.getId() + bus2.getId(), s);
            } else {
                map.put(bus2.getId() + bus1.getId(), s);
            }
        }
    }
    Map<String, Character> parallelIndexes = new HashMap<>();
    for (Map.Entry<String, Collection<Identifiable>> entry : map.asMap().entrySet()) {
        List<Identifiable> eqs = new ArrayList<>(entry.getValue());
        Collections.sort(eqs, (o1, o2) -> o1.getId().compareTo(o2.getId()));
        if (eqs.size() >= 2) {
            char index = '0';
            for (Identifiable l : eqs) {
                index = incParallelIndex(index);
                parallelIndexes.put(l.getId(), index);
            }
        }
    }
    return new BranchParallelIndexes(parallelIndexes);
}

From source file:org.polarsys.reqcycle.traceability.utils.EngineUtils.java
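
A small adapter that indexes traceability links by their first source Reachable.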

public static Multimap<Reachable, Link> toFollowingMap(Iterator<Pair<Link, Reachable>> resultOfEngine) {
    Multimap<Reachable, Link> result = HashMultimap.create();
    while (resultOfEngine.hasNext()) {
        Pair<Link, Reachable> next = resultOfEngine.next();
        Reachable source = next.getFirst().getSources().iterator().next();
        result.put(source, next.getFirst());
    }
    return result;
}

From source file:com.github.sprial404.ss.item.crafting.RecipesVanilla.java
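
Because several crafting recipes may produce the same output, a HashMultimap keyed by the wrapped output stack can hold all of them.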

private static void init() {

    vanillaRecipes = HashMultimap.create();

    for (Object recipeObject : CraftingManager.getInstance().getRecipeList()) {

        if (recipeObject instanceof IRecipe) {

            IRecipe recipe = (IRecipe) recipeObject;
            ItemStack recipeOutput = recipe.getRecipeOutput();

            if (recipeOutput != null) {
                ArrayList<CustomWrappedStack> recipeInputs = RecipeHelper.getRecipeInputs(recipe);
                vanillaRecipes.put(new CustomWrappedStack(recipeOutput), recipeInputs);
            }
        }
    }
}

From source file:io.apiman.manager.ui.client.local.util.MultimapUtil.java
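
A one-line factory that wraps a single key/value pair in a fresh multimap.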

/**
 * Creates a multimap from a key and value.
 * @param key
 * @param value
 */
public static final Multimap<String, String> singleItemMap(String key, String value) {
    HashMultimap<String, String> multimap = HashMultimap.create();
    multimap.put(key, value);
    return multimap;
}

From source file:com.github.sprial404.ss.item.crafting.RecipesSmelting.java
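
The same output-keyed indexing as the previous example, applied to the furnace smelting lists (both the plain and the metadata-sensitive one).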

private static void init() {
    smeltingRecipes = HashMultimap.create();

    @SuppressWarnings("unchecked")
    Map<Integer, ItemStack> smeltingList = FurnaceRecipes.smelting().getSmeltingList();
    Map<List<Integer>, ItemStack> metaSmeltingList = FurnaceRecipes.smelting().getMetaSmeltingList();

    for (Integer i : smeltingList.keySet()) {
        smeltingRecipes.put(new CustomWrappedStack(smeltingList.get(i)),
                Arrays.asList(smeltingEnergy, new CustomWrappedStack(new ItemStack(i, 1, 0))));
    }

    for (List<Integer> idMetaPair : metaSmeltingList.keySet()) {
        if (idMetaPair.size() == 2) {
            smeltingRecipes.put(new CustomWrappedStack(metaSmeltingList.get(idMetaPair)),
                    Arrays.asList(smeltingEnergy,
                            new CustomWrappedStack(new ItemStack(idMetaPair.get(0), 1, idMetaPair.get(1)))));
        }
    }
}

From source file:eu.itesla_project.ucte.util.UcteGeographicalCode.java
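
A lazily initialized, lock-guarded index from Country to its geographical codes; a multimap fits because one country may own several codes.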

public static Collection<UcteGeographicalCode> forCountry(Country country) {
    LOCK.lock();
    try {
        if (COUNTRY_TO_GEOGRAPHICAL_CODES == null) {
            COUNTRY_TO_GEOGRAPHICAL_CODES = HashMultimap.create();
            for (UcteGeographicalCode geographicalCode : UcteGeographicalCode.values()) {
                COUNTRY_TO_GEOGRAPHICAL_CODES.put(geographicalCode.getCountry(), geographicalCode);
            }
        }
    } finally {
        LOCK.unlock();
    }
    return COUNTRY_TO_GEOGRAPHICAL_CODES.get(country);
}

From source file:eu.itesla_project.entsoe.util.EntsoeGeographicalCode.java
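
The ENTSO-E counterpart of the previous example, using the identical lazy-initialization pattern.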

public static Collection<EntsoeGeographicalCode> forCountry(Country country) {
    LOCK.lock();
    try {
        if (COUNTRY_TO_GEOGRAPHICAL_CODES == null) {
            COUNTRY_TO_GEOGRAPHICAL_CODES = HashMultimap.create();
            for (EntsoeGeographicalCode geographicalCode : EntsoeGeographicalCode.values()) {
                COUNTRY_TO_GEOGRAPHICAL_CODES.put(geographicalCode.getCountry(), geographicalCode);
            }
        }
    } finally {
        LOCK.unlock();
    }
    return COUNTRY_TO_GEOGRAPHICAL_CODES.get(country);
}