Example usage for com.google.common.collect HashBiMap create

List of usage examples for com.google.common.collect HashBiMap create

Introduction

On this page you can find example usages of com.google.common.collect HashBiMap.create, drawn from several open-source projects.

Prototype

public static <K, V> HashBiMap<K, V> create()
public static <K, V> HashBiMap<K, V> create(int expectedSize)
public static <K, V> HashBiMap<K, V> create(Map<? extends K, ? extends V> map)

Document

Constructs a new bimap containing the initial values from map. The create() and create(int expectedSize) overloads, which also appear in the examples below, construct an empty bimap, the latter with enough capacity for the expected number of entries.
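
A minimal, self-contained sketch of the API (the class name and sample entries are illustrative, not taken from the projects below):

import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
import com.google.common.collect.ImmutableMap;

public class HashBiMapCreateSketch {
    public static void main(String[] args) {
        // create(Map): copies the initial entries from an existing map
        BiMap<String, Integer> ports = HashBiMap.create(ImmutableMap.of("http", 80, "https", 443));

        // both directions are available; inverse() is a live view, not a copy
        System.out.println(ports.get("http"));        // 80
        System.out.println(ports.inverse().get(443)); // https

        // values must stay unique: put() throws IllegalArgumentException on a
        // duplicate value, while forcePut() silently evicts the old mapping
        ports.forcePut("h2", 443); // removes the https -> 443 entry
    }
}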

Usage

From source file: uk.ac.susx.tag.method51.twitter.NBClassifierIGImpl.java

public IGPackage calculateIG(List<Tweet> tweets) {

    Date latest = new Date(0);
    Date earliest = new Date();

    for (Tweet tweet : tweets) {

        if (tweet.getCreated().after(latest)) {
            latest = tweet.getCreated();
        }
        if (tweet.getCreated().before(earliest)) {
            earliest = tweet.getCreated();
        }
    }

    List<ProcessedInstance> processedInstances = getProcessedInstances(tweets);
    classify(processedInstances);

    Map<String, List<String>> igFeatures = Querying
            .labelledFeatures2Strings(Querying.queryFeatures(processedInstances, null, 50), pipeline);

    String s = new Gson().toJson(igFeatures);
    System.out.println(s);

    IntSet vocab = uk.ac.susx.tag.classificationframework.Util.inferVocabulary(processedInstances);
    BiMap<Integer, Integer> features = HashBiMap.create(vocab.size());

    {
        int i = 0;
        for (int j : vocab) {
            features.put(i, j);
            ++i;
        }
    }

    int numClasses = classifier.getLabels().size();
    int numFeatures = features.size();
    int numInstances = processedInstances.size();
    int[][] classFeatureCounts = new int[numClasses][numFeatures];
    int[] featureCounts = new int[numFeatures];
    int[] classCounts = new int[numClasses];
    int[] featureClasses = new int[numFeatures];
    Arrays.fill(featureClasses, -1);

    countClassFeatures(classCounts, featureCounts, classFeatureCounts, processedInstances, features);

    final double[] IG = calcInfoGain(classCounts, featureCounts, classFeatureCounts, featureClasses, 0.1);

    final Integer[] sortedIdx = new Integer[IG.length];
    for (int i = 0; i < sortedIdx.length; ++i) {
        sortedIdx[i] = i;
    }

    Arrays.sort(sortedIdx, new Comparator<Integer>() {
        @Override
        public int compare(final Integer o1, final Integer o2) {
            return Double.compare(IG[o2], IG[o1]);
        }
    });

    int featureMass = 0;
    for (int f : featureCounts) {
        featureMass += f;
    }

    Map<String, List<Term>> out = new HashMap<>();

    int[] numIGFeaturesPerClass = new int[numClasses];

    for (int i = 0; i < classifier.getLabels().size(); i++) {

        List<Term> terms = new ArrayList<>();

        String label = pipeline.labelString(i);

        //System.out.println("---------------");
        //System.out.println(label);

        //System.out.println();

        int n = 0;
        int j = 0;
        while (n < numIGFeatures && j < IG.length) {

            int idx = sortedIdx[j];

            if (featureClasses[idx] == i) {
                String feature = pipeline.featureString(features.get(idx));

                double score = IG[idx];
                int count = featureCounts[idx];
                //int count = classFeatureCounts[i][idx];
                double PCint = (classCounts[i] - classFeatureCounts[i][idx])
                        / (double) (numInstances - featureCounts[idx]);
                double PCif = classFeatureCounts[i][idx] / (double) featureCounts[idx];
                double PfCi = classFeatureCounts[i][idx] / (double) classCounts[i];
                double Pf = featureCounts[idx] / (double) numInstances;

                //System.out.print(feature + " " + score + " " + featureClasses[idx] + " " + featureCounts[idx] + " ");
                ++n;

                Term term = new Term();
                term.setText(feature.replace("_", " "));
                term.setClassIndex(i);
                term.setInfo("score", score);
                term.setInfo("count", count);
                term.setInfo("p(c|nf)", PCint);
                term.setInfo("p(c|f)", PCif);
                term.setInfo("p(f|c)", PfCi);
                term.setInfo("p(f)", Pf);
                term.setInfo("idx", idx);

                terms.add(term);
            }
            ++j;
        }

        numIGFeaturesPerClass[i] = n;

        out.put(label, terms);

        //System.out.println();
    }

    //System.out.println();
    //System.out.println();

    if (print) {
        System.out.println(latest.toString());

        System.out.println("Num Docs " + tweets.size());
        for (int j = 0; j < numClasses; ++j) {
            System.out.println("P(C" + j + ") " + classCounts[j] / (double) numInstances);
        }

        System.out.println(String.format("%-6s %-30s %-10s %-10s %-10s %-10s %-10s %-10s %-10s", "Class",
                "Term", "IG", "Count", "p(f|c)", "p(nf|c)", "p(c|f)", "p(c|nf)", "p(f)"));

        for (int f = 0; f < numIGFeatures; ++f) {

            for (int i = 0; i < numClasses; ++i) {
                String label = pipeline.labelString(i);

                if (numIGFeaturesPerClass[i] <= f) {
                    continue;
                }
                Term term = out.get(label).get(f);

                int idx = (Integer) term.getInfo("idx");

                for (int j = 0; j < numClasses; ++j) {

                    double PCint = (classCounts[j] - classFeatureCounts[j][idx])
                            / (double) (numInstances - featureCounts[idx]);
                    double PCif = classFeatureCounts[j][idx] / (double) featureCounts[idx];

                    // featureCounts is indexed by feature id, hence idx rather
                    // than the class index j
                    double PnfCi = (featureCounts[idx] - classFeatureCounts[j][idx])
                            / (double) (numInstances - classCounts[j]);
                    double PfCi = classFeatureCounts[j][idx] / (double) classCounts[j];

                    double Pf = featureCounts[idx] / (double) numInstances;
                    //int count = classFeatureCounts[i][idx];
                    int count = featureCounts[idx];

                    double score = (Double) term.getInfo("score");

                    System.out.println(
                            String.format("%-6d %-30s %-10.5f %-10d %-10.5f %-10.5f %-10.5f %-10.5f %-10.5f", j,
                                    term.getText(), score, count, PfCi, PnfCi, PCif, PCint, Pf));
                }
            }
        }

        System.out.println();
        System.out.println();
    }

    IGPackage igPackage = new IGPackage();
    igPackage.classTerms = out;
    igPackage.earliest = earliest;
    igPackage.latest = latest;

    igPackage.vol = numInstances;

    return igPackage;
}
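
In the example above, create(vocab.size()) pre-sizes the bimap for the vocabulary, and the bimap keeps dense indices and raw feature ids mutually resolvable (presumably countClassFeatures uses the inverse() view for the reverse lookup). A stripped-down sketch of that indexing pattern, with hypothetical names:

import java.util.List;

import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;

public class DenseIndexSketch {
    // hypothetical helper: assigns dense indices 0..n-1 to arbitrary ids
    static BiMap<Integer, Integer> denseIndex(List<Integer> ids) {
        BiMap<Integer, Integer> index = HashBiMap.create(ids.size()); // pre-sized
        int i = 0;
        for (int id : ids) {
            index.put(i++, id);
        }
        return index; // index.inverse().get(id) recovers a dense position
    }
}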

From source file: org.mycore.tools.MCRTopologicalSort.java

/**
 * parses MCRObject xml files for parent links
 * and creates the graph
 * 
 * uses StAX cursor API (higher performance)
 */
public void prepareData(String[] files, File dir) {
    nodes = HashBiMap.create(files.length);
    edgeSources.clear();

    String file = null;
    Map<Integer, String> parentNames = new HashMap<Integer, String>();
    XMLInputFactory xmlInputFactory = XMLInputFactory.newInstance();
    for (int i = 0; i < files.length; i++) {
        file = files[i];

        try (FileInputStream fis = new FileInputStream(new File(dir, file))) {
            XMLStreamReader xmlStreamReader = xmlInputFactory.createXMLStreamReader(fis);
            while (xmlStreamReader.hasNext()) {
                switch (xmlStreamReader.getEventType()) {
                case XMLStreamConstants.START_ELEMENT:
                    if (xmlStreamReader.getLocalName().equals("mycoreobject")) {
                        nodes.forcePut(i, xmlStreamReader.getAttributeValue(null, "ID"));
                    } else if (xmlStreamReader.getLocalName().equals("parent")) {
                        parentNames.put(i,
                                xmlStreamReader.getAttributeValue("http://www.w3.org/1999/xlink", "href"));
                    } else if (xmlStreamReader.getLocalName().equals("metadata")) {
                        break;
                    }
                    break;

                case XMLStreamConstants.END_ELEMENT:
                    if (xmlStreamReader.getLocalName().equals("parents")) {
                        break;
                    }
                    break;
                }
                xmlStreamReader.next();
            }

        } catch (XMLStreamException | IOException e) {
            e.printStackTrace();
        }
    }

    //build edges
    for (int source : parentNames.keySet()) {
        Integer target = nodes.inverse().get(parentNames.get(source));
        if (target != null) {
            addEdge(source, target);
        }
    }

    dirty = false;
}
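
The forcePut call above is deliberate: on a BiMap, a plain put() rejects a value that is already mapped from another key, while forcePut() quietly evicts the conflicting entry. A minimal sketch of the difference (the IDs are illustrative):

import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;

public class ForcePutSketch {
    public static void main(String[] args) {
        BiMap<Integer, String> nodes = HashBiMap.create(2);
        nodes.put(0, "mycore_object_0001");
        nodes.forcePut(1, "mycore_object_0001"); // evicts the 0 -> ... entry
        System.out.println(nodes); // {1=mycore_object_0001}
        // nodes.put(2, "mycore_object_0001"); // would throw IllegalArgumentException
    }
}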

From source file: edu.cmu.lti.oaqa.baseqa.providers.ml.classifiers.LibLinearProvider.java

@Override
public boolean initialize(ResourceSpecifier aSpecifier, Map<String, Object> aAdditionalParams)
        throws ResourceInitializationException {
    boolean ret = super.initialize(aSpecifier, aAdditionalParams);
    // feature id map
    if ((featIndexFile = new File((String) getParameterValue("feat-index-file"))).exists()) {
        try {
            fid2feat = ClassifierProvider.loadIdKeyMap(featIndexFile);
        } catch (IOException e) {
            throw new ResourceInitializationException(e);
        }
    }
    // label id map
    if ((labelIndexFile = new File((String) getParameterValue("label-index-file"))).exists()) {
        try {
            lid2label = HashBiMap.create(ClassifierProvider.loadIdKeyMap(labelIndexFile));
            label2lid = lid2label.inverse();
        } catch (IOException e) {
            throw new ResourceInitializationException(e);
        }
    }
    // model
    if ((modelFile = new File((String) getParameterValue("model-file"))).exists()) {
        try {
            model = Model.load(modelFile);
        } catch (IOException e) {
            throw new ResourceInitializationException(e);
        }
    }
    balanceWeight = (boolean) getParameterValue("balance-weight");
    // parameter
    SolverType solver = SolverType.valueOf((String) getParameterValue("solver-type")); // -s 0
    double C = 1.0; // cost of constraints violation
    double eps = 0.01; // stopping criteria
    parameter = new Parameter(solver, C, eps);
    Linear.disableDebugOutput();
    return ret;
}
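
Deriving label2lid as lid2label.inverse() means the two maps are views of the same bimap: an entry added through either side is immediately visible through the other. A small sketch of that behavior (entries are illustrative):

import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
import com.google.common.collect.ImmutableMap;

public class InverseViewSketch {
    public static void main(String[] args) {
        BiMap<Integer, String> lid2label = HashBiMap.create(ImmutableMap.of(0, "yes"));
        BiMap<String, Integer> label2lid = lid2label.inverse(); // live view, no copy
        lid2label.put(1, "no");
        System.out.println(label2lid.get("no")); // 1 - the view stays in sync
    }
}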

From source file: de.unijena.bioinf.ChemistryBase.ms.ft.AbstractFragmentationGraph.java

/**
 * maps all vertices from graph1 to graph2. Returns a map (fragment a -> fragment b) where a is a fragment of
 * graph1 and b is a corresponding fragment from graph 2. Two fragments belong to each other if they have the same
 * molecular formula.
 */
public static BiMap<Fragment, Fragment> createFragmentMapping(AbstractFragmentationGraph graph1,
        AbstractFragmentationGraph graph2) {

    if (graph1.numberOfVertices() > graph2.numberOfVertices())
        return createFragmentMapping(graph2, graph1).inverse();
    final HashMap<MolecularFormula, Fragment> formulas = new HashMap<MolecularFormula, Fragment>(
            graph1.numberOfVertices());
    final BiMap<Fragment, Fragment> bimap = HashBiMap
            .create(Math.min(graph1.numberOfVertices(), graph2.numberOfVertices()));

    for (Fragment f : graph1.getFragments()) {
        formulas.put(f.getFormula(), f);
    }

    for (Fragment f : graph2.getFragmentsWithoutRoot()) {
        if (formulas.containsKey(f.getFormula())) {
            bimap.put(formulas.get(f.getFormula()), f);
        }
    }
    return bimap;
}

From source file: org.apache.mahout.classifier.sequencelearning.hmm.HmmModel.java

/**
 * Get a copy of this model
 */
@Override
public HmmModel clone() {
    HmmModel model = new HmmModel(transitionMatrix.clone(), emissionMatrix.clone(),
            initialProbabilities.clone());
    if (hiddenStateNames != null) {
        model.hiddenStateNames = HashBiMap.create(hiddenStateNames);
    }
    if (outputStateNames != null) {
        model.outputStateNames = HashBiMap.create(outputStateNames);
    }
    return model;
}
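
Here create(map) acts as a defensive copy: the cloned model gets its own bimap, so later changes to either model's state names cannot leak into the other. A short sketch of that property (state names are illustrative):

import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;

public class BiMapCopySketch {
    public static void main(String[] args) {
        BiMap<String, Integer> original = HashBiMap.create();
        original.put("rainy", 0);
        BiMap<String, Integer> copy = HashBiMap.create(original);
        copy.put("sunny", 1);
        System.out.println(original.containsKey("sunny")); // false: independent copy
    }
}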

From source file: org.janusgraph.graphdb.database.serialize.StandardSerializer.java

public StandardSerializer() {
    handlers = new HashMap<>(60);
    registrations = HashBiMap.create(60);

    //Setup
    registerClassInternal(1, Object.class, new ObjectSerializer());

    //Primitive data types
    registerClassInternal(10, Byte.class, new ByteSerializer());
    registerClassInternal(11, Short.class, new ShortSerializer());
    registerClassInternal(12, Integer.class, new IntegerSerializer());
    registerClassInternal(13, Long.class, new LongSerializer());

    registerClassInternal(14, Character.class, new CharacterSerializer());
    registerClassInternal(15, Boolean.class, new BooleanSerializer());
    registerClassInternal(16, Date.class, new DateSerializer());

    registerClassInternal(17, Geoshape.class, new Geoshape.GeoshapeSerializer());
    registerClassInternal(18, String.class, new StringSerializer()); //supports null serialization
    registerClassInternal(19, Float.class, new FloatSerializer());
    registerClassInternal(20, Double.class, new DoubleSerializer());
    registerClassInternal(21, UUID.class, new UUIDSerializer());

    //Arrays (support null serialization)
    registerClassInternal(22, byte[].class, new ByteArraySerializer());
    registerClassInternal(23, short[].class, new ShortArraySerializer());
    registerClassInternal(24, int[].class, new IntArraySerializer());
    registerClassInternal(25, long[].class, new LongArraySerializer());
    registerClassInternal(26, float[].class, new FloatArraySerializer());
    registerClassInternal(27, double[].class, new DoubleArraySerializer());
    registerClassInternal(28, char[].class, new CharArraySerializer());
    registerClassInternal(29, boolean[].class, new BooleanArraySerializer());
    registerClassInternal(30, String[].class, new StringArraySerializer());

    //Needed by JanusGraph
    registerClassInternal(41, TypeDefinitionCategory.class, new EnumSerializer<>(TypeDefinitionCategory.class));
    registerClassInternal(42, JanusGraphSchemaCategory.class,
            new EnumSerializer<>(JanusGraphSchemaCategory.class));
    registerClassInternal(43, ParameterType.class, new EnumSerializer<>(ParameterType.class));
    registerClassInternal(44, RelationCategory.class, new EnumSerializer<>(RelationCategory.class));
    registerClassInternal(45, Order.class, new EnumSerializer<>(Order.class));
    registerClassInternal(46, Multiplicity.class, new EnumSerializer<>(Multiplicity.class));
    registerClassInternal(47, Cardinality.class, new EnumSerializer<>(Cardinality.class));
    registerClassInternal(48, Direction.class, new EnumSerializer<>(Direction.class));
    registerClassInternal(49, ElementCategory.class, new EnumSerializer<>(ElementCategory.class));
    registerClassInternal(50, ConsistencyModifier.class, new EnumSerializer<>(ConsistencyModifier.class));
    registerClassInternal(51, SchemaStatus.class, new EnumSerializer<>(SchemaStatus.class));
    registerClassInternal(52, LogTxStatus.class, new EnumSerializer<>(LogTxStatus.class));
    registerClassInternal(53, MgmtLogType.class, new EnumSerializer<>(MgmtLogType.class));
    registerClassInternal(54, TimestampProviders.class, new EnumSerializer<>(TimestampProviders.class));
    registerClassInternal(55, TimeUnit.class, new EnumSerializer<>(TimeUnit.class));
    registerClassInternal(56, Mapping.class, new EnumSerializer<>(Mapping.class));
    registerClassInternal(57, ConflictAvoidanceMode.class, new EnumSerializer<>(ConflictAvoidanceMode.class));

    registerClassInternal(60, Class.class, new ClassSerializer());
    registerClassInternal(61, Parameter.class, new ParameterSerializer());
    registerClassInternal(62, Parameter[].class, new ParameterArraySerializer());
    registerClassInternal(63, TypeDefinitionDescription.class, new TypeDefinitionDescriptionSerializer());
    //Needed for configuration and transaction logging
    registerClassInternal(64, Duration.class, new DurationSerializer());
    registerClassInternal(65, Instant.class, new InstantSerializer());
    registerClassInternal(66, StandardTransactionId.class, new StandardTransactionIdSerializer());
    registerClassInternal(67, TraverserSet.class, new SerializableSerializer());
    registerClassInternal(68, HashMap.class, new SerializableSerializer());

}

From source file: mtsar.processors.answer.KOSAggregator.java

@Override
@Nonnull
public Map<Integer, AnswerAggregation> aggregate(@Nonnull Collection<Task> tasks) {
    requireNonNull(stage, "the stage provider should not provide null");
    checkArgument(tasks.stream().allMatch(SINGLE_BINARY_TYPE),
            "tasks should be of the type single and have only two possible answers");
    if (tasks.isEmpty())
        return Collections.emptyMap();

    final List<Answer> answers = answerDAO.listForStage(stage.getId());
    if (answers.isEmpty())
        return Collections.emptyMap();

    final Map<Integer, Task> taskMap = taskDAO.listForStage(stage.getId()).stream().filter(SINGLE_BINARY_TYPE)
            .collect(Collectors.toMap(Task::getId, Function.identity()));

    final Map<Integer, BiMap<String, Short>> answerIndex = taskMap.values().stream()
            .collect(Collectors.toMap(Task::getId, task -> {
                final BiMap<String, Short> map = HashBiMap.create(2);
                map.put(task.getAnswers().get(0), (short) -1);
                map.put(task.getAnswers().get(1), (short) +1);
                return map;
            }));

    /* rows are tasks IDs, columns are worker IDs, values are answers */
    final Table<Integer, Integer, Short> graph = HashBasedTable.create();

    for (final Answer answer : answers) {
        if (!answer.getType().equalsIgnoreCase(AnswerDAO.ANSWER_TYPE_ANSWER))
            continue;
        graph.put(answer.getTaskId(), answer.getWorkerId(),
                answerIndex.get(answer.getTaskId()).get(answer.getAnswers().get(0)));
    }

    final Map<Integer, Double> estimations = converge(graph, getKMax());
    return estimations.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, estimation -> {
        final String answer = answerIndex.get(estimation.getKey()).inverse()
                .get(estimation.getValue() < 0 ? (short) -1 : (short) +1);
        return new AnswerAggregation.Builder().setTask(taskMap.get(estimation.getKey())).addAnswers(answer)
                .build();
    }));
}

From source file: cc.kave.commons.pointsto.analysis.inclusion.graph.ConstraintGraph.java

ConstraintGraph(Map<DistinctReference, SetVariable> referenceVariables, DeclarationLambdaStore declLambdaStore,
        Map<SetExpression, ConstraintNode> constraintNodes, Set<SetVariable> volatileEntities,
        ContextFactory contextFactory) {
    this.referenceVariables = HashBiMap.create(referenceVariables);
    this.declLambdaStore = new DeclarationLambdaStore(declLambdaStore, this::getVariable,
            new Allocator(constraintResolver, declLambdaStore.getVariableFactory()));
    this.constraintNodes = constraintNodes;
    this.volatileEntities = volatileEntities;
    this.contextFactory = contextFactory;
}

From source file: com.yahoo.elide.core.EntityDictionary.java

/**
 * Instantiate a new EntityDictionary with the provided set of checks. In addition all of the checks
 * in {@link com.yahoo.elide.security.checks.prefab} are mapped to {@code Prefab.CONTAINER.CHECK}
 * (e.g. {@code @ReadPermission(expression="Prefab.Role.All")}
 * or {@code @ReadPermission(expression="Prefab.Common.UpdateOnCreate")})
 * @param checks a map that links the identifiers used in the permission expression strings
 *               to their implementing classes
 */
public EntityDictionary(Map<String, Class<? extends Check>> checks) {
    checkNames = Maps.synchronizedBiMap(HashBiMap.create(checks));

    addPrefabCheck("Prefab.Role.All", Role.ALL.class);
    addPrefabCheck("Prefab.Role.None", Role.NONE.class);
    addPrefabCheck("Prefab.Collections.AppendOnly", AppendOnly.class);
    addPrefabCheck("Prefab.Collections.RemoveOnly", RemoveOnly.class);
    addPrefabCheck("Prefab.Common.UpdateOnCreate", Common.UpdateOnCreate.class);
}
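
HashBiMap itself is not thread-safe, so the constructor wraps it with Maps.synchronizedBiMap before it is shared. A minimal sketch of the same pattern (the entries are illustrative; iterating the wrapped bimap would still require manual synchronization):

import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;

public class SynchronizedBiMapSketch {
    public static void main(String[] args) {
        BiMap<String, String> checkNames = Maps.synchronizedBiMap(
                HashBiMap.create(ImmutableMap.of("Prefab.Role.All", "allChecks")));
        checkNames.put("Prefab.Role.None", "noChecks"); // safe for concurrent writers
    }
}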

From source file: org.apache.rya.accumulo.pcj.iterators.PCJKeyToCrossProductBindingSetIterator.java

public PCJKeyToCrossProductBindingSetIterator(Scanner scanner, List<BindingSet> crossProductBs,
        Map<String, Value> constantConstraints, Set<String> unAssuredVariables, Map<String, String> pcjVarMap) {
    this.crossProductBs = crossProductBs;
    this.scanner = scanner;
    this.iterator = scanner.iterator();
    this.pcjVarMap = HashBiMap.create(pcjVarMap).inverse();
    this.constantConstraints = constantConstraints;
    this.crossProductBsExist = crossProductBs.size() > 0;
    this.constantConstraintsExist = constantConstraints.size() > 0;
    this.unAssuredVariables = unAssuredVariables;
}
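
The constructor above uses a common one-liner: copy a one-to-one Map into a HashBiMap purely to invert it. Note that create(map) throws IllegalArgumentException if the source map's values are not unique. A sketch of just that idiom (entries are illustrative):

import java.util.Map;

import com.google.common.collect.HashBiMap;
import com.google.common.collect.ImmutableMap;

public class InvertMapSketch {
    public static void main(String[] args) {
        Map<String, String> pcjVarMap = ImmutableMap.of("?x", "a", "?y", "b");
        Map<String, String> inverted = HashBiMap.create(pcjVarMap).inverse();
        System.out.println(inverted.get("a")); // ?x
    }
}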