Example usage for com.google.common.collect Multimap size

List of usage examples for com.google.common.collect Multimap size

Introduction

On this page you can find example usages of the com.google.common.collect Multimap size() method.

Prototype

int size();

Source Link

Document

Returns the number of key-value pairs in this multimap.

Usage

From source file:com.android.tools.idea.gradle.editor.parser.GradleEditorModelParserFacade.java

/**
 * Processes given PSI file and fills given context
 * by {@link GradleEditorModelParseContext#getAssignments(Variable) corresponding assignments}.
 *
 * @param context  context to fill
 * @param psiFile  psi file to parse
 */
private static void fillContext(@NotNull final GradleEditorModelParseContext context,
        @NotNull PsiFile psiFile) {
    psiFile.acceptChildren(new GroovyPsiElementVisitor(new GroovyRecursiveElementVisitor() {
        @Override
        public void visitMethodCallExpression(GrMethodCallExpression methodCallExpression) {
            Pair<String, TextRange> pair = GradleEditorValueExtractor.extractMethodName(methodCallExpression);
            GrClosableBlock[] closureArguments = methodCallExpression.getClosureArguments();
            // Only handle calls whose name could be extracted and that carry at most one
            // closure argument; anything else is recursed into so nested calls still get visited.
            if (pair == null || closureArguments.length > 1) {
                super.visitMethodCallExpression(methodCallExpression);
                return;
            }
            if (closureArguments.length == 0) {
                if (methodCallExpression.getArgumentList().getAllArguments().length == 0) {
                    // This is a no-args method, so, we just register it for cases like 'mavenCentral()' or 'jcenter()'.
                    context.addCachedValue(NO_ARGS_METHOD_ASSIGNMENT_VALUE, TextRange.create(
                            pair.second.getEndOffset(), methodCallExpression.getTextRange().getEndOffset()));
                    context.registerAssignmentFromCachedData(pair.first, pair.second, methodCallExpression);
                }
                return;
            }

            // Exactly one closure argument: descend into the closure with the method name
            // pushed onto the context so nested assignments are qualified by it.
            context.onMethodEnter(pair.getFirst());
            try {
                super.visitClosure(closureArguments[0]);
            } finally {
                // Always pop, even if visiting the closure throws.
                context.onMethodExit();
            }
        }

        @Override
        public void visitApplicationStatement(GrApplicationStatement applicationStatement) {
            // Command-style calls without parentheses; only the single-argument form
            // ('name value') is registered as an assignment.
            Pair<String, TextRange> methodName = GradleEditorValueExtractor
                    .extractMethodName(applicationStatement);
            if (methodName == null) {
                return;
            }
            GroovyPsiElement[] allArguments = applicationStatement.getArgumentList().getAllArguments();
            if (allArguments.length == 1) {
                context.resetCaches();
                extractValueOrVariable(allArguments[0], context);
                context.registerAssignmentFromCachedData(methodName.getFirst(), methodName.getSecond(),
                        applicationStatement.getArgumentList());
            }
        }

        @Override
        public void visitAssignmentExpression(GrAssignmentExpression expression) {
            // General idea is to try to extract variable from the given expression and, in case of success, try to extract rvalue and
            // register corresponding assignment with them.
            context.resetCaches();
            extractValueOrVariable(expression.getLValue(), context);
            Multimap<Variable, Location> vars = context.getCachedVariables();
            // Exactly one lvalue variable must have been extracted; bail out otherwise.
            if (vars.size() != 1) {
                context.resetCaches();
                return;
            }
            Map.Entry<Variable, Location> entry = vars.entries().iterator().next();
            Variable lVariable = entry.getKey();
            Location lVariableLocation = entry.getValue();
            context.resetCaches();

            GrExpression rValue = expression.getRValue();
            if (rValue == null) {
                return;
            }
            extractValueOrVariable(rValue, context);
            // If the rvalue yielded multiple cached values, collapse them into a single
            // empty-text value spanning the rvalue's interesting range.
            if (context.getCachedValues().size() > 1) {
                Value value = new Value("",
                        new Location(context.getCurrentFile(), GradleEditorModelUtil.interestedRange(rValue)));
                context.setCachedValues(Collections.singletonList(value));
            }
            context.registerAssignmentFromCachedData(lVariable, lVariableLocation, rValue);
            context.resetCaches();
        }

        @Override
        public void visitVariable(GrVariable variable) {
            // Walks the declaration's children: first find the variable name (mIDENT),
            // then, after '=' (mASSIGN), treat following non-whitespace nodes as the initializer.
            TextRange nameRange = null;
            boolean lookForInitializer = false;
            ParserDefinition parserDefinition = LanguageParserDefinitions.INSTANCE
                    .findSingle(GroovyLanguage.INSTANCE);
            for (PsiElement e = variable.getFirstChild(); e != null; e = e.getNextSibling()) {
                ASTNode node = e.getNode();
                if (node == null) {
                    continue;
                }
                if (!lookForInitializer) {
                    if (node.getElementType() == GroovyTokenTypes.mIDENT) {
                        nameRange = e.getTextRange();
                    } else if (node.getElementType() == GroovyTokenTypes.mASSIGN) {
                        // '=' before any identifier: malformed declaration, give up.
                        if (nameRange == null) {
                            return;
                        }
                        lookForInitializer = true;
                    }
                    continue;
                }
                // Initializer ends at a newline or semicolon.
                if (node.getElementType() == GroovyTokenTypes.mNLS
                        || node.getElementType() == GroovyTokenTypes.mSEMI) {
                    break;
                }
                if (parserDefinition.getWhitespaceTokens().contains(node.getElementType())) {
                    continue;
                }
                extractValueOrVariable(e, context);
                // Collapse multi-value initializers to one value, same as in visitAssignmentExpression.
                if (context.getCachedValues().size() > 1) {
                    Value value = new Value("",
                            new Location(context.getCurrentFile(), GradleEditorModelUtil.interestedRange(e)));
                    context.setCachedValues(Collections.singletonList(value));
                }
                if (context.registerAssignmentFromCachedData(variable.getName(), nameRange, e)) {
                    return;
                }
            }
        }
    }));
}

From source file:org.apache.phoenix.hbase.index.write.IndexWriter.java

/**
 * Convert the passed index updates to {@link HTableInterfaceReference}s, grouping the
 * mutations by target table.
 *
 * @param indexUpdates (mutation, target table name) pairs from the index builder
 * @return table-reference -&gt; mutations multimap that can then be written by an {@link IndexWriter}.
 */
public static Multimap<HTableInterfaceReference, Mutation> resolveTableReferences(
        Collection<Pair<Mutation, byte[]>> indexUpdates) {
    Multimap<HTableInterfaceReference, Mutation> updates = ArrayListMultimap
            .<HTableInterfaceReference, Mutation>create();
    // Simple map to make lookups easy while we build the map of tables to create.
    // Bug fix: sized from the incoming updates — the previous code sized it from the
    // just-created (and therefore empty) multimap, which always yielded capacity 0.
    Map<ImmutableBytesPtr, HTableInterfaceReference> tables = new HashMap<ImmutableBytesPtr, HTableInterfaceReference>(
            indexUpdates.size());
    for (Pair<Mutation, byte[]> entry : indexUpdates) {
        byte[] tableName = entry.getSecond();
        ImmutableBytesPtr ptr = new ImmutableBytesPtr(tableName);
        // Reuse a single reference per distinct table name.
        HTableInterfaceReference table = tables.get(ptr);
        if (table == null) {
            table = new HTableInterfaceReference(ptr);
            tables.put(ptr, table);
        }
        updates.put(table, entry.getFirst());
    }

    return updates;
}

From source file:org.sosy_lab.cpachecker.util.predicates.interpolation.CexTraceAnalysisDirection.java

/**
 * Recursive part of the loop-driven state ordering: assigns the next abstraction state to a
 * loop-nesting level derived from {@code actLevelStack}, popping stack entries whose loop can
 * no longer be reached from the current state.
 *
 * @param pAbstractionStates all abstraction states of the analysed path
 * @param loopLevelsToStatesMap filled incrementally; holds one entry per already-ordered state
 * @param actLevelStack stack of loop-head nodes for the currently open (nested) loops
 * @param loopStructure loop information used to decide loop membership
 */
private static void createLoopDrivenStateOrdering0(final List<AbstractState> pAbstractionStates,
        final Multimap<Integer, AbstractState> loopLevelsToStatesMap, Deque<CFANode> actLevelStack,
        LoopStructure loopStructure) {

    // we are finished with the computation
    if (loopLevelsToStatesMap.size() == pAbstractionStates.size()) {
        return;
    }

    // The multimap holds one pair per processed state, so its size indexes the next
    // unprocessed abstraction state (and size-1 the previously processed one).
    AbstractState lastState = pAbstractionStates.get(loopLevelsToStatesMap.size() - 1);
    AbstractState actState = pAbstractionStates.get(loopLevelsToStatesMap.size());
    CFANode actCFANode = AbstractStates.EXTRACT_LOCATION.apply(actState);

    // Walk the loop stack from the innermost (most recently pushed) level outwards.
    Iterator<CFANode> it = actLevelStack.descendingIterator();
    while (it.hasNext()) {
        CFANode lastLoopNode = it.next();

        // check if the functions match, if yes we can simply check if the node
        // is in the loop on this level, if not we have to check the functions entry
        // point, in order to know if the current node is in the loop on this
        // level or on a lower one
        if (actCFANode.getFunctionName().equals(lastLoopNode.getFunctionName())) {
            actCFANode = getPrevFunctionNode((ARGState) actState, (ARGState) lastState,
                    lastLoopNode.getFunctionName());
        }

        // the lastLoopNode cannot be reached from the actState
        // so decrease the actLevelStack
        if (actCFANode == null || !isNodePartOfLoop(lastLoopNode, actCFANode, loopStructure)) {
            it.remove();
            continue;

            // we have a valid path to the function of the lastLoopNode
        } else {
            loopLevelsToStatesMap.put(actLevelStack.size(), actState);

            // node itself is a loophead, too, so add it also to the levels stack
            if (loopStructure.getAllLoopHeads().contains(actCFANode)) {
                actLevelStack.push(actCFANode);
            }
            createLoopDrivenStateOrdering0(pAbstractionStates, loopLevelsToStatesMap, actLevelStack,
                    loopStructure);
            return;
        }
    }

    // coming here is possible only if the stack is empty and no matching
    // loop for the current node was found
    createLoopDrivenStateOrdering(pAbstractionStates, loopLevelsToStatesMap, actLevelStack, loopStructure);
}

From source file:grakn.core.graql.reasoner.utils.ReasonerUtils.java

/**
 * NB: assumes MATCH semantics - all types and their subs are considered.
 * Computes the map of compatible {@link RelationType}s for a given set of {@link Type}s —
 * the intersection of the allowed relation-type sets of every entry type — together with
 * the compatible role types.
 *
 * @param types for which the set of compatible {@link RelationType}s is to be computed
 * @param schemaConceptConverter converter between {@link SchemaConcept} and relation type-role entries
 * @param <T> type generic
 * @return map of compatible {@link RelationType}s and their corresponding {@link Role}s
 */
public static <T extends SchemaConcept> Multimap<RelationType, Role> compatibleRelationTypesWithRoles(
        Set<T> types, SchemaConceptConverter<T> schemaConceptConverter) {
    Multimap<RelationType, Role> intersection = HashMultimap.create();
    if (types.isEmpty()) {
        return intersection;
    }
    Iterator<T> remaining = types.iterator();
    // Seed with the first type's relation map, then intersect with each subsequent type's map.
    intersection.putAll(schemaConceptConverter.toRelationMultimap(remaining.next()));
    // Further intersections are skipped once at most one entry remains.
    while (remaining.hasNext() && intersection.size() > 1) {
        intersection = multimapIntersection(intersection,
                schemaConceptConverter.toRelationMultimap(remaining.next()));
    }
    return intersection;
}

From source file:com.palantir.atlasdb.keyvalue.cassandra.CassandraVerifier.java

/**
 * Sanity-checks the cluster's datacenter/rack topology against the desired replication
 * factor, logging or throwing (depending on {@code safetyDisabled}) when the topology
 * looks unsafe for that RF.
 *
 * @param client Cassandra thrift client used to describe the ring of the test keyspace
 * @param desiredRf replication factor the caller intends to use
 * @param safetyDisabled when true, problems are logged instead of thrown
 * @return the set of datacenter names observed in the ring
 */
static Set<String> sanityCheckDatacenters(Cassandra.Client client, int desiredRf, boolean safetyDisabled)
        throws InvalidRequestException, TException {
    ensureTestKeyspaceExists(client);
    Set<String> hosts = Sets.newHashSet();

    // datacenter -> racks seen in that datacenter (HashMultimap: duplicate pairs collapse)
    Multimap<String, String> dataCenterToRack = HashMultimap.create();
    List<TokenRange> ring = client.describe_ring(CassandraConstants.SIMPLE_RF_TEST_KEYSPACE);
    for (TokenRange tokenRange : ring) {
        for (EndpointDetails details : tokenRange.getEndpoint_details()) {
            dataCenterToRack.put(details.datacenter, details.rack);
            hosts.add(details.host);
        }
    }

    // NOTE(review): Multimap.size() counts (dc, rack) PAIRS, so this branch only runs for a
    // single DC with a single rack; a single DC spread over several racks skips both checks
    // below. Confirm whether keySet().size() == 1 was the intended condition.
    if (dataCenterToRack.size() == 1) {
        String dc = dataCenterToRack.keySet().iterator().next();
        String rack = dataCenterToRack.values().iterator().next();
        if (dc.equals(CassandraConstants.DEFAULT_DC) && rack.equals(CassandraConstants.DEFAULT_RACK)
                && desiredRf > 1) {
            // We don't allow greater than RF=1 because they didn't set up their network.
            logErrorOrThrow(
                    "The cassandra cluster is not set up to be datacenter and rack aware.  "
                            + "Please set this up before running with a replication factor higher than 1.",
                    safetyDisabled);

        }
        // values().size() equals the number of distinct (dc, rack) pairs here.
        if (dataCenterToRack.values().size() < desiredRf && hosts.size() > desiredRf) {
            logErrorOrThrow("The cassandra cluster only has one DC, "
                    + "and is set up with less racks than the desired number of replicas, "
                    + "and there are more hosts than the replication factor. "
                    + "It is very likely that your rack configuration is incorrect and replicas would not be placed correctly for the failure tolerance you want.",
                    safetyDisabled);
        }
    }

    return dataCenterToRack.keySet();
}

From source file:ai.grakn.graql.internal.reasoner.utils.ReasonerUtils.java

/**
 * compute the map of compatible relation types for given types (intersection of allowed sets of relation types for each entry type)
 * and compatible role types/*from   w w  w  .j  av a2 s  .c o  m*/
 * @param types for which the set of compatible relation types is to be computed
 //* @param typeMapper function mapping a type to the set of compatible relation types
 * @param <T> type generic
 * @return map of compatible relation types and their corresponding role types
 */
public static <T extends Type> Multimap<RelationType, RoleType> getCompatibleRelationTypesWithRoles(
        Set<T> types, TypeConverter<T> typeConverter) {
    Multimap<RelationType, RoleType> compatibleTypes = HashMultimap.create();
    if (types.isEmpty())
        return compatibleTypes;
    Iterator<T> it = types.iterator();
    compatibleTypes.putAll(typeConverter.toRelationMultimap(it.next()));
    while (it.hasNext() && compatibleTypes.size() > 1) {
        compatibleTypes = multimapIntersection(compatibleTypes, typeConverter.toRelationMultimap(it.next()));
    }
    return compatibleTypes;
}

From source file:org.crypto.sse.TSet.java

/**
 * Builds the secure inverted index (encrypted multi-map) in parallel: the keyword set is
 * partitioned evenly across worker threads (at most one per available processor) and each
 * partition is processed by {@code setup(...)}. Blocks until all workers have finished and
 * propagates the first worker failure as an {@link ExecutionException}.
 *
 * @param key1 key forwarded to the per-partition {@code setup} call
 * @param key2 key forwarded to the per-partition {@code setup} call
 * @param keyENC encryption key forwarded to the per-partition {@code setup} call
 * @param lookup keyword -&gt; identifier multi-map to index
 * @param encryptedIdToRealId mapping forwarded to the per-partition {@code setup} call
 */
public static void constructEMMPar(final byte[] key1, final byte[] key2, final byte[] keyENC,
        final Multimap<String, String> lookup, final Multimap<String, String> encryptedIdToRealId)
        throws InterruptedException, ExecutionException, IOException {

    // Instantiation of B buckets in the secure inverted index
    // Initialize of the free set

    // Determination of the bucketSize B: a power of two at least lookup.size() * spaceOverhead.
    // NOTE(review): the exponent grows as 2, 4, 6, ... via 'count = 2 * j'; confirm this matches
    // the intended TSet parameter choice before altering it.
    bucketSize = lookup.size() * spaceOverhead;
    int count = 2;
    for (int j = 1; j < 1000; j++) {
        if (bucketSize > Math.pow(2, count)) {
            count = 2 * j;
        } else {
            break;
        }
    }

    bucketSize = (int) Math.pow(2, count);

    for (int i = 0; i < bucketSize; i++) {
        secureIndex.add(new ArrayList<Record>());
        free.add(new ArrayList<Integer>());
        // For each bucket initialize to S sub-buckets
        for (int j = 0; j < subBucketSize; j++) {
            // initialize all buckets with random values
            secureIndex.get(i).add(new Record(new byte[16], new byte[16]));
            free.get(i).add(j);
        }
    }

    List<String> listOfKeyword = new ArrayList<String>(lookup.keySet());
    int threads = 0;
    if (Runtime.getRuntime().availableProcessors() > listOfKeyword.size()) {
        threads = listOfKeyword.size();
    } else {
        threads = Runtime.getRuntime().availableProcessors();
    }

    ExecutorService service = Executors.newFixedThreadPool(threads);
    ArrayList<String[]> inputs = new ArrayList<String[]>(threads);

    // Partition the keywords into contiguous, roughly equal chunks; the last worker
    // also takes the remainder. (Replaces the duplicated copy loops of the original.)
    int chunkSize = listOfKeyword.size() / threads;
    for (int i = 0; i < threads; i++) {
        int start = chunkSize * i;
        int length = (i == threads - 1) ? chunkSize + listOfKeyword.size() % threads : chunkSize;
        String[] tmp = new String[length];
        for (int j = 0; j < length; j++) {
            tmp[j] = listOfKeyword.get(start + j);
        }
        inputs.add(i, tmp);
    }

    List<Future<Integer>> futures = new ArrayList<Future<Integer>>();
    for (final String[] input : inputs) {
        Callable<Integer> callable = new Callable<Integer>() {
            public Integer call() throws Exception {
                setup(key1, key2, keyENC, input, lookup, encryptedIdToRealId);
                return 1;
            }
        };
        futures.add(service.submit(callable));
    }

    service.shutdown();

    // Bug fix: the original collected the futures but never inspected them, so the method
    // could return before the workers finished and any worker exception was silently lost.
    // Future.get() waits for completion and rethrows failures as ExecutionException,
    // which this method already declares.
    for (Future<Integer> future : futures) {
        future.get();
    }

}

From source file:org.sosy_lab.cpachecker.util.predicates.interpolation.CexTraceAnalysisDirection.java

/**
 * Entry point of the loop-driven state ordering: assigns level 0 to abstraction states until a
 * loop-head CFA node is encountered, then pushes that node and delegates to
 * {@link #createLoopDrivenStateOrdering0} for the nested-loop levels.
 *
 * @param pAbstractionStates all abstraction states of the analysed path
 * @param loopLevelsToStatesMap filled incrementally; holds one entry per already-ordered state
 * @param actLevelStack stack of loop-head nodes for the currently open loops
 * @param loopStructure loop information used to recognise loop heads
 */
private static void createLoopDrivenStateOrdering(final List<AbstractState> pAbstractionStates,
        final Multimap<Integer, AbstractState> loopLevelsToStatesMap, Deque<CFANode> actLevelStack,
        LoopStructure loopStructure) {
    ImmutableSet<CFANode> loopHeads = loopStructure.getAllLoopHeads();

    // in the nodeLoopLevel map there has to be for every seen ARGState one
    // key-value pair therefore we can use this as our index
    int actARGState = loopLevelsToStatesMap.size();

    AbstractState actState = null;
    CFANode actCFANode = null;

    boolean isCFANodeALoopHead = false;

    // move on as long as there occurs no loop-head in the ARG path
    // (only runs while the level stack is empty, i.e. outside any loop)
    while (!isCFANodeALoopHead && actLevelStack.isEmpty() && actARGState < pAbstractionStates.size()) {

        actState = pAbstractionStates.get(actARGState);
        actCFANode = AbstractStates.EXTRACT_LOCATION.apply(actState);

        loopLevelsToStatesMap.put(0, actState);

        isCFANodeALoopHead = loopHeads.contains(actCFANode);

        actARGState++;
    }

    // when not finished with computing the node levels
    if (actARGState != pAbstractionStates.size()) {
        // A loop head was found: open its level and continue recursively.
        actLevelStack.push(actCFANode);
        createLoopDrivenStateOrdering0(pAbstractionStates, loopLevelsToStatesMap, actLevelStack, loopStructure);
    }
}

From source file:org.crypto.sse.IEX2Lev.java

/**
 * Builds an IEX-2Lev encrypted index: first a global multi-map over all (w, id) pairs, then —
 * for every keyword whose selectivity passes {@code filterParameter} — a local multi-map over
 * co-occurring keywords, registered in {@code dictionaryForMM}. Timing and statistics are
 * appended to several log files along the way.
 *
 * @param keys cryptographic keys; keys.get(0) and keys.get(1) are used for CMAC derivation
 * @param lookup keyword -> document identifiers
 * @param lookup2 document identifier -> keywords (the reverse of {@code lookup})
 * @param bigBlock block parameter forwarded to the RR2Lev constructions
 * @param smallBlock block parameter forwarded to the RR2Lev constructions
 * @param dataSize raised to the largest |DB(w)| observed if the supplied value is smaller
 * @return the assembled {@link IEX2Lev} structure
 */
public static IEX2Lev setup(List<byte[]> keys, Multimap<String, String> lookup,
        Multimap<String, String> lookup2, int bigBlock, int smallBlock, int dataSize)
        throws InterruptedException, ExecutionException, IOException {

    // Instantiation of the object that contains Global MM, Local MMs and
    // the dictionary
    RR2Lev[] localMultiMap = new RR2Lev[lookup.keySet().size()];
    Multimap<String, Integer> dictionaryForMM = ArrayListMultimap.create();

    Printer.debugln("Number of (w, id) pairs " + lookup.size());

    Printer.debugln("Number of keywords " + lookup.keySet().size());

    // NOTE(review): 'writer' is only closed on the success path; an exception before
    // writer.close() leaks the handle. Consider try-with-resources.
    BufferedWriter writer = new BufferedWriter(new FileWriter("logs.txt", true));

    writer.write("\n *********************Stats******* \n");
    writer.write("\n Number of (w, id) pairs " + lookup2.size());
    writer.write("\n Number of keywords " + lookup.keySet().size());

    int counter = 0;

    ///////////////////// Computing Filtering Factor and exact needed data
    ///////////////////// size/////////////////////////////

    // Histogram of |DB(w)| sizes; also raises dataSize to the maximum |DB(w)|.
    HashMap<Integer, Integer> histogram = new HashMap<Integer, Integer>();
    Printer.debugln("Number of documents " + lookup2.keySet().size());
    for (String keyword : lookup.keySet()) {
        if (histogram.get(lookup.get(keyword).size()) != null) {
            int tmp = histogram.get(lookup.get(keyword).size());
            histogram.put(lookup.get(keyword).size(), tmp + 1);
        } else {
            histogram.put(lookup.get(keyword).size(), 1);
        }

        if (dataSize < lookup.get(keyword).size()) {
            dataSize = lookup.get(keyword).size();
        }

    }

    // Construction of the global multi-map
    Printer.debugln("\nBeginning of Global MM creation \n");

    long startTime1 = System.nanoTime();

    IEX2Lev disj2 = new IEX2Lev(RR2Lev.constructEMMParGMM(keys.get(0), lookup, bigBlock, smallBlock, dataSize),
            localMultiMap, dictionaryForMM);

    long endTime1 = System.nanoTime();

    writer.write("\n Time of MM global setup time #(w, id)/#DB " + (endTime1 - startTime1) / lookup2.size());
    writer.close();

    numberPairs = numberPairs + lookup.size();

    // Construction of the local multi-map

    Printer.debugln("Start of Local Multi-Map construction");

    long startTime = System.nanoTime();

    for (String keyword : lookup.keySet()) {

        // Stats for keeping track with the evaluation

        for (int j = 0; j < 100; j++) {

            if (counter == (int) ((j + 1) * lookup.keySet().size() / 100)) {
                BufferedWriter writer2 = new BufferedWriter(new FileWriter("temp-logs.txt", true));
                writer2.write("\n Number of local multi-maps created" + j + " %");
                writer2.close();

                break;
            }
        }

        // Filter setting optional. For a setup without any filtering set
        // filterParameter to 0
        if (((double) lookup.get(keyword).size() / TextExtractPar.maxTupleSize > filterParameter)) {

            // Stats
            Printer.debugln("Keyword in LMM " + keyword);
            BufferedWriter writer3 = new BufferedWriter(new FileWriter("words-logs.txt", true));
            writer3.write("\n Keyword in LMM " + keyword);
            writer3.close();

            for (int j = 0; j < 10; j++) {

                if (counter == (int) ((j + 1) * lookup.keySet().size() / 10)) {
                    Printer.statsln("Number of total keywords processed equals " + j + "0 % \n");
                    break;
                }
            }

            // First computing V_w. Determine Doc identifiers

            // V_w = all keywords that co-occur with 'keyword' in at least one document.
            Set<String> VW = new TreeSet<String>();
            for (String idDoc : lookup.get(keyword)) {
                VW.addAll(lookup2.get(idDoc));
            }

            Multimap<String, String> secondaryLookup = ArrayListMultimap.create();

            // here we are only interested in documents in the intersection
            // between "keyword" and "word"
            for (String word : VW) {
                // Filter setting optional. For a setup without any
                // filtering set filterParameter to 0
                if (((double) lookup.get(word).size() / TextExtractPar.maxTupleSize > filterParameter)) {
                    Collection<String> l1 = new ArrayList<String>(lookup.get(word));
                    Collection<String> l2 = new ArrayList<String>(lookup.get(keyword));
                    l1.retainAll(l2);
                    secondaryLookup.putAll(word, l1);
                }
            }

            // End of VW construction
            RR2Lev.counter = 0;
            // dataSize = (int) filterParameter;
            disj2.getLocalMultiMap()[counter] = RR2Lev.constructEMMParGMM(
                    CryptoPrimitives.generateCmac(keys.get(0), keyword), secondaryLookup, bigBlock, smallBlock,
                    dataSize);
            // Dictionary key for this keyword's local multi-map ("3" + keyword is string concat).
            byte[] key3 = CryptoPrimitives.generateCmac(keys.get(1), 3 + keyword);
            numberPairs = numberPairs + secondaryLookup.size();
            dictionaryForMM.put(new String(key3), counter);

        }
        counter++;

    }

    long endTime = System.nanoTime();

    Printer.statsln("Time to construct LMM " + (endTime - startTime) / 1000000000);

    disj2.setDictionaryForMM(dictionaryForMM);
    return disj2;

}

From source file:org.crypto.sse.IEXRH2Lev.java

/**
 * Builds an IEX-RH-2Lev encrypted index (response-hiding variant): first a global multi-map
 * over all (w, id) pairs, then — for every keyword whose selectivity passes
 * {@code filterParameter} — a local multi-map over co-occurring keywords, registered in
 * {@code dictionaryForMM}. Timing and statistics are appended to several log files.
 *
 * @param keys cryptographic keys; keys.get(0) and keys.get(1) are used for CMAC derivation
 * @param lookup keyword -> document identifiers
 * @param lookup2 document identifier -> keywords (the reverse of {@code lookup})
 * @param bigBlock block parameter forwarded to the RH2Lev constructions
 * @param smallBlock block parameter forwarded to the RH2Lev constructions
 * @param dataSize raised to the largest |DB(w)| observed if the supplied value is smaller
 * @return the assembled {@link IEXRH2Lev} structure
 */
public static IEXRH2Lev setup(List<byte[]> keys, Multimap<String, String> lookup,
        Multimap<String, String> lookup2, int bigBlock, int smallBlock, int dataSize)
        throws InterruptedException, ExecutionException, IOException {

    // Instantiation of the object that contains Global MM, Local MMs and
    // the dictionary
    RH2Lev[] localMultiMap = new RH2Lev[lookup.keySet().size()];
    Multimap<String, Integer> dictionaryForMM = ArrayListMultimap.create();

    Printer.debugln("Number of (w, id) pairs " + lookup.size());

    Printer.debugln("Number of keywords " + lookup.keySet().size());

    Printer.debugln("Maximum size of |DB(w)| " + TextExtractPar.maxTupleSize);

    // NOTE(review): 'writer' is only closed on the success path; an exception before
    // writer.close() leaks the handle. Consider try-with-resources.
    BufferedWriter writer = new BufferedWriter(new FileWriter("logs.txt", true));

    writer.write("\n *********************Stats******* \n");

    writer.write("\n Number of (w, id) pairs " + lookup2.size());
    writer.write("\n Number of keywords " + lookup.keySet().size());

    int counter = 0;

    ///////////////////// Computing Filtering Factor and exact needed data
    ///////////////////// size/////////////////////////////

    // Histogram of |DB(w)| sizes; also raises dataSize to the maximum |DB(w)|.
    HashMap<Integer, Integer> histogram = new HashMap<Integer, Integer>();
    Printer.debugln("Number of documents " + lookup2.keySet().size());
    for (String keyword : lookup.keySet()) {
        if (histogram.get(lookup.get(keyword).size()) != null) {
            int tmp = histogram.get(lookup.get(keyword).size());
            histogram.put(lookup.get(keyword).size(), tmp + 1);
        } else {
            histogram.put(lookup.get(keyword).size(), 1);
        }

        if (dataSize < lookup.get(keyword).size()) {
            dataSize = lookup.get(keyword).size();
        }

    }

    // Construction of the global multi-map
    Printer.debugln("\nBeginning of Global MM creation \n");

    long startTime1 = System.nanoTime();

    IEXRH2Lev disj2 = new IEXRH2Lev(
            RH2Lev.constructEMMParGMM(keys.get(0), lookup, bigBlock, smallBlock, dataSize), localMultiMap,
            dictionaryForMM);

    long endTime1 = System.nanoTime();

    writer.write("\n Time of MM global setup time #(w, id)/#DB " + (endTime1 - startTime1) / lookup2.size());
    writer.close();

    numberPairs = numberPairs + lookup.size();

    // Construction of the local multi-map

    Printer.debugln("Start of Local Multi-Map construction");

    long startTime = System.nanoTime();

    for (String keyword : lookup.keySet()) {

        // Stats for keeping track with the evaluation

        for (int j = 0; j < 100; j++) {

            if (counter == (int) ((j + 1) * lookup.keySet().size() / 100)) {
                BufferedWriter writer2 = new BufferedWriter(new FileWriter("temp-logs.txt", true));
                writer2.write("\n Number of local multi-maps created" + j + " %");
                writer2.close();

                break;
            }
        }

        // Filter setting optional. For a setup without any filtering set
        // filterParameter to 0
        if (((double) lookup.get(keyword).size() / TextExtractPar.maxTupleSize > filterParameter)) {

            // Stats
            Printer.debugln("Keyword in LMM " + keyword);
            BufferedWriter writer3 = new BufferedWriter(new FileWriter("words-logs.txt", true));
            writer3.write("\n Keyword in LMM " + keyword);
            writer3.close();

            for (int j = 0; j < 10; j++) {

                if (counter == (int) ((j + 1) * lookup.keySet().size() / 10)) {
                    Printer.statsln("Number of total keywords processed equals " + j + "0 % \n");
                    break;
                }
            }

            // First computing V_w. Determine Doc identifiers

            // V_w = all keywords that co-occur with 'keyword' in at least one document.
            Set<String> VW = new TreeSet<String>();
            for (String idDoc : lookup.get(keyword)) {
                VW.addAll(lookup2.get(idDoc));
            }

            Multimap<String, String> secondaryLookup = ArrayListMultimap.create();

            // here we are only interested in documents in the intersection
            // between "keyword" and "word"
            for (String word : VW) {
                // Filter setting optional. For a setup without any
                // filtering set filterParameter to 0
                if (((double) lookup.get(word).size() / TextExtractPar.maxTupleSize > filterParameter)) {
                    Collection<String> l1 = new ArrayList<String>(lookup.get(word));
                    Collection<String> l2 = new ArrayList<String>(lookup.get(keyword));
                    l1.retainAll(l2);
                    secondaryLookup.putAll(word, l1);
                }
            }

            // End of VW construction
            RH2Lev.counter = 0;

            // dataSize = (int) filterParameter;
            EMM2Lev.eval = 4 + keyword;
            EMM2Lev.lmm = true;

            disj2.getLocalMultiMap()[counter] = RH2Lev.constructEMMParGMM(
                    CryptoPrimitives.generateCmac(keys.get(0), keyword), secondaryLookup, bigBlock, smallBlock,
                    dataSize);
            // Dictionary key for this keyword's local multi-map ("3" + keyword is string concat).
            byte[] key3 = CryptoPrimitives.generateCmac(keys.get(1), 3 + keyword);
            numberPairs = numberPairs + secondaryLookup.size();
            dictionaryForMM.put(new String(key3), counter);

        }
        counter++;

    }

    long endTime = System.nanoTime();

    Printer.statsln("Time to construct LMM " + (endTime - startTime) / 1000000000);

    disj2.setDictionaryForMM(dictionaryForMM);
    return disj2;

}