Example usage for org.apache.commons.lang3.tuple MutablePair of

Introduction

On this page you can find example usage for org.apache.commons.lang3.tuple MutablePair of.

Prototype

public static <L, R> MutablePair<L, R> of(final L left, final R right) 

Document

Obtains a mutable pair of two objects, inferring the generic types.

This factory allows the pair to be created using inference to obtain the generic types.
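
For reference, here is a minimal, self-contained sketch of the factory in isolation (the class and variable names are illustrative, not taken from the examples below):

import org.apache.commons.lang3.tuple.MutablePair;

public class MutablePairOfDemo {
    public static void main(String[] args) {
        // The generic types <String, Integer> are inferred from the arguments.
        MutablePair<String, Integer> pair = MutablePair.of("count", 1);

        // Unlike ImmutablePair, both sides can be reassigned after creation.
        pair.setLeft("total");
        pair.setRight(2);

        // Pair implements Map.Entry, so getKey()/getValue()/setValue() also work;
        // setValue() replaces the right element and returns the previous one.
        Integer previous = pair.setValue(3); // previous == 2

        // Either argument may be null, a property several examples below rely on.
        MutablePair<String, Integer> unset = MutablePair.of(null, null);
        unset.setRight(previous);

        System.out.println(pair);  // (total,3)
        System.out.println(unset); // (null,2)
    }
}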

Usage

From source file:forge.limited.BoosterDraftAI.java

/**
 * Sort cards by rank. Note that if the pack has cards from different editions,
 * they could have the same rank. Basic lands and unrecognised cards are
 * rated worse than all other possible picks.
 *
 * @param chooseFrom
 *            List of cards
 * @return map of rankings
 */
private static List<Pair<PaperCard, Double>> rankCards(final Iterable<PaperCard> chooseFrom) {
    final List<Pair<PaperCard, Double>> rankedCards = new ArrayList<Pair<PaperCard, Double>>();
    for (final PaperCard card : chooseFrom) {
        Double rank;
        if (MagicColor.Constant.BASIC_LANDS.contains(card.getName())) {
            rank = RANK_UNPICKABLE;
        } else {
            rank = DraftRankCache.getRanking(card.getName(), card.getEdition());
            if (rank == null) {
                System.out.println("Draft Rankings - Card Not Found: " + card.getName());
                rank = RANK_UNPICKABLE;
            }
        }

        rankedCards.add(MutablePair.of(card, rank));
    }
    return rankedCards;
}

From source file:ml.shifu.shifu.core.TreeModel.java

/**
 * Get feature importance of current model.
 *
 * @return map of feature importance, key is column index.
 */
public Map<Integer, MutablePair<String, Double>> getFeatureImportances() {
    Map<Integer, MutablePair<String, Double>> importancesSum = new HashMap<Integer, MutablePair<String, Double>>();
    Map<Integer, String> nameMapping = this.getIndependentTreeModel().getNumNameMapping();
    int treeSize = this.getIndependentTreeModel().getTrees().size();

    // only single-tree models are supported here; bagging models are rejected below
    if (this.getIndependentTreeModel().getTrees().size() != 1) {
        throw new RuntimeException(
                "Bagging model cannot be supported in Tree Model one element feature importance computing.");
    }

    for (TreeNode tree : this.getIndependentTreeModel().getTrees().get(0)) {
        // get the current tree's feature importance first
        Map<Integer, Double> subImportances = tree.computeFeatureImportance();
        // merge feature importance from different trees
        for (Entry<Integer, Double> entry : subImportances.entrySet()) {
            String featureName = nameMapping.get(entry.getKey());
            MutablePair<String, Double> importance = MutablePair.of(featureName, entry.getValue());
            if (!importancesSum.containsKey(entry.getKey())) {
                importance.setValue(importance.getValue() / treeSize);
                importancesSum.put(entry.getKey(), importance);
            } else {
                MutablePair<String, Double> current = importancesSum.get(entry.getKey());
                current.setValue(current.getValue() + importance.getValue() / treeSize);
                importancesSum.put(entry.getKey(), current);
            }
        }
    }
    return importancesSum;
}
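
Condensed, the merge step works because MutablePair implements Map.Entry: setValue() rewrites the right element in place, so an existing entry can be accumulated into directly instead of being replaced. A minimal sketch of just that pattern, with made-up names:

import java.util.HashMap;
import java.util.Map;
import org.apache.commons.lang3.tuple.MutablePair;

public class ImportanceAccumulator {
    private final Map<Integer, MutablePair<String, Double>> sums = new HashMap<>();

    // Record one (already averaged) contribution for a column: create the
    // entry on first sight, then mutate it in place on later calls.
    void add(int columnIndex, String featureName, double contribution) {
        MutablePair<String, Double> existing = sums.get(columnIndex);
        if (existing == null) {
            sums.put(columnIndex, MutablePair.of(featureName, contribution));
        } else {
            existing.setValue(existing.getValue() + contribution);
        }
    }

    public static void main(String[] args) {
        ImportanceAccumulator acc = new ImportanceAccumulator();
        acc.add(3, "age", 0.25);
        acc.add(3, "age", 0.25);
        System.out.println(acc.sums); // {3=(age,0.5)}
    }
}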

From source file:forge.deck.io.OldDeckParser.java

private void convertConstructedAndSealed() {
    boolean allowDeleteUnsupportedConstructed = false;
    final Map<String, Pair<DeckGroup, MutablePair<File, File>>> sealedDecks = new TreeMap<String, Pair<DeckGroup, MutablePair<File, File>>>(
            String.CASE_INSENSITIVE_ORDER);

    for (final File f : this.deckDir.listFiles(DeckStorage.DCK_FILE_FILTER)) {
        boolean importedOk = false;

        final List<String> fileLines = FileUtil.readFile(f);
        final Map<String, List<String>> sections = FileSection.parseSections(fileLines);
        final DeckFileHeader dh = DeckSerializer.readDeckMetadata(sections);
        String name = dh.getName();

        if (dh.isCustomPool()) {
            try {
                this.cube.add(DeckSerializer.fromSections(sections));
                importedOk = true;
            } catch (final NoSuchElementException ex) {
                if (!allowDeleteUnsupportedConstructed) {
                    final String msg = String.format(
                            "Can not convert deck '%s' for some unsupported cards it contains. %n%s%n%nMay Forge delete all such decks?",
                            name, ex.getMessage());
                    allowDeleteUnsupportedConstructed = SOptionPane.showConfirmDialog(msg,
                            "Problem converting decks");
                }
            }
            if (importedOk || allowDeleteUnsupportedConstructed) {
                f.delete();
            }
            continue;
        }

        switch (dh.getDeckType()) {
        case Constructed:
            try {
                this.constructed.add(DeckSerializer.fromSections(sections));
                importedOk = true;
            } catch (final NoSuchElementException ex) {
                if (!allowDeleteUnsupportedConstructed) {
                    final String msg = String.format(
                            "Can not convert deck '%s' for some unsupported cards it contains. %n%s%n%nMay Forge delete all such decks?",
                            name, ex.getMessage());
                    allowDeleteUnsupportedConstructed = SOptionPane.showConfirmDialog(msg,
                            "Problem converting decks");
                }
            }
            if (importedOk || allowDeleteUnsupportedConstructed) {
                f.delete();
            }
            break;

        case Limited:
            name = name.startsWith("AI_") ? name.replace("AI_", "") : name;

            Pair<DeckGroup, MutablePair<File, File>> stored = sealedDecks.get(name);
            if (null == stored) {
                stored = ImmutablePair.of(new DeckGroup(name), MutablePair.of((File) null, (File) null));
            }

            final Deck deck = DeckSerializer.fromSections(sections);
            if (dh.isIntendedForAi()) {
                stored.getLeft().addAiDeck(deck);
                stored.getRight().setRight(f);
            } else {
                stored.getLeft().setHumanDeck(deck);
                stored.getRight().setLeft(f);
            }

            if ((stored.getLeft().getHumanDeck() != null) && !stored.getLeft().getAiDecks().isEmpty()) {
                // both halves of the sealed deck are present, so it can be converted
                this.sealed.add(stored.getLeft());
                stored.getRight().getLeft().delete();
                stored.getRight().getRight().delete();

                // only orphaned decks remain in the map
                sealedDecks.remove(name);
            } else {
                sealedDecks.put(name, stored);
            }
            break;
        default:
            break;
        }
    }

    // offer to delete orphaned decks
    if (!sealedDecks.isEmpty()) {
        final StringBuilder sb = new StringBuilder();
        for (final Pair<DeckGroup, MutablePair<File, File>> s : sealedDecks.values()) {
            final String missingPart = s.getRight().getLeft() == null ? "human" : "computer";
            sb.append(String.format("Sealed deck '%s' has no matching '%s' deck.%n", s.getKey().getName(),
                    missingPart));
        }
        sb.append(System.getProperty("line.separator"));
        sb.append("May Forge delete these decks?");
        if (SOptionPane.showConfirmDialog(sb.toString(), "Some of your sealed decks are orphaned")) {
            for (final Pair<DeckGroup, MutablePair<File, File>> s : sealedDecks.values()) {
                if (s.getRight().getLeft() != null) {
                    s.getRight().getLeft().delete();
                }
                if (s.getRight().getRight() != null) {
                    s.getRight().getRight().delete();
                }
            }
        }
    }
}

From source file:ml.shifu.shifu.util.CommonUtils.java

private static Map<Integer, MutablePair<String, Double>> mergeImportanceList(
        List<Map<Integer, MutablePair<String, Double>>> list) {
    Map<Integer, MutablePair<String, Double>> finalResult = new HashMap<Integer, MutablePair<String, Double>>();
    int modelSize = list.size();
    for (Map<Integer, MutablePair<String, Double>> item : list) {
        for (Entry<Integer, MutablePair<String, Double>> entry : item.entrySet()) {
            if (!finalResult.containsKey(entry.getKey())) {
                // average across models by dividing by modelSize
                MutablePair<String, Double> value = MutablePair.of(entry.getValue().getKey(),
                        entry.getValue().getValue() / modelSize);
                finalResult.put(entry.getKey(), value);
            } else {
                MutablePair<String, Double> current = finalResult.get(entry.getKey());
                double entryValue = entry.getValue().getValue();
                current.setValue(current.getValue() + (entryValue / modelSize));
                finalResult.put(entry.getKey(), current);
            }
        }
    }
    return TreeModel.sortByValue(finalResult, false);
}

From source file:org.apache.giraph.block_app.reducers.array.ArrayReduce.java

/**
 * Registers one new reducer that will reduce an array of objects
 * by reducing individual elements using {@code elementReduceOp}.
 *
 * This function will return a ReducerArrayHandle through which
 * individual elements can be manipulated separately.
 *
 * @param fixedSize Number of elements
 * @param elementReduceOp ReduceOperation for individual elements
 * @param createFunction Function for creating a reducer
 * @return Created ReducerArrayHandle
 */
public static <S, T extends Writable> ReducerArrayHandle<S, T> createArrayHandles(final int fixedSize,
        ReduceOperation<S, T> elementReduceOp, CreateReducerFunctionApi createFunction) {
    final ReducerHandle<Pair<IntRef, S>, ArrayWritable<T>> reduceHandle = createFunction
            .createReducer(new ArrayReduce<>(fixedSize, elementReduceOp));

    final IntRef curIndex = new IntRef(0);
    final MutablePair<IntRef, S> reusablePair = MutablePair.of(new IntRef(0), null);
    final ReducerHandle<S, T> elementReduceHandle = new ReducerHandle<S, T>() {
        @Override
        public T getReducedValue(MasterGlobalCommUsage master) {
            ArrayWritable<T> result = reduceHandle.getReducedValue(master);
            return result.get()[curIndex.value];
        }

        @Override
        public void reduce(S valueToReduce) {
            reusablePair.getLeft().value = curIndex.value;
            reusablePair.setRight(valueToReduce);
            reduceHandle.reduce(reusablePair);
        }

        @Override
        public BroadcastHandle<T> broadcastValue(BlockMasterApi master) {
            throw new UnsupportedOperationException();
        }
    };

    return new ReducerArrayHandle<S, T>() {
        @Override
        public ReducerHandle<S, T> get(int index) {
            curIndex.value = index;
            return elementReduceHandle;
        }

        @Override
        public int getStaticSize() {
            return fixedSize;
        }

        @Override
        public int getReducedSize(BlockMasterApi master) {
            return getStaticSize();
        }

        @Override
        public BroadcastArrayHandle<T> broadcastValue(BlockMasterApi master) {
            final BroadcastHandle<ArrayWritable<T>> broadcastHandle = reduceHandle.broadcastValue(master);
            final IntRef curIndex = new IntRef(0);
            final BroadcastHandle<T> elementBroadcastHandle = new BroadcastHandle<T>() {
                @Override
                public T getBroadcast(WorkerBroadcastUsage worker) {
                    ArrayWritable<T> result = broadcastHandle.getBroadcast(worker);
                    return result.get()[curIndex.value];
                }
            };
            return new BroadcastArrayHandle<T>() {
                @Override
                public BroadcastHandle<T> get(int index) {
                    curIndex.value = index;
                    return elementBroadcastHandle;
                }

                @Override
                public int getStaticSize() {
                    return fixedSize;
                }

                @Override
                public int getBroadcastedSize(WorkerBroadcastUsage worker) {
                    return getStaticSize();
                }
            };
        }
    };
}
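
Note the allocation-avoidance idiom above: a single MutablePair (paired with a mutable IntRef) is created once and mutated on every reduce() call, rather than allocating a fresh pair per reduced value. Stripped of the Giraph machinery, the pattern looks roughly like this sketch, with java.util.function.Consumer standing in for the reducer handle:

import java.util.function.Consumer;
import org.apache.commons.lang3.tuple.MutablePair;

public class ReusablePairEmitter {
    // Allocated once and reused for every emission; consumers must not
    // retain the pair, because its contents change on the next call.
    private final MutablePair<Integer, String> reusable = MutablePair.of(0, null);

    void emit(int index, String value, Consumer<MutablePair<Integer, String>> sink) {
        reusable.setLeft(index);
        reusable.setRight(value);
        sink.accept(reusable);
    }

    public static void main(String[] args) {
        ReusablePairEmitter emitter = new ReusablePairEmitter();
        emitter.emit(0, "a", System.out::println); // (0,a)
        emitter.emit(1, "b", System.out::println); // (1,b)
    }
}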

From source file:org.apache.giraph.block_app.reducers.array.BasicArrayReduce.java

/**
 * Registers one new reducer that will reduce a BasicArray
 * by reducing individual elements using {@code elementReduceOp},
 * with a predefined size.
 *
 * This function will return a ReducerArrayHandle through which
 * individual elements can be manipulated separately.
 *
 * @param fixedSize Number of elements
 * @param typeOps TypeOps of individual elements
 * @param elementReduceOp ReduceOperation for individual elements
 * @param createFunction Function for creating a reducer
 * @return Created ReducerArrayHandle
 */
public static <S, R extends Writable> ReducerArrayHandle<S, R> createArrayHandles(final int fixedSize,
        final PrimitiveTypeOps<R> typeOps, ReduceOperation<S, R> elementReduceOp,
        CreateReducerFunctionApi createFunction) {
    final ReducerHandle<Pair<IntRef, S>, WArrayList<R>> reduceHandle = createFunction
            .createReducer(new BasicArrayReduce<>(fixedSize, typeOps, elementReduceOp));
    final IntRef curIndex = new IntRef(0);
    final R reusableValue = typeOps.create();
    final R initialValue = elementReduceOp.createInitialValue();
    final MutablePair<IntRef, S> reusablePair = MutablePair.of(new IntRef(0), null);
    final ReducerHandle<S, R> elementReduceHandle = new ReducerHandle<S, R>() {
        @Override
        public R getReducedValue(MasterGlobalCommUsage master) {
            WArrayList<R> result = reduceHandle.getReducedValue(master);
            if (fixedSize == -1 && curIndex.value >= result.size()) {
                typeOps.set(reusableValue, initialValue);
            } else {
                result.getIntoW(curIndex.value, reusableValue);
            }
            return reusableValue;
        }

        @Override
        public void reduce(S valueToReduce) {
            reusablePair.getLeft().value = curIndex.value;
            reusablePair.setRight(valueToReduce);
            reduceHandle.reduce(reusablePair);
        }

        @Override
        public BroadcastHandle<R> broadcastValue(BlockMasterApi master) {
            throw new UnsupportedOperationException();
        }
    };

    return new ReducerArrayHandle<S, R>() {
        @Override
        public ReducerHandle<S, R> get(int index) {
            curIndex.value = index;
            return elementReduceHandle;
        }

        @Override
        public int getStaticSize() {
            if (fixedSize == -1) {
                throw new UnsupportedOperationException("Cannot call size, when one is not specified upfront");
            }
            return fixedSize;
        }

        @Override
        public int getReducedSize(BlockMasterApi master) {
            return reduceHandle.getReducedValue(master).size();
        }

        @Override
        public BroadcastArrayHandle<R> broadcastValue(BlockMasterApi master) {
            final BroadcastHandle<WArrayList<R>> broadcastHandle = reduceHandle.broadcastValue(master);
            final IntRef curIndex = new IntRef(0);
            final R reusableValue = typeOps.create();
            final BroadcastHandle<R> elementBroadcastHandle = new BroadcastHandle<R>() {
                @Override
                public R getBroadcast(WorkerBroadcastUsage worker) {
                    WArrayList<R> result = broadcastHandle.getBroadcast(worker);
                    if (fixedSize == -1 && curIndex.value >= result.size()) {
                        typeOps.set(reusableValue, initialValue);
                    } else {
                        result.getIntoW(curIndex.value, reusableValue);
                    }
                    return reusableValue;
                }
            };
            return new BroadcastArrayHandle<R>() {
                @Override
                public BroadcastHandle<R> get(int index) {
                    curIndex.value = index;
                    return elementBroadcastHandle;
                }

                @Override
                public int getStaticSize() {
                    if (fixedSize == -1) {
                        throw new UnsupportedOperationException(
                                "Cannot call size, when one is not specified upfront");
                    }
                    return fixedSize;
                }

                @Override
                public int getBroadcastedSize(WorkerBroadcastUsage worker) {
                    return broadcastHandle.getBroadcast(worker).size();
                }
            };
        }
    };
}

From source file:org.apache.giraph.block_app.reducers.map.BasicMapReduce.java

/**
 * Registers one new reducer that will reduce a BasicMap
 * by reducing individual elements corresponding to the same key
 * using {@code elementReduceOp}.
 *
 * This function will return a ReducerMapHandle through which
 * individual elements can be manipulated separately.
 *
 * @param keyTypeOps TypeOps of keys
 * @param typeOps TypeOps of individual elements
 * @param elementReduceOp ReduceOperation for individual elements
 * @param createFunction Function for creating a reducer
 * @return Created ReducerMapHandle
 */
public static <K extends WritableComparable, S, R extends Writable> ReducerMapHandle<K, S, R> createMapHandles(
        final PrimitiveIdTypeOps<K> keyTypeOps, final PrimitiveTypeOps<R> typeOps,
        ReduceOperation<S, R> elementReduceOp, CreateReducerFunctionApi createFunction) {
    final ReducerHandle<Pair<K, S>, Basic2ObjectMap<K, R>> reduceHandle = createFunction
            .createReducer(new BasicMapReduce<>(keyTypeOps, typeOps, elementReduceOp));
    final K curIndex = keyTypeOps.create();
    final R reusableValue = typeOps.create();
    final R initialValue = elementReduceOp.createInitialValue();
    final MutablePair<K, S> reusablePair = MutablePair.of(null, null);
    final ReducerHandle<S, R> elementReduceHandle = new ReducerHandle<S, R>() {
        @Override
        public R getReducedValue(MasterGlobalCommUsage master) {
            Basic2ObjectMap<K, R> result = reduceHandle.getReducedValue(master);
            R value = result.get(curIndex);
            if (value == null) {
                typeOps.set(reusableValue, initialValue);
            } else {
                typeOps.set(reusableValue, value);
            }
            return reusableValue;
        }

        @Override
        public void reduce(S valueToReduce) {
            reusablePair.setLeft(curIndex);
            reusablePair.setRight(valueToReduce);
            reduceHandle.reduce(reusablePair);
        }

        @Override
        public BroadcastHandle<R> broadcastValue(BlockMasterApi master) {
            throw new UnsupportedOperationException();
        }
    };

    return new ReducerMapHandle<K, S, R>() {
        @Override
        public ReducerHandle<S, R> get(K key) {
            keyTypeOps.set(curIndex, key);
            return elementReduceHandle;
        }

        @Override
        public int getReducedSize(BlockMasterApi master) {
            return reduceHandle.getReducedValue(master).size();
        }

        @Override
        public BroadcastMapHandle<K, R> broadcastValue(BlockMasterApi master) {
            final BroadcastHandle<Basic2ObjectMap<K, R>> broadcastHandle = reduceHandle.broadcastValue(master);
            final K curIndex = keyTypeOps.create();
            final R reusableValue = typeOps.create();
            final BroadcastHandle<R> elementBroadcastHandle = new BroadcastHandle<R>() {
                @Override
                public R getBroadcast(WorkerBroadcastUsage worker) {
                    Basic2ObjectMap<K, R> result = broadcastHandle.getBroadcast(worker);
                    R value = result.get(curIndex);
                    if (value == null) {
                        typeOps.set(reusableValue, initialValue);
                    } else {
                        typeOps.set(reusableValue, value);
                    }
                    return reusableValue;
                }
            };
            return new BroadcastMapHandle<K, R>() {
                @Override
                public BroadcastHandle<R> get(K key) {
                    keyTypeOps.set(curIndex, key);
                    return elementBroadcastHandle;
                }

                @Override
                public int getBroadcastedSize(WorkerBroadcastUsage worker) {
                    return broadcastHandle.getBroadcast(worker).size();
                }
            };
        }
    };
}

From source file:org.apache.hyracks.storage.am.common.TreeIndexTestUtils.java

protected void addFilterField(IIndexTestContext ctx, MutablePair<ITupleReference, ITupleReference> minMax)
        throws HyracksDataException {
    //Duplicate the PK field as a filter field at the end of the tuple to be inserted.
    int filterField = ctx.getFieldCount();
    ITupleReference currTuple = ctx.getTuple();
    ArrayTupleBuilder filterBuilder = new ArrayTupleBuilder(1);
    filterBuilder.addField(currTuple.getFieldData(filterField), currTuple.getFieldStart(filterField),
            currTuple.getFieldLength(filterField));
    IBinaryComparator comparator = ctx.getComparatorFactories()[0].createBinaryComparator();
    ArrayTupleReference filterOnlyTuple = new ArrayTupleReference();
    filterOnlyTuple.reset(filterBuilder.getFieldEndOffsets(), filterBuilder.getByteArray());
    if (minMax == null) {
        minMax = MutablePair.of(filterOnlyTuple, filterOnlyTuple);
    } else if (compareFilterTuples(minMax.getLeft(), filterOnlyTuple, comparator) > 0) {
        minMax.setLeft(filterOnlyTuple);
    } else if (compareFilterTuples(minMax.getRight(), filterOnlyTuple, comparator) < 0) {
        minMax.setRight(filterOnlyTuple);
    }
}
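
The pair here acts as a mutable (min, max) holder whose bounds are widened in place; since Java passes references by value, the assignment in the null branch above only affects the local parameter, so the sketch below returns the pair instead. A minimal version of the same idea with plain integers (not the Hyracks API):

import org.apache.commons.lang3.tuple.MutablePair;

public class MinMaxRange {
    // Widen the running (min, max) range to include v; a null range means
    // no values have been seen yet, mirroring the null check above.
    static MutablePair<Integer, Integer> widen(MutablePair<Integer, Integer> range, int v) {
        if (range == null) {
            return MutablePair.of(v, v);
        }
        if (v < range.getLeft()) {
            range.setLeft(v);
        } else if (v > range.getRight()) {
            range.setRight(v);
        }
        return range;
    }

    public static void main(String[] args) {
        MutablePair<Integer, Integer> range = null;
        for (int v : new int[] { 5, 2, 9, 4 }) {
            range = widen(range, v);
        }
        System.out.println(range); // (2,9)
    }
}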

From source file:org.apache.hyracks.storage.am.lsm.btree.LSMBTreeFilterMergeTestDriver.java

@Override
protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType,
        ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey,
        ITupleReference prefixHighKey) throws Exception {
    OrderedIndexTestContext ctx = createTestContext(fieldSerdes, numKeys, leafType, true);
    ctx.getIndex().create();
    ctx.getIndex().activate();
    // Start off with one tree bulk loaded.
    // We assume all fieldSerdes are of the same type. Check the first one
    // to determine which field types to generate.
    if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
        orderedIndexTestUtils.bulkLoadIntTuples(ctx, numTuplesToInsert, true, getRandom());
    } else if (fieldSerdes[0] instanceof UTF8StringSerializerDeserializer) {
        orderedIndexTestUtils.bulkLoadStringTuples(ctx, numTuplesToInsert, true, getRandom());
    }

    int maxTreesToMerge = AccessMethodTestsConfig.LSM_BTREE_MAX_TREES_TO_MERGE;
    ILSMIndexAccessor accessor = (ILSMIndexAccessor) ctx.getIndexAccessor();
    IBinaryComparator comp = ctx.getComparatorFactories()[0].createBinaryComparator();
    for (int i = 0; i < maxTreesToMerge; i++) {
        int flushed = 0;
        for (; flushed < i; flushed++) {
            Pair<ITupleReference, ITupleReference> minMax = null;
            if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
                minMax = orderedIndexTestUtils.insertIntTuples(ctx, numTuplesToInsert, true, getRandom());
            } else {
                minMax = orderedIndexTestUtils.insertStringTuples(ctx, numTuplesToInsert, true, getRandom());
            }
            if (minMax != null) {
                ILSMComponentFilter f = ((LSMBTree) ctx.getIndex()).getCurrentMemoryComponent()
                        .getLSMComponentFilter();
                Pair<ITupleReference, ITupleReference> obsMinMax = filterToMinMax(f);
                Assert.assertEquals(0,
                        TreeIndexTestUtils.compareFilterTuples(obsMinMax.getLeft(), minMax.getLeft(), comp));
                Assert.assertEquals(0,
                        TreeIndexTestUtils.compareFilterTuples(obsMinMax.getRight(), minMax.getRight(), comp));
            }

            StubIOOperationCallback stub = new StubIOOperationCallback();
            BlockingIOOperationCallbackWrapper waiter = new BlockingIOOperationCallbackWrapper(stub);
            accessor.scheduleFlush(waiter);
            waiter.waitForIO();
            if (minMax != null) {
                Pair<ITupleReference, ITupleReference> obsMinMax = filterToMinMax(
                        stub.getLastNewComponent().getLSMComponentFilter());
                Assert.assertEquals(0,
                        TreeIndexTestUtils.compareFilterTuples(obsMinMax.getLeft(), minMax.getLeft(), comp));
                Assert.assertEquals(0,
                        TreeIndexTestUtils.compareFilterTuples(obsMinMax.getRight(), minMax.getRight(), comp));
            }
        }

        List<ILSMDiskComponent> flushedComponents = ((LSMBTree) ctx.getIndex()).getImmutableComponents();
        MutablePair<ITupleReference, ITupleReference> expectedMergeMinMax = null;
        for (ILSMDiskComponent f : flushedComponents) {
            Pair<ITupleReference, ITupleReference> componentMinMax = filterToMinMax(f.getLSMComponentFilter());
            if (expectedMergeMinMax == null) {
                expectedMergeMinMax = MutablePair.of(componentMinMax.getLeft(), componentMinMax.getRight());
            }
            if (TreeIndexTestUtils.compareFilterTuples(expectedMergeMinMax.getLeft(), componentMinMax.getLeft(),
                    comp) > 0) {
                expectedMergeMinMax.setLeft(componentMinMax.getLeft());
            }
            if (TreeIndexTestUtils.compareFilterTuples(expectedMergeMinMax.getRight(),
                    componentMinMax.getRight(), comp) < 0) {
                expectedMergeMinMax.setRight(componentMinMax.getRight());
            }
        }
        accessor.scheduleMerge(NoOpIOOperationCallback.INSTANCE,
                ((LSMBTree) ctx.getIndex()).getImmutableComponents());

        flushedComponents = ((LSMBTree) ctx.getIndex()).getImmutableComponents();
        Pair<ITupleReference, ITupleReference> mergedMinMax = filterToMinMax(
                flushedComponents.get(0).getLSMComponentFilter());
        Assert.assertEquals(0, TreeIndexTestUtils.compareFilterTuples(expectedMergeMinMax.getLeft(),
                mergedMinMax.getLeft(), comp));
        Assert.assertEquals(0, TreeIndexTestUtils.compareFilterTuples(expectedMergeMinMax.getRight(),
                mergedMinMax.getRight(), comp));

        orderedIndexTestUtils.checkPointSearches(ctx);
        orderedIndexTestUtils.checkScan(ctx);
        orderedIndexTestUtils.checkDiskOrderScan(ctx);
        orderedIndexTestUtils.checkRangeSearch(ctx, lowKey, highKey, true, true);
        if (prefixLowKey != null && prefixHighKey != null) {
            orderedIndexTestUtils.checkRangeSearch(ctx, prefixLowKey, prefixHighKey, true, true);
        }
    }
    ctx.getIndex().deactivate();
    ctx.getIndex().destroy();
}

From source file:org.grouplens.lenskit.vectors.FastIntersectIterImpl.java

public FastIntersectIterImpl(SparseVector v1, SparseVector v2) {
    vec1 = v1;
    vec2 = v2;
    // FIXME The true here slows things down
    iterA = v1.keys.activeIndexIterator(true);
    iterB = v2.keys.activeIndexIterator(true);
    idxA = iterA.hasNext() ? iterA.nextInt() : -1;
    idxB = iterB.hasNext() ? iterB.nextInt() : -1;
    leftEnt = new VectorEntry(v1, -1, 0, 0, false);
    rightEnt = new VectorEntry(v2, -1, 0, 0, false);
    pair = MutablePair.of(leftEnt, rightEnt);
}