Example usage for org.apache.commons.lang3.tuple Triple getLeft

List of usage examples for org.apache.commons.lang3.tuple Triple getLeft

Introduction

On this page you can find example usages of org.apache.commons.lang3.tuple.Triple#getLeft, collected from open-source projects.

Prototype

public abstract L getLeft();

Document

Gets the left element from this triple.
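
Before the project examples, here is a minimal, self-contained sketch of getLeft used together with getMiddle and getRight. Triple.of, getLeft, getMiddle, and getRight are the standard commons-lang3 API; the values are invented for illustration:

    import org.apache.commons.lang3.tuple.Triple;

    public class TripleGetLeftSketch {
        public static void main(String[] args) {
            // Triple.of(...) builds an immutable triple; the type parameters
            // fix the types of the left, middle and right elements.
            Triple<String, Integer, Boolean> t = Triple.of("alpha", 42, true);

            String left = t.getLeft();      // "alpha"
            Integer middle = t.getMiddle(); // 42
            Boolean right = t.getRight();   // true

            System.out.printf("left=%s middle=%d right=%b%n", left, middle, right);
        }
    }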

Usage

From source file:alluxio.master.file.PermissionCheckTest.java

private LockedInodePath getLockedInodePath(ArrayList<Triple<String, String, Mode>> permissions)
        throws Exception {
    List<Inode<?>> inodes = new ArrayList<>();
    inodes.add(getRootInode());
    if (permissions.size() == 0) {
        return new MutableLockedInodePath(new AlluxioURI("/"), inodes, null);
    }
    String uri = "";
    for (int i = 0; i < permissions.size(); i++) {
        Triple<String, String, Mode> permission = permissions.get(i);
        String owner = permission.getLeft();
        String group = permission.getMiddle();
        Mode mode = permission.getRight();
        uri += "/" + (i + 1);
        if (i == permissions.size() - 1) {
            Inode<?> inode = InodeFile.create(i + 1, i, (i + 1) + "", CommonUtils.getCurrentMs(),
                    CreateFileOptions.defaults().setBlockSizeBytes(Constants.KB).setOwner(owner).setGroup(group)
                            .setMode(mode));
            inodes.add(inode);
        } else {
            Inode<?> inode = InodeDirectory.create(i + 1, i, (i + 1) + "",
                    CreateDirectoryOptions.defaults().setOwner(owner).setGroup(group).setMode(mode));
            inodes.add(inode);
        }
    }
    return new MutableLockedInodePath(new AlluxioURI(uri), inodes, null);
}

From source file:com.uber.hoodie.common.util.collection.converter.HoodieRecordConverter.java

@Override
public HoodieRecord getData(byte[] bytes) {
    try {
        Triple<Pair<String, String>, Pair<byte[], byte[]>, byte[]> data = SerializationUtils.deserialize(bytes);
        Optional<GenericRecord> payload = Optional.empty();
        HoodieRecordLocation currentLocation = null;
        HoodieRecordLocation newLocation = null;
        if (data.getRight().length > 0) {
            // This can happen if the record is deleted, the payload is optional with 0 bytes
            payload = Optional.of(HoodieAvroUtils.bytesToAvro(data.getRight(), schema));
        }
        // Get the currentLocation for the HoodieRecord
        if (data.getMiddle().getLeft().length > 0) {
            currentLocation = SerializationUtils.deserialize(data.getMiddle().getLeft());
        }
        // Get the newLocation for the HoodieRecord
        if (data.getMiddle().getRight().length > 0) {
            newLocation = SerializationUtils.deserialize(data.getMiddle().getRight());
        }
        HoodieRecord<? extends HoodieRecordPayload> hoodieRecord = new HoodieRecord<>(
                new HoodieKey(data.getLeft().getKey(), data.getLeft().getValue()),
                ReflectionUtils.loadPayload(payloadClazz, new Object[] { payload }, Optional.class));
        hoodieRecord.setCurrentLocation(currentLocation);
        hoodieRecord.setNewLocation(newLocation);
        return hoodieRecord;
    } catch (IOException io) {
        throw new HoodieNotSerializableException("Cannot de-serialize value from bytes", io);
    }
}
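
The nested layout above — a key pair on the left, a pair of serialized locations in the middle, and the raw payload on the right — can be reproduced in isolation. The sketch below uses commons-lang3's own SerializationUtils in place of Hoodie's serializer, purely to illustrate how the chained getLeft/getMiddle/getRight calls unpack the structure; all field contents are invented:

    import org.apache.commons.lang3.SerializationUtils;
    import org.apache.commons.lang3.tuple.Pair;
    import org.apache.commons.lang3.tuple.Triple;

    public class NestedTripleSketch {
        public static void main(String[] args) {
            // Pack: (recordKey, partitionPath), (currentLocation, newLocation), payload.
            // Empty byte arrays stand in for "no location yet".
            Triple<Pair<String, String>, Pair<byte[], byte[]>, byte[]> packed = Triple.of(
                    Pair.of("key-1", "2019/01/01"),
                    Pair.of(new byte[0], new byte[0]),
                    "payload".getBytes());

            byte[] bytes = SerializationUtils.serialize(packed);

            // Unpack: getLeft() is the key pair, getMiddle() the location pair,
            // getRight() the (possibly empty) payload.
            Triple<Pair<String, String>, Pair<byte[], byte[]>, byte[]> data =
                    SerializationUtils.deserialize(bytes);
            System.out.println(data.getLeft().getKey());           // key-1
            System.out.println(data.getMiddle().getLeft().length); // 0 -> no current location
            System.out.println(new String(data.getRight()));       // payload
        }
    }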

From source file:blusunrize.immersiveengineering.common.items.ItemRevolver.java

@Override
public ActionResult<ItemStack> onItemRightClick(World world, EntityPlayer player, @Nonnull EnumHand hand) {
    ItemStack revolver = player.getHeldItem(hand);
    if (!world.isRemote) {
        if (player.isSneaking()) {
            CommonProxy.openGuiForItem(player,
                    hand == EnumHand.MAIN_HAND ? EntityEquipmentSlot.MAINHAND : EntityEquipmentSlot.OFFHAND);
            return new ActionResult(EnumActionResult.SUCCESS, revolver);
        } else if (player.getCooledAttackStrength(1) >= 1) {
            if (this.getUpgrades(revolver).getBoolean("nerf"))
                world.playSound(null, player.posX, player.posY, player.posZ, SoundEvents.ENTITY_ITEM_PICKUP,
                        SoundCategory.PLAYERS, 1f, 0.6f);
            else {
                if (getShootCooldown(revolver) > 0 || ItemNBTHelper.hasKey(revolver, "reload"))
                    return new ActionResult(EnumActionResult.PASS, revolver);

                NonNullList<ItemStack> bullets = getBullets(revolver);

                if (isEmpty(revolver, false))
                    for (int i = 0; i < player.inventory.getSizeInventory(); i++) {
                        ItemStack stack = player.inventory.getStackInSlot(i);
                        if (stack.getItem() instanceof ItemSpeedloader
                                && !((ItemSpeedloader) stack.getItem()).isEmpty(stack)) {
                            for (ItemStack b : bullets)
                                if (!b.isEmpty())
                                    world.spawnEntity(
                                            new EntityItem(world, player.posX, player.posY, player.posZ, b));
                            setBullets(revolver, ((ItemSpeedloader) stack.getItem()).getContainedItems(stack));
                            ((ItemSpeedloader) stack.getItem()).setContainedItems(stack,
                                    NonNullList.withSize(8, ItemStack.EMPTY));
                            player.inventory.markDirty();
                            if (player instanceof EntityPlayerMP)
                                ImmersiveEngineering.packetHandler.sendTo(new MessageSpeedloaderSync(i, hand),
                                        (EntityPlayerMP) player);

                            ItemNBTHelper.setInt(revolver, "reload", 60);
                            return new ActionResult(EnumActionResult.SUCCESS, revolver);
                        }
                    }

                if (!ItemNBTHelper.hasKey(revolver, "reload")) {
                    if (!bullets.get(0).isEmpty() && bullets.get(0).getItem() instanceof ItemBullet
                            && ItemNBTHelper.hasKey(bullets.get(0), "bullet")) {
                        String key = ItemNBTHelper.getString(bullets.get(0), "bullet");
                        IBullet bullet = BulletHandler.getBullet(key);
                        if (bullet != null) {
                            Vec3d vec = player.getLookVec();
                            boolean electro = getUpgrades(revolver).getBoolean("electro");
                            int count = bullet.getProjectileCount(player);
                            if (count == 1) {
                                Entity entBullet = getBullet(player, vec, vec, key, bullets.get(0), electro);
                                player.world.spawnEntity(
                                        bullet.getProjectile(player, bullets.get(0), entBullet, electro));
                            } else
                                for (int i = 0; i < count; i++) {
                                    Vec3d vecDir = vec.add(player.getRNG().nextGaussian() * .1,
                                            player.getRNG().nextGaussian() * .1,
                                            player.getRNG().nextGaussian() * .1);
                                    Entity entBullet = getBullet(player, vec, vecDir, key, bullets.get(0),
                                            electro);
                                    player.world.spawnEntity(
                                            bullet.getProjectile(player, bullets.get(0), entBullet, electro));
                                }
                            bullets.set(0, bullet.getCasing(bullets.get(0)).copy());

                            float noise = 1;
                            Utils.attractEnemies(player, 64 * noise);
                            SoundEvent sound = bullet.getSound();
                            if (sound == null)
                                sound = IESounds.revolverFire;
                            world.playSound(null, player.posX, player.posY, player.posZ, sound,
                                    SoundCategory.PLAYERS, noise, 1f);
                        } else
                            world.playSound(null, player.posX, player.posY, player.posZ,
                                    SoundEvents.BLOCK_NOTE_HAT, SoundCategory.PLAYERS, 1f, 1f);
                    } else
                        world.playSound(null, player.posX, player.posY, player.posZ, SoundEvents.BLOCK_NOTE_HAT,
                                SoundCategory.PLAYERS, 1f, 1f);

                    NonNullList<ItemStack> cycled = NonNullList.withSize(getBulletCount(revolver),
                            ItemStack.EMPTY);
                    for (int i = 1; i < cycled.size(); i++)
                        cycled.set(i - 1, bullets.get(i));
                    cycled.set(cycled.size() - 1, bullets.get(0));
                    setBullets(revolver, cycled);
                    player.inventory.markDirty();
                    ItemNBTHelper.setInt(revolver, "cooldown", getMaxShootCooldown(revolver));
                    return new ActionResult(EnumActionResult.SUCCESS, revolver);
                }
            }
        }
    } else if (!player.isSneaking() && revolver.getItemDamage() == 0) {
        if (getShootCooldown(revolver) > 0 || ItemNBTHelper.hasKey(revolver, "reload"))
            return new ActionResult(EnumActionResult.PASS, revolver);
        NonNullList<ItemStack> bullets = getBullets(revolver);
        if (!bullets.get(0).isEmpty() && bullets.get(0).getItem() instanceof ItemBullet
                && ItemNBTHelper.hasKey(bullets.get(0), "bullet")) {
            Triple<ItemStack, ShaderRegistryEntry, ShaderCase> shader = ShaderRegistry
                    .getStoredShaderAndCase(revolver);
            if (shader != null) {
                Vec3d pos = Utils.getLivingFrontPos(player, .75, player.height * .75,
                        hand == EnumHand.MAIN_HAND ? player.getPrimaryHand()
                                : player.getPrimaryHand().opposite(),
                        false, 1);
                shader.getMiddle().getEffectFunction().execute(world, shader.getLeft(), revolver,
                        shader.getRight().getShaderType(), pos, player.getForward(), .125f);
            }
        }
        return new ActionResult(EnumActionResult.SUCCESS, revolver);
    }
    return new ActionResult(EnumActionResult.SUCCESS, revolver);
}

From source file:com.clust4j.algo.NNHSTests.java

@Test
public void testKD1() {
    final Array2DRowRealMatrix mat = new Array2DRowRealMatrix(a, false);
    KDTree kd = new KDTree(mat);

    QuadTup<double[][], int[], NodeData[], double[][][]> arrays = kd.getArrays();

    assertTrue(MatUtils.equalsExactly(arrays.getFirst(), a));
    assertTrue(VecUtils.equalsExactly(new int[] { 0, 1, 2 }, arrays.getSecond()));

    Triple<Integer, Integer, Integer> stats = kd.getTreeStats();
    assertTrue(stats.getLeft() == 0);
    assertTrue(stats.getMiddle() == 0);
    assertTrue(stats.getRight() == 0);

    NodeData data = arrays.getThird()[0];
    assertTrue(data.idx_start == 0);
    assertTrue(data.idx_end == 3);
    assertTrue(data.is_leaf);
    assertTrue(data.radius == 1);
}

From source file:com.clust4j.algo.NNHSTests.java

@Test
public void testBall1() {
    final Array2DRowRealMatrix mat = new Array2DRowRealMatrix(a, false);
    BallTree ball = new BallTree(mat);

    QuadTup<double[][], int[], NodeData[], double[][][]> arrays = ball.getArrays();

    assertTrue(MatUtils.equalsExactly(arrays.getFirst(), a));
    assertTrue(VecUtils.equalsExactly(new int[] { 0, 1, 2 }, arrays.getSecond()));

    Triple<Integer, Integer, Integer> stats = ball.getTreeStats();
    assertTrue(stats.getLeft() == 0);
    assertTrue(stats.getMiddle() == 0);
    assertTrue(stats.getRight() == 0);

    NodeData data = arrays.getThird()[0];
    assertTrue(data.idx_start == 0);
    assertTrue(data.idx_end == 3);
    assertTrue(data.is_leaf);
    assertTrue(data.radius == 6.716480559869961);

    double[][][] trip = arrays.getFourth();
    assertTrue(trip.length == 1);
    assertTrue(trip[0][0][0] == 1.6666666666666667);
    assertTrue(trip[0][0][1] == 2.3333333333333333);
    assertTrue(trip[0][0][2] == 2.6666666666666667);
    assertTrue(trip[0][0][3] == 2.6666666666666667);
}

From source file:it.acubelab.smaph.SmaphAnnotator.java

/**
 * Given a query and its gold standard, generate the positive- and
 * negative-example feature vectors.
 * @param query
 *            a query.
 * @param goldStandard
 *            the entities associated to the query.
 * @param posEFVectors
 *            where to store the positive-example (true positives) feature
 *            vectors.
 * @param negEFVectors
 *            where to store the negative-example (false positives) feature
 *            vectors.
 * @param discardNE
 *            whether to limit the output to named entities, as defined by
 *            ERDDatasetFilter.EntityIsNE.
 * @param wikiToFreeb
 *            a wikipedia to freebase-id mapping.
 * @throws Exception
 *             if something went wrong while annotating the query.
 */
public void generateExamples(String query, HashSet<Tag> goldStandard, Vector<double[]> posEFVectors,
        Vector<double[]> negEFVectors, boolean discardNE, WikipediaToFreebase wikiToFreeb) throws Exception {

    /** Search the query on bing */
    List<Pair<String, Integer>> bingBoldsAndRankNS = null;
    List<String> urls = null;
    List<String> relatedSearchRes = null;
    Triple<Integer, Double, JSONObject> resCountAndWebTotal = null;
    int resultsCount = -1;
    double webTotalNS = Double.NaN;
    List<String> filteredBolds = null;
    HashMap<Integer, Integer> rankToIdNS = null;
    if (includeSourceAnnotator || includeSourceWikiSearch || includeSourceRelatedSearch
            || includeSourceNormalSearch) {
        bingBoldsAndRankNS = new Vector<>();
        urls = new Vector<>();
        relatedSearchRes = new Vector<>();
        resCountAndWebTotal = takeBingData(query, bingBoldsAndRankNS, urls, relatedSearchRes, null,
                Integer.MAX_VALUE, false);
        resultsCount = resCountAndWebTotal.getLeft();
        webTotalNS = resCountAndWebTotal.getMiddle();
        filteredBolds = boldFilter.filterBolds(query, bingBoldsAndRankNS, resultsCount);
        rankToIdNS = urlsToRankID(urls);

        if (debugger != null) {
            debugger.addBoldPositionEditDistance(query, bingBoldsAndRankNS);
            debugger.addBoldFilterOutput(query, filteredBolds);
            debugger.addSource2SearchResult(query, rankToIdNS, urls);
            debugger.addBingResponseNormalSearch(query, resCountAndWebTotal.getRight());

        }
    }

    /** Do the wikipedia-search on bing. */
    List<String> wikiSearchUrls = new Vector<>();
    List<Pair<String, Integer>> bingBoldsAndRankWS = new Vector<>();
    HashMap<String, Pair<Integer, Integer>> annTitlesToIdAndRankWS = null;
    Triple<Integer, Double, JSONObject> resCountAndWebTotalWS = null;
    double webTotalWS = Double.NaN;
    if (includeSourceWikiSearch | includeSourceNormalSearch) {
        resCountAndWebTotalWS = takeBingData(query, bingBoldsAndRankWS, wikiSearchUrls, null, null,
                topKWikiSearch, true);
        webTotalWS = resCountAndWebTotalWS.getMiddle();
        HashMap<Integer, Integer> rankToIdWikiSearch = urlsToRankID(wikiSearchUrls);
        if (debugger != null) {
            debugger.addSource3SearchResult(query, rankToIdWikiSearch, wikiSearchUrls);
            debugger.addBingResponseWikiSearch(query, resCountAndWebTotalWS.getRight());

        }
        annTitlesToIdAndRankWS = adjustTitles(rankToIdWikiSearch);
    }

    /** Do the RelatedSearch on bing */
    String relatedSearch = null;
    List<String> relatedSearchUrls = null;
    List<Pair<String, Integer>> bingBoldsAndRankRS = null;
    HashMap<Integer, Integer> rankToIdRelatedSearch = null;
    HashMap<String, Pair<Integer, Integer>> annTitlesToIdAndRankRS = null;
    double webTotalRelatedSearch = Double.NaN;
    if (includeSourceRelatedSearch) {
        relatedSearch = getRelatedSearch(relatedSearchRes, query);
        relatedSearchUrls = new Vector<>();
        bingBoldsAndRankRS = new Vector<>();
        Triple<Integer, Double, JSONObject> resCountAndWebTotalRS = takeBingData(query, bingBoldsAndRankRS,
                relatedSearchUrls, null, null, topKRelatedSearch, false);
        webTotalRelatedSearch = resCountAndWebTotalRS.getMiddle();
        rankToIdRelatedSearch = urlsToRankID(relatedSearchUrls);
        annTitlesToIdAndRankRS = adjustTitles(rankToIdRelatedSearch);
    }

    /** Annotate bolds on the annotator */
    Pair<HashMap<String, HashMap<String, Double>>, HashMap<String, Annotation>> infoAndAnnotations = null;
    HashMap<String, Annotation> spotToAnnotation = null;
    HashMap<String, HashMap<String, Double>> additionalInfo = null;
    Pair<String, HashSet<Mention>> annInput = null;
    if (includeSourceAnnotator) {
        annInput = concatenateBolds(filteredBolds);
        infoAndAnnotations = disambiguateBolds(annInput.first, annInput.second);
        spotToAnnotation = infoAndAnnotations.second;
        additionalInfo = infoAndAnnotations.first;

        if (debugger != null)
            debugger.addReturnedAnnotation(query, spotToAnnotation);
    }

    List<Pair<Tag, HashMap<String, Double>>> widToEFFtrVect = new Vector<>();
    // Filter and add annotations found by the disambiguator
    if (includeSourceAnnotator) {
        for (String bold : filteredBolds) {
            if (spotToAnnotation.containsKey(bold)) {
                Annotation ann = spotToAnnotation.get(bold);
                HashMap<String, Double> ESFeatures = generateEntitySelectionFeaturesAnnotator(query,
                        resultsCount, ann, annInput, bingBoldsAndRankNS, additionalInfo);
                Tag tag = new Tag(ann.getConcept());
                widToEFFtrVect.add(new Pair<Tag, HashMap<String, Double>>(tag, ESFeatures));
            }
        }
    }

    // Filter and add entities found in the normal search
    if (includeSourceNormalSearch) {
        for (int rank : rankToIdNS.keySet()) {
            int wid = rankToIdNS.get(rank);
            HashMap<String, Double> ESFeatures = generateEntitySelectionFeaturesSearch(query, wid, rank,
                    webTotalNS, webTotalWS, bingBoldsAndRankNS, 2);
            Tag tag = new Tag(wid);
            widToEFFtrVect.add(new Pair<Tag, HashMap<String, Double>>(tag, ESFeatures));
        }
    }

    // Filter and add entities found in the WikipediaSearch
    if (includeSourceWikiSearch) {
        for (String annotatedTitleWS : annTitlesToIdAndRankWS.keySet()) {
            int wid = annTitlesToIdAndRankWS.get(annotatedTitleWS).first;
            int rank = annTitlesToIdAndRankWS.get(annotatedTitleWS).second;
            HashMap<String, Double> ESFeatures = generateEntitySelectionFeaturesSearch(query, wid, rank,
                    webTotalNS, webTotalWS, bingBoldsAndRankWS, 3);

            Tag tag = new Tag(wid);
            widToEFFtrVect.add(new Pair<Tag, HashMap<String, Double>>(tag, ESFeatures));
        }
    }

    // Filter and add entities found in the RelatedSearch
    if (includeSourceRelatedSearch) {
        for (String annotatedTitleRS : annTitlesToIdAndRankRS.keySet()) {
            int wid = annTitlesToIdAndRankRS.get(annotatedTitleRS).first;
            int rank = annTitlesToIdAndRankRS.get(annotatedTitleRS).second;
            HashMap<String, Double> ESFeatures = generateEntitySelectionFeaturesSearch(relatedSearch, wid, rank,
                    webTotalNS, webTotalRelatedSearch, bingBoldsAndRankRS, 5);

            Tag tag = new Tag(wid);
            widToEFFtrVect.add(new Pair<Tag, HashMap<String, Double>>(tag, ESFeatures));
        }
    }

    for (Pair<Tag, HashMap<String, Double>> tagAndFtrs : widToEFFtrVect) {
        Tag tag = tagAndFtrs.first;
        HashMap<String, Double> ftrs = tagAndFtrs.second;
        if (discardNE && !ERDDatasetFilter.EntityIsNE(wikiApi, wikiToFreeb, tag.getConcept()))
            continue;

        if (goldStandard.contains(tag))
            posEFVectors.add(LibSvmEntityFilter.featuresToFtrVectStatic(ftrs));
        else
            negEFVectors.add(LibSvmEntityFilter.featuresToFtrVectStatic(ftrs));
        System.out.printf("%d in query [%s] is a %s example.%n", tag.getConcept(), query,
                goldStandard.contains(tag) ? "positive" : "negative");
    }
}
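
takeBingData above bundles three results into a Triple<Integer, Double, JSONObject>: the result count on the left, the web-total estimate in the middle, and the raw response on the right. A toy analogue of that convention, assuming nothing about the SMAPH API and substituting a plain String for the raw response:

    import org.apache.commons.lang3.tuple.Triple;

    public class SearchResultBundleSketch {

        // Hypothetical stand-in for a search call that must return a count,
        // an estimated total and the raw response in one shot.
        static Triple<Integer, Double, String> fakeSearch(String query) {
            return Triple.of(10, 1234.0, "{\"query\":\"" + query + "\"}");
        }

        public static void main(String[] args) {
            Triple<Integer, Double, String> res = fakeSearch("commons lang");
            int resultsCount = res.getLeft();
            double webTotal = res.getMiddle();
            String rawResponse = res.getRight();
            System.out.println(resultsCount + " " + webTotal + " " + rawResponse);
        }
    }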

From source file:it.acubelab.smaph.SmaphAnnotator.java

@Override
public HashSet<ScoredAnnotation> solveSa2W(String query) throws AnnotationException {
    if (debugger != null)
        debugger.addProcessedQuery(query);

    HashSet<ScoredAnnotation> annotations = new HashSet<>();
    try {

        /** Search the query on bing */
        List<Pair<String, Integer>> bingBoldsAndRankNS = null;
        List<String> urls = null;
        List<String> relatedSearchRes = null;
        Triple<Integer, Double, JSONObject> resCountAndWebTotalNS = null;
        int resultsCount = -1;
        double webTotalNS = Double.NaN;
        List<String> filteredBolds = null;
        HashMap<Integer, Integer> rankToIdNS = null;
        HashMap<Integer, HashSet<String>> rankToBoldsNS = null;
        List<Pair<String, Vector<Pair<Integer, Integer>>>> snippetsToBolds = null;
        if (includeSourceAnnotator || includeSourceWikiSearch || includeSourceRelatedSearch
                || includeSourceNormalSearch) {
            bingBoldsAndRankNS = new Vector<>();
            urls = new Vector<>();
            relatedSearchRes = new Vector<>();
            snippetsToBolds = new Vector<>();
            resCountAndWebTotalNS = takeBingData(query, bingBoldsAndRankNS, urls, relatedSearchRes,
                    snippetsToBolds, Integer.MAX_VALUE, false);
            resultsCount = resCountAndWebTotalNS.getLeft();
            webTotalNS = resCountAndWebTotalNS.getMiddle();
            filteredBolds = boldFilter.filterBolds(query, bingBoldsAndRankNS, resultsCount);
            rankToIdNS = urlsToRankID(urls);
            rankToBoldsNS = new HashMap<>();
            SmaphUtils.mapRankToBoldsLC(bingBoldsAndRankNS, rankToBoldsNS, null);

            if (debugger != null) {
                debugger.addBoldPositionEditDistance(query, bingBoldsAndRankNS);
                debugger.addSnippets(query, snippetsToBolds);
                debugger.addBoldFilterOutput(query, filteredBolds);
                debugger.addSource2SearchResult(query, rankToIdNS, urls);
                debugger.addBingResponseNormalSearch(query, resCountAndWebTotalNS.getRight());
            }
        }

        /** Do the WikipediaSearch on bing. */
        List<String> wikiSearchUrls = new Vector<>();
        List<Pair<String, Integer>> bingBoldsAndRankWS = new Vector<>();
        HashMap<String, Pair<Integer, Integer>> annTitlesToIdAndRankWS = null;
        Triple<Integer, Double, JSONObject> resCountAndWebTotalWS = null;
        HashMap<Integer, HashSet<String>> rankToBoldsWS = null;
        double webTotalWS = Double.NaN;
        if (includeSourceWikiSearch | includeSourceNormalSearch) {
            resCountAndWebTotalWS = takeBingData(query, bingBoldsAndRankWS, wikiSearchUrls, null, null,
                    topKWikiSearch, true);
            webTotalWS = resCountAndWebTotalWS.getMiddle();
            HashMap<Integer, Integer> rankToIdWikiSearch = urlsToRankID(wikiSearchUrls);
            rankToBoldsWS = new HashMap<>();
            SmaphUtils.mapRankToBoldsLC(bingBoldsAndRankWS, rankToBoldsWS, null);
            if (debugger != null) {
                debugger.addSource3SearchResult(query, rankToIdWikiSearch, wikiSearchUrls);
                debugger.addBingResponseWikiSearch(query, resCountAndWebTotalWS.getRight());

            }
            annTitlesToIdAndRankWS = adjustTitles(rankToIdWikiSearch);
        }

        /** Do the RelatedSearch on bing */
        String relatedSearch = null;
        List<String> relatedSearchUrls = null;
        List<Pair<String, Integer>> bingBoldsAndRankRS = null;
        HashMap<Integer, Integer> rankToIdRelatedSearch = null;
        HashMap<String, Pair<Integer, Integer>> annTitlesToIdAndRankRS = null;
        double webTotalRelatedSearch = Double.NaN;
        HashMap<Integer, HashSet<String>> rankToBoldsRS = null;
        if (includeSourceRelatedSearch) {
            relatedSearch = getRelatedSearch(relatedSearchRes, query);
            relatedSearchUrls = new Vector<>();
            bingBoldsAndRankRS = new Vector<>();
            Triple<Integer, Double, JSONObject> resCountAndWebTotalRS = takeBingData(query, bingBoldsAndRankRS,
                    relatedSearchUrls, null, null, topKRelatedSearch, false);
            webTotalRelatedSearch = resCountAndWebTotalRS.getMiddle();
            rankToIdRelatedSearch = urlsToRankID(relatedSearchUrls);
            annTitlesToIdAndRankRS = adjustTitles(rankToIdRelatedSearch);
            rankToBoldsRS = new HashMap<>();
            SmaphUtils.mapRankToBoldsLC(bingBoldsAndRankRS, rankToBoldsRS, null);

        }

        /** Annotate bolds on the annotator */
        Pair<HashMap<String, HashMap<String, Double>>, HashMap<String, Annotation>> infoAndAnnotations = null;
        HashMap<String, Annotation> spotToAnnotation = null;
        HashMap<String, HashMap<String, Double>> additionalInfo = null;
        Pair<String, HashSet<Mention>> annInput = null;
        if (includeSourceAnnotator) {
            annInput = concatenateBolds(filteredBolds);
            infoAndAnnotations = disambiguateBolds(annInput.first, annInput.second);
            spotToAnnotation = infoAndAnnotations.second;
            additionalInfo = infoAndAnnotations.first;

            if (debugger != null)
                debugger.addReturnedAnnotation(query, spotToAnnotation);
        }

        HashMap<String[], Tag> boldsToAcceptedEntity = new HashMap<>();

        // Filter and add annotations found by the disambiguator
        if (includeSourceAnnotator) {
            for (String bold : filteredBolds) {
                if (spotToAnnotation.containsKey(bold)) {
                    Annotation ann = spotToAnnotation.get(bold);
                    HashMap<String, Double> ESFeatures = generateEntitySelectionFeaturesAnnotator(query,
                            resultsCount, ann, annInput, bingBoldsAndRankNS, additionalInfo);
                    boolean accept = entityFilter.filterEntity(ESFeatures);
                    if (accept)
                        boldsToAcceptedEntity.put(new String[] { bold }, new Tag(ann.getConcept()));
                    if (debugger != null) {
                        HashSet<String> bolds = new HashSet<>();
                        bolds.add(bold);
                        debugger.addQueryCandidateBolds(query, "Source 1", ann.getConcept(), bolds);
                        debugger.addEntityFeaturesS1(query, bold, ann.getConcept(), ESFeatures, accept);
                        if (accept)
                            debugger.addResult(query, ann.getConcept());
                    }
                }
            }
        }

        // Filter and add entities found in the normal search
        if (includeSourceNormalSearch) {
            for (int rank : rankToIdNS.keySet()) {
                int wid = rankToIdNS.get(rank);
                HashMap<String, Double> ESFeatures = generateEntitySelectionFeaturesSearch(query, wid, rank,
                        webTotalNS, webTotalWS, bingBoldsAndRankNS, 2);
                HashSet<String> bolds = rankToBoldsNS.get(rank);
                boolean accept = entityFilter.filterEntity(ESFeatures);
                if (accept)
                    boldsToAcceptedEntity.put(bolds.toArray(new String[] {}), new Tag(wid));
                if (debugger != null) {
                    debugger.addQueryCandidateBolds(query, "Source 2", wid, bolds);
                    debugger.addEntityFeaturesS2(query, wid, ESFeatures, accept);
                    if (accept)
                        debugger.addResult(query, wid);
                }
            }
        }

        // Filter and add entities found in the WikipediaSearch
        if (includeSourceWikiSearch) {
            for (String annotatedTitleWS : annTitlesToIdAndRankWS.keySet()) {
                int wid = annTitlesToIdAndRankWS.get(annotatedTitleWS).first;
                int rank = annTitlesToIdAndRankWS.get(annotatedTitleWS).second;
                HashMap<String, Double> ESFeatures = generateEntitySelectionFeaturesSearch(query, wid, rank,
                        webTotalNS, webTotalWS, bingBoldsAndRankWS, 3);

                HashSet<String> bolds = rankToBoldsWS.get(rank);
                boolean accept = entityFilter.filterEntity(ESFeatures);
                if (accept)
                    boldsToAcceptedEntity.put(bolds.toArray(new String[] {}), new Tag(wid));
                if (debugger != null) {
                    debugger.addQueryCandidateBolds(query, "Source 3", wid, bolds);
                    debugger.addEntityFeaturesS3(query, wid, ESFeatures, accept);
                    if (accept)
                        debugger.addResult(query, wid);

                }
            }
        }

        // Filter and add entities found in the RelatedSearch
        if (includeSourceRelatedSearch) {
            for (String annotatedTitleRS : annTitlesToIdAndRankRS.keySet()) {
                int wid = annTitlesToIdAndRankRS.get(annotatedTitleRS).first;
                int rank = annTitlesToIdAndRankRS.get(annotatedTitleRS).second;
                HashMap<String, Double> ESFeatures = generateEntitySelectionFeaturesSearch(relatedSearch, wid,
                        rank, webTotalNS, webTotalRelatedSearch, bingBoldsAndRankRS, 5);

                HashSet<String> bolds = rankToBoldsRS.get(rank);
                boolean accept = entityFilter.filterEntity(ESFeatures);
                if (accept)
                    boldsToAcceptedEntity.put(bolds.toArray(new String[] {}), new Tag(wid));
            }
        }

        /** Link entities back to query mentions */

        annotations = linkBack.linkBack(query, boldsToAcceptedEntity);

    } catch (Exception e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    }
    SmaphAnnotatorDebugger.out.printf("*** END :%s ***%n", query);

    return annotations;

}

From source file:de.tu_dortmund.ub.data.dswarm.TaskProcessingUnit.java

private static void executeTPUPartsOnDemand(final Optional<Boolean> optionalDoInit,
        final Optional<Boolean> optionalAllowMultipleDataModels, String[] watchFolderFiles,
        final String resourceWatchFolder, final Optional<String> optionalOutputDataModelID,
        final String serviceName, final Integer engineThreads,
        final Optional<Boolean> optionalDoTransformations, final Optional<Boolean> optionalDoIngestOnTheFly,
        final Optional<Boolean> optionalDoExportOnTheFly, final Optional<String> optionalExportMimeType,
        final Optional<String> optionalExportFileExtension, final Properties config) throws Exception {

    // keys = input data models; values = related data resources
    final Map<String, Triple<String, String, String>> inputDataModelsAndResources = new HashMap<>();

    // init
    if (optionalDoInit.isPresent() && optionalDoInit.get()) {

        if (optionalAllowMultipleDataModels.isPresent() && optionalAllowMultipleDataModels.get()) {

            for (int i = 0; i < watchFolderFiles.length; i++) {

                final String initResourceFileName = watchFolderFiles[i];

                doInit(resourceWatchFolder, initResourceFileName, serviceName, engineThreads, config,
                        inputDataModelsAndResources);

                // remove the file already processed during init from the files list to avoid duplicates
                watchFolderFiles = ArrayUtils.removeElement(watchFolderFiles, initResourceFileName);
            }
        } else {

            // use the first file in the folder for init
            final String initResourceFileName = watchFolderFiles[0];

            doInit(resourceWatchFolder, initResourceFileName, serviceName, engineThreads, config,
                    inputDataModelsAndResources);

            // remove the file already processed during init from the files list to avoid duplicates
            watchFolderFiles = ArrayUtils.removeElement(watchFolderFiles, initResourceFileName);
        }
    } else {

        final String inputDataModelID = config.getProperty(TPUStatics.PROTOTYPE_INPUT_DATA_MODEL_ID_IDENTIFIER);
        final String resourceID = config.getProperty(TPUStatics.PROTOTYPE_RESOURCE_ID_INDENTIFIER);

        inputDataModelsAndResources.put(inputDataModelID, Triple.of(inputDataModelID, resourceID, null));

        LOG.info("skip init part");
    }

    final Optional<Boolean> optionalDoIngest = TPUUtil.getBooleanConfigValue(TPUStatics.DO_INGEST_IDENTIFIER,
            config);

    // ingest
    if (optionalDoIngest.isPresent() && optionalDoIngest.get()) {

        final String projectName = config.getProperty(TPUStatics.PROJECT_NAME_IDENTIFIER);

        if (!optionalAllowMultipleDataModels.isPresent() || !optionalAllowMultipleDataModels.get()) {

            final Set<Map.Entry<String, Triple<String, String, String>>> entries = inputDataModelsAndResources
                    .entrySet();
            final Iterator<Map.Entry<String, Triple<String, String, String>>> iterator = entries.iterator();
            final Map.Entry<String, Triple<String, String, String>> entry = iterator.next();

            final String inputDataModelID = entry.getKey();
            final Triple<String, String, String> triple = entry.getValue();
            final String resourceID = triple.getMiddle();

            executeIngests(watchFolderFiles, inputDataModelID, resourceID, projectName, serviceName,
                    engineThreads, config);
        }
    } else {

        LOG.info("skip ingest");
    }

    if (!optionalOutputDataModelID.isPresent()) {

        throw new Exception(
                "please set an output data model ('prototype.outputDataModelID') for this TPU task");
    }

    final String outputDataModelID = optionalOutputDataModelID.get();

    // task execution
    if (optionalDoTransformations.isPresent() && optionalDoTransformations.get()) {

        if (optionalAllowMultipleDataModels.isPresent() && optionalAllowMultipleDataModels.get()) {

            final Set<Map.Entry<String, Triple<String, String, String>>> entries = inputDataModelsAndResources
                    .entrySet();

            for (final Map.Entry<String, Triple<String, String, String>> entry : entries) {

                final String inputDataModelID = entry.getKey();

                executeTransform(inputDataModelID, outputDataModelID, optionalDoIngestOnTheFly,
                        optionalDoExportOnTheFly, optionalExportMimeType, optionalExportFileExtension,
                        engineThreads, serviceName, config);
            }
        } else {

            final Set<Map.Entry<String, Triple<String, String, String>>> entries = inputDataModelsAndResources
                    .entrySet();
            final Iterator<Map.Entry<String, Triple<String, String, String>>> iterator = entries.iterator();
            final Map.Entry<String, Triple<String, String, String>> entry = iterator.next();

            final String inputDataModelID = entry.getKey();

            executeTransform(inputDataModelID, outputDataModelID, optionalDoIngestOnTheFly,
                    optionalDoExportOnTheFly, optionalExportMimeType, optionalExportFileExtension,
                    engineThreads, serviceName, config);
        }
    } else {

        LOG.info("skip transformations");
    }

    final Optional<Boolean> optionalDoExport = TPUUtil.getBooleanConfigValue(TPUStatics.DO_EXPORT_IDENTIFIER,
            config);

    // export
    if (optionalDoExport.isPresent() && optionalDoExport.get()) {

        if (!optionalAllowMultipleDataModels.isPresent() || !optionalAllowMultipleDataModels.get()) {

            final String exportDataModelID;

            if (outputDataModelID != null && !outputDataModelID.trim().isEmpty()) {

                exportDataModelID = outputDataModelID;
            } else {

                final Set<Map.Entry<String, Triple<String, String, String>>> entries = inputDataModelsAndResources
                        .entrySet();
                final Iterator<Map.Entry<String, Triple<String, String, String>>> iterator = entries.iterator();
                final Map.Entry<String, Triple<String, String, String>> entry = iterator.next();

                exportDataModelID = entry.getKey();
            }

            executeExport(exportDataModelID, optionalExportMimeType, optionalExportFileExtension, engineThreads,
                    serviceName, config);
        }
    } else {

        LOG.info("skip export");
    }

    // clean-up
    int cnt = 0;

    final String engineDswarmAPI = config.getProperty(TPUStatics.ENGINE_DSWARM_API_IDENTIFIER);

    final Set<Map.Entry<String, Triple<String, String, String>>> entries = inputDataModelsAndResources
            .entrySet();

    for (final Map.Entry<String, Triple<String, String, String>> entry : entries) {

        final Triple<String, String, String> triple = entry.getValue();

        final String inputDataModelId = triple.getLeft();
        final String resourceId = triple.getMiddle();
        final String configurationId = triple.getRight();

        TPUUtil.deleteObject(inputDataModelId, DswarmBackendStatics.DATAMODELS_ENDPOINT, serviceName,
                engineDswarmAPI, cnt);
        TPUUtil.deleteObject(resourceId, DswarmBackendStatics.RESOURCES_ENDPOINT, serviceName, engineDswarmAPI,
                cnt);
        TPUUtil.deleteObject(configurationId, DswarmBackendStatics.CONFIGURATIONS_ENDPOINT, serviceName,
                engineDswarmAPI, cnt);

        cnt++;
    }
}

From source file:np2amr.StateTest.java

private List<Token> loadToks(Path path) throws IOException {
    Triple<List<List<Token>>, Map<Integer, Set<Concept>>, Set<Integer>> t = Io.loadAlignment(path);
    Config.conceptTable = t.getMiddle();
    return t.getLeft().get(0);
}

From source file:org.apache.calcite.rel.rules.AbstractMaterializedViewRule.java

/**
 * Rewriting logic is based on "Optimizing Queries Using Materialized Views:
 * A Practical, Scalable Solution" by Goldstein and Larson.
 *
 * <p>On the query side, rules matches a Project-node chain or node, where node
 * is either an Aggregate or a Join. Subplan rooted at the node operator must
 * be composed of one or more of the following operators: TableScan, Project,
 * Filter, and Join.
 *
 * <p>For each join MV, we need to check the following:
 * <ol>
 * <li> The plan rooted at the Join operator in the view produces all rows
 * needed by the plan rooted at the Join operator in the query.</li>
 * <li> All columns required by compensating predicates, i.e., predicates that
 * need to be enforced over the view, are available at the view output.</li>
 * <li> All output expressions can be computed from the output of the view.</li>
 * <li> All output rows occur with the correct duplication factor. We might
 * rely on existing Unique-Key - Foreign-Key relationships to extract that
 * information.</li>
 * </ol>
 *
 * <p>In turn, for each aggregate MV, we need to check the following:
 * <ol>
 * <li> The plan rooted at the Aggregate operator in the view produces all rows
 * needed by the plan rooted at the Aggregate operator in the query.</li>
 * <li> All columns required by compensating predicates, i.e., predicates that
 * need to be enforced over the view, are available at the view output.</li>
 * <li> The grouping columns in the query are a subset of the grouping columns
 * in the view.</li>
 * <li> All columns required to perform further grouping are available in the
 * view output.</li>
 * <li> All columns required to compute output expressions are available in the
 * view output.</li>
 * </ol>
 */
protected void perform(RelOptRuleCall call, Project topProject, RelNode node) {
    final RexBuilder rexBuilder = node.getCluster().getRexBuilder();
    final RelMetadataQuery mq = RelMetadataQuery.instance();
    final RelOptPlanner planner = call.getPlanner();
    final RexSimplify simplify = new RexSimplify(rexBuilder, true,
            planner.getExecutor() != null ? planner.getExecutor() : RexUtil.EXECUTOR);

    final List<RelOptMaterialization> materializations = (planner instanceof VolcanoPlanner)
            ? ((VolcanoPlanner) planner).getMaterializations()
            : ImmutableList.<RelOptMaterialization>of();

    if (!materializations.isEmpty()) {
        // 1. Explore query plan to recognize whether preconditions to
        // try to generate a rewriting are met
        if (!isValidPlan(topProject, node, mq)) {
            return;
        }

        // Obtain applicable (filtered) materializations
        // TODO: Filtering of relevant materializations needs to be
        // improved so we gather only materializations that might
        // actually generate a valid rewriting.
        final List<RelOptMaterialization> applicableMaterializations = RelOptMaterializations
                .getApplicableMaterializations(node, materializations);

        if (!applicableMaterializations.isEmpty()) {
            // 2. Initialize all query related auxiliary data structures
            // that will be used throughout query rewriting process
            // Generate query table references
            final Set<RelTableRef> queryTableRefs = mq.getTableReferences(node);
            if (queryTableRefs == null) {
                // Bail out
                return;
            }

            // Extract query predicates
            final RelOptPredicateList queryPredicateList = mq.getAllPredicates(node);
            if (queryPredicateList == null) {
                // Bail out
                return;
            }
            final RexNode pred = simplify.simplify(
                    RexUtil.composeConjunction(rexBuilder, queryPredicateList.pulledUpPredicates, false));
            final Triple<RexNode, RexNode, RexNode> queryPreds = splitPredicates(rexBuilder, pred);

            // Extract query equivalence classes. An equivalence class is a set
            // of columns in the query output that are known to be equal.
            final EquivalenceClasses qEC = new EquivalenceClasses();
            for (RexNode conj : RelOptUtil.conjunctions(queryPreds.getLeft())) {
                assert conj.isA(SqlKind.EQUALS);
                RexCall equiCond = (RexCall) conj;
                qEC.addEquivalenceClass((RexTableInputRef) equiCond.getOperands().get(0),
                        (RexTableInputRef) equiCond.getOperands().get(1));
            }

            // 3. We iterate through all applicable materializations trying to
            // rewrite the given query
            for (RelOptMaterialization materialization : applicableMaterializations) {
                final Project topViewProject;
                final RelNode viewNode;
                if (materialization.queryRel instanceof Project) {
                    topViewProject = (Project) materialization.queryRel;
                    viewNode = topViewProject.getInput();
                } else {
                    topViewProject = null;
                    viewNode = materialization.queryRel;
                }

                // 3.1. View checks before proceeding
                if (!isValidPlan(topViewProject, viewNode, mq)) {
                    // Skip it
                    continue;
                }

                // 3.2. Initialize all query related auxiliary data structures
                // that will be used throughout query rewriting process
                // Extract view predicates
                final RelOptPredicateList viewPredicateList = mq.getAllPredicates(viewNode);
                if (viewPredicateList == null) {
                    // Skip it
                    continue;
                }
                final RexNode viewPred = simplify.simplify(
                        RexUtil.composeConjunction(rexBuilder, viewPredicateList.pulledUpPredicates, false));
                final Triple<RexNode, RexNode, RexNode> viewPreds = splitPredicates(rexBuilder, viewPred);

                // Extract view table references
                final Set<RelTableRef> viewTableRefs = mq.getTableReferences(viewNode);
                if (viewTableRefs == null) {
                    // Bail out
                    return;
                }

                // Extract view tables
                MatchModality matchModality;
                Multimap<RexTableInputRef, RexTableInputRef> compensationEquiColumns = ArrayListMultimap
                        .create();
                if (!queryTableRefs.equals(viewTableRefs)) {
                    // We try to compensate, e.g., for join queries it might be
                    // possible to join missing tables with view to compute result.
                    // Two supported cases: query tables are subset of view tables (we need to
                    // check whether they are cardinality-preserving joins), or view tables are
                    // subset of query tables (add additional tables through joins if possible)
                    if (viewTableRefs.containsAll(queryTableRefs)) {
                        matchModality = MatchModality.QUERY_PARTIAL;
                        final EquivalenceClasses vEC = new EquivalenceClasses();
                        for (RexNode conj : RelOptUtil.conjunctions(viewPreds.getLeft())) {
                            assert conj.isA(SqlKind.EQUALS);
                            RexCall equiCond = (RexCall) conj;
                            vEC.addEquivalenceClass((RexTableInputRef) equiCond.getOperands().get(0),
                                    (RexTableInputRef) equiCond.getOperands().get(1));
                        }
                        if (!compensateQueryPartial(compensationEquiColumns, viewTableRefs, vEC,
                                queryTableRefs)) {
                            // Cannot rewrite, skip it
                            continue;
                        }
                    } else if (queryTableRefs.containsAll(viewTableRefs)) {
                        // TODO: implement latest case
                        matchModality = MatchModality.VIEW_PARTIAL;
                        continue;
                    } else {
                        // Skip it
                        continue;
                    }
                } else {
                    matchModality = MatchModality.COMPLETE;
                }

                // 4. We map every table in the query to a view table with the same qualified
                // name.
                final Multimap<RelTableRef, RelTableRef> multiMapTables = ArrayListMultimap.create();
                for (RelTableRef queryTableRef : queryTableRefs) {
                    for (RelTableRef viewTableRef : viewTableRefs) {
                        if (queryTableRef.getQualifiedName().equals(viewTableRef.getQualifiedName())) {
                            multiMapTables.put(queryTableRef, viewTableRef);
                        }
                    }
                }

                // If a table is used multiple times, we will create multiple mappings,
                // and we will try to rewrite the query using each of the mappings.
                // Then, we will try to map every source table (query) to a target
                // table (view), and if we are successful, we will try to create
                // compensation predicates to filter the view results further
                // (if needed).
                final List<BiMap<RelTableRef, RelTableRef>> flatListMappings = generateTableMappings(
                        multiMapTables);
                for (BiMap<RelTableRef, RelTableRef> tableMapping : flatListMappings) {
                    // 4.0. If compensation equivalence classes exist, we need to add
                    // the mapping to the query mapping
                    final EquivalenceClasses currQEC = EquivalenceClasses.copy(qEC);
                    if (matchModality == MatchModality.QUERY_PARTIAL) {
                        for (Entry<RexTableInputRef, RexTableInputRef> e : compensationEquiColumns.entries()) {
                            // Copy origin
                            RelTableRef queryTableRef = tableMapping.inverse().get(e.getKey().getTableRef());
                            RexTableInputRef queryColumnRef = RexTableInputRef.of(queryTableRef,
                                    e.getKey().getIndex(), e.getKey().getType());
                            // Add to query equivalence classes and table mapping
                            currQEC.addEquivalenceClass(queryColumnRef, e.getValue());
                            tableMapping.put(e.getValue().getTableRef(), e.getValue().getTableRef()); //identity
                        }
                    }

                    final RexNode compensationColumnsEquiPred;
                    final RexNode compensationRangePred;
                    final RexNode compensationResidualPred;

                    // 4.1. Establish relationship between view and query equivalence classes.
                    // If every view equivalence class is not a subset of a query
                    // equivalence class, we bail out.
                    // To establish relationship, we swap column references of the view predicates
                    // to point to query tables. Then, we create the equivalence classes for the
                    // view predicates and check that every view equivalence class is a subset of a
                    // query equivalence class: if it is not, we bail out.
                    final RexNode viewColumnsEquiPred = RexUtil.swapTableReferences(rexBuilder,
                            viewPreds.getLeft(), tableMapping.inverse());
                    final EquivalenceClasses queryBasedVEC = new EquivalenceClasses();
                    for (RexNode conj : RelOptUtil.conjunctions(viewColumnsEquiPred)) {
                        assert conj.isA(SqlKind.EQUALS);
                        RexCall equiCond = (RexCall) conj;
                        queryBasedVEC.addEquivalenceClass((RexTableInputRef) equiCond.getOperands().get(0),
                                (RexTableInputRef) equiCond.getOperands().get(1));
                    }
                    compensationColumnsEquiPred = generateEquivalenceClasses(rexBuilder, currQEC,
                            queryBasedVEC);
                    if (compensationColumnsEquiPred == null) {
                        // Skip it
                        continue;
                    }

                    // 4.2. We check that range intervals for the query are contained in the view.
                    // Compute compensating predicates.
                    final RexNode queryRangePred = RexUtil.swapColumnReferences(rexBuilder,
                            queryPreds.getMiddle(), currQEC.getEquivalenceClassesMap());
                    final RexNode viewRangePred = RexUtil.swapTableColumnReferences(rexBuilder,
                            viewPreds.getMiddle(), tableMapping.inverse(), currQEC.getEquivalenceClassesMap());
                    compensationRangePred = SubstitutionVisitor.splitFilter(simplify, queryRangePred,
                            viewRangePred);
                    if (compensationRangePred == null) {
                        // Skip it
                        continue;
                    }

                    // 4.3. Finally, we check that residual predicates of the query are satisfied
                    // within the view.
                    // Compute compensating predicates.
                    final RexNode queryResidualPred = RexUtil.swapColumnReferences(rexBuilder,
                            queryPreds.getRight(), currQEC.getEquivalenceClassesMap());
                    final RexNode viewResidualPred = RexUtil.swapTableColumnReferences(rexBuilder,
                            viewPreds.getRight(), tableMapping.inverse(), currQEC.getEquivalenceClassesMap());
                    compensationResidualPred = SubstitutionVisitor.splitFilter(simplify, queryResidualPred,
                            viewResidualPred);
                    if (compensationResidualPred == null) {
                        // Skip it
                        continue;
                    }

                    // 4.4. Final compensation predicate.
                    RexNode compensationPred = RexUtil.composeConjunction(rexBuilder, ImmutableList
                            .of(compensationColumnsEquiPred, compensationRangePred, compensationResidualPred),
                            false);
                    if (!compensationPred.isAlwaysTrue()) {
                        // All columns required by compensating predicates must be contained
                        // in the view output (condition 2).
                        List<RexNode> viewExprs = extractExpressions(topViewProject, viewNode, rexBuilder);
                        compensationPred = rewriteExpression(rexBuilder, viewNode, viewExprs, compensationPred,
                                tableMapping, currQEC.getEquivalenceClassesMap(), mq);
                        if (compensationPred == null) {
                            // Skip it
                            continue;
                        }
                    }

                    // 4.5. Generate final rewriting if possible.
                    // First, we add the compensation predicate (if any) on top of the view.
                    // Then, we trigger the Aggregate unifying method. This method will either create
                    // a Project or an Aggregate operator on top of the view. It will also compute the
                    // output expressions for the query.
                    RelBuilder builder = call.builder();
                    builder.push(materialization.tableRel);
                    if (!compensationPred.isAlwaysTrue()) {
                        builder.filter(simplify.simplify(compensationPred));
                    }
                    RelNode result = unify(rexBuilder, builder, builder.build(), topProject, node,
                            topViewProject, viewNode, tableMapping, currQEC.getEquivalenceClassesMap(), mq);
                    if (result == null) {
                        // Skip it
                        continue;
                    }
                    call.transformTo(result);
                }
            }
        }
    }
}
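
splitPredicates above returns a Triple<RexNode, RexNode, RexNode> holding the column-equality, range, and residual conjuncts, which the rewriting then consumes via getLeft, getMiddle and getRight. A toy analogue of that three-way split over plain strings, assuming nothing about Calcite's API:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    import org.apache.commons.lang3.tuple.Triple;

    public class PredicateSplitSketch {

        // Hypothetical three-way classification of conjuncts:
        // left = equalities, middle = range comparisons, right = everything else.
        static Triple<List<String>, List<String>, List<String>> split(List<String> conjuncts) {
            List<String> equi = new ArrayList<>();
            List<String> range = new ArrayList<>();
            List<String> residual = new ArrayList<>();
            for (String c : conjuncts) {
                if (c.contains("<") || c.contains(">")) {
                    range.add(c);
                } else if (c.contains("=")) {
                    equi.add(c);
                } else {
                    residual.add(c);
                }
            }
            return Triple.of(equi, range, residual);
        }

        public static void main(String[] args) {
            Triple<List<String>, List<String>, List<String>> preds =
                    split(Arrays.asList("a = b", "c > 5", "f(x)"));
            System.out.println("equi=" + preds.getLeft());
            System.out.println("range=" + preds.getMiddle());
            System.out.println("residual=" + preds.getRight());
        }
    }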