List of usage examples for org.apache.commons.lang3.tuple.Triple.getMiddle()
public abstract M getMiddle();
Gets the middle element from this triple.
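Before the project examples below, here is a minimal self-contained sketch of getMiddle in isolation (the class name GetMiddleExample is illustrative; Triple.of is the companion factory and getLeft/getRight are the sibling accessors in the same commons-lang3 class):

import org.apache.commons.lang3.tuple.Triple;

public class GetMiddleExample {
    public static void main(String[] args) {
        // Triple.of builds an immutable triple; getMiddle returns its second element.
        Triple<String, Integer, Boolean> t = Triple.of("left", 42, true);
        System.out.println(t.getLeft());   // left
        System.out.println(t.getMiddle()); // 42
        System.out.println(t.getRight());  // true
    }
}

As the examples below show, a Triple is typically used to return three related values from one method without declaring a dedicated result class.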
From source file:com.clust4j.algo.NNHSTests.java
@Test
public void testBall1() {
    final Array2DRowRealMatrix mat = new Array2DRowRealMatrix(a, false);
    BallTree ball = new BallTree(mat);
    QuadTup<double[][], int[], NodeData[], double[][][]> arrays = ball.getArrays();

    assertTrue(MatUtils.equalsExactly(arrays.getFirst(), a));
    assertTrue(VecUtils.equalsExactly(new int[] { 0, 1, 2 }, arrays.getSecond()));

    Triple<Integer, Integer, Integer> stats = ball.getTreeStats();
    assertTrue(stats.getLeft() == 0);
    assertTrue(stats.getMiddle() == 0);
    assertTrue(stats.getRight() == 0);

    NodeData data = arrays.getThird()[0];
    assertTrue(data.idx_start == 0);
    assertTrue(data.idx_end == 3);
    assertTrue(data.is_leaf);
    assertTrue(data.radius == 6.716480559869961);

    double[][][] trip = arrays.getFourth();
    assertTrue(trip.length == 1);
    assertTrue(trip[0][0][0] == 1.6666666666666667);
    assertTrue(trip[0][0][1] == 2.3333333333333333);
    assertTrue(trip[0][0][2] == 2.6666666666666667);
    assertTrue(trip[0][0][3] == 2.6666666666666667);
}
From source file:com.magnet.mmx.server.plugin.mmxmgmt.util.MMXConfigurationTest.java
/**
 * Set the Configuration property via JMX and check whether the REST interface
 * returns the same value.
 *
 * @throws Exception
 */
// TODO: Use the web target JAXRS API to execute these requests, possibly?
@Ignore
@Test
public void testSetMBeanLocalGetREST() throws Exception {
    ObjectName name = new ObjectName(MMX_MBEAN_OBJECT_NAME);
    ServletTester tester = new ServletTester();
    tester.addServlet(LaxConfigServlet.class, "/config");
    tester.start();
    for (Triple<String, String, String> triple : mbeanAttributes) {
        String attrName = triple.getLeft();
        String attrType = triple.getRight();
        Object attrValue;
        if (attrType.equals("int")) {
            attrValue = RandomUtils.nextInt(30000, 65535);
        } else if (attrType.equals("long")) {
            attrValue = RandomUtils.nextLong(10, 1000);
        } else {
            attrValue = RandomStringUtils.randomAlphabetic(10);
        }
        Attribute attr1 = new Attribute(attrName, attrValue);
        server.setAttribute(name, attr1);
        Object attr2 = server.getAttribute(name, attrName);
        assertEquals("Attribute values do not match", attrValue, attr2);

        HttpTester request = new HttpTester();
        // HttpTester.Request request = HttpTester.newRequest();
        request.setMethod("GET");
        request.setHeader("Host", "tester");
        request.setURI("/config");
        request.setContent("");
        HttpTester response = new HttpTester();
        response.parse(tester.getResponses(request.generate()));

        JsonElement jelement = new JsonParser().parse(response.getContent());
        JsonObject jobject = jelement.getAsJsonObject();
        jobject = jobject.getAsJsonObject("configs");
        String attrValueRest = jobject.get(triple.getMiddle()).getAsString();
        if (attrType.equals("int"))
            assertEquals("Values do not match", attrValue, Integer.parseInt(attrValueRest));
        else if (attrType.equals("long"))
            assertEquals("Values do not match", attrValue, Long.parseLong(attrValueRest));
        else
            assertEquals("Values do not match", attrValue, attrValueRest);
    }
}
From source file:it.acubelab.smaph.SmaphAnnotator.java
/**
 * Given a query and its gold standard, generate the positive- and
 * negative-example feature vectors for the entity filter.
 *
 * @param query
 *            a query.
 * @param goldStandard
 *            the entities associated to the query.
 * @param posEFVectors
 *            where to store the positive-example (true positives) feature
 *            vectors.
 * @param negEFVectors
 *            where to store the negative-example (false positives) feature
 *            vectors.
 * @param discardNE
 *            whether to limit the output to named entities, as defined by
 *            ERDDatasetFilter.EntityIsNE.
 * @param wikiToFreeb
 *            a wikipedia to freebase-id mapping.
 * @throws Exception
 *             if something went wrong while annotating the query.
 */
public void generateExamples(String query, HashSet<Tag> goldStandard, Vector<double[]> posEFVectors,
        Vector<double[]> negEFVectors, boolean discardNE, WikipediaToFreebase wikiToFreeb) throws Exception {

    /** Search the query on bing */
    List<Pair<String, Integer>> bingBoldsAndRankNS = null;
    List<String> urls = null;
    List<String> relatedSearchRes = null;
    Triple<Integer, Double, JSONObject> resCountAndWebTotal = null;
    int resultsCount = -1;
    double webTotalNS = Double.NaN;
    List<String> filteredBolds = null;
    HashMap<Integer, Integer> rankToIdNS = null;
    if (includeSourceAnnotator || includeSourceWikiSearch || includeSourceRelatedSearch
            || includeSourceNormalSearch) {
        bingBoldsAndRankNS = new Vector<>();
        urls = new Vector<>();
        relatedSearchRes = new Vector<>();
        resCountAndWebTotal = takeBingData(query, bingBoldsAndRankNS, urls, relatedSearchRes, null,
                Integer.MAX_VALUE, false);
        resultsCount = resCountAndWebTotal.getLeft();
        webTotalNS = resCountAndWebTotal.getMiddle();
        filteredBolds = boldFilter.filterBolds(query, bingBoldsAndRankNS, resultsCount);
        rankToIdNS = urlsToRankID(urls);
        if (debugger != null) {
            debugger.addBoldPositionEditDistance(query, bingBoldsAndRankNS);
            debugger.addBoldFilterOutput(query, filteredBolds);
            debugger.addSource2SearchResult(query, rankToIdNS, urls);
            debugger.addBingResponseNormalSearch(query, resCountAndWebTotal.getRight());
        }
    }

    /** Do the wikipedia-search on bing. */
    List<String> wikiSearchUrls = new Vector<>();
    List<Pair<String, Integer>> bingBoldsAndRankWS = new Vector<>();
    HashMap<String, Pair<Integer, Integer>> annTitlesToIdAndRankWS = null;
    Triple<Integer, Double, JSONObject> resCountAndWebTotalWS = null;
    double webTotalWS = Double.NaN;
    if (includeSourceWikiSearch | includeSourceNormalSearch) {
        resCountAndWebTotalWS = takeBingData(query, bingBoldsAndRankWS, wikiSearchUrls, null, null,
                topKWikiSearch, true);
        webTotalWS = resCountAndWebTotalWS.getMiddle();
        HashMap<Integer, Integer> rankToIdWikiSearch = urlsToRankID(wikiSearchUrls);
        if (debugger != null) {
            debugger.addSource3SearchResult(query, rankToIdWikiSearch, wikiSearchUrls);
            debugger.addBingResponseWikiSearch(query, resCountAndWebTotal.getRight());
        }
        annTitlesToIdAndRankWS = adjustTitles(rankToIdWikiSearch);
    }

    /** Do the RelatedSearch on bing */
    String relatedSearch = null;
    List<String> relatedSearchUrls = null;
    List<Pair<String, Integer>> bingBoldsAndRankRS = null;
    HashMap<Integer, Integer> rankToIdRelatedSearch = null;
    HashMap<String, Pair<Integer, Integer>> annTitlesToIdAndRankRS = null;
    double webTotalRelatedSearch = Double.NaN;
    if (includeSourceRelatedSearch) {
        relatedSearch = getRelatedSearch(relatedSearchRes, query);
        relatedSearchUrls = new Vector<>();
        bingBoldsAndRankRS = new Vector<>();
        Triple<Integer, Double, JSONObject> resCountAndWebTotalRS = takeBingData(query, bingBoldsAndRankRS,
                relatedSearchUrls, null, null, topKRelatedSearch, false);
        webTotalRelatedSearch = resCountAndWebTotalRS.getMiddle();
        rankToIdRelatedSearch = urlsToRankID(relatedSearchUrls);
        annTitlesToIdAndRankRS = adjustTitles(rankToIdRelatedSearch);
    }

    /** Annotate bolds on the annotator */
    Pair<HashMap<String, HashMap<String, Double>>, HashMap<String, Annotation>> infoAndAnnotations = null;
    HashMap<String, Annotation> spotToAnnotation = null;
    HashMap<String, HashMap<String, Double>> additionalInfo = null;
    Pair<String, HashSet<Mention>> annInput = null;
    if (includeSourceAnnotator) {
        annInput = concatenateBolds(filteredBolds);
        infoAndAnnotations = disambiguateBolds(annInput.first, annInput.second);
        spotToAnnotation = infoAndAnnotations.second;
        additionalInfo = infoAndAnnotations.first;
        if (debugger != null)
            debugger.addReturnedAnnotation(query, spotToAnnotation);
    }

    List<Pair<Tag, HashMap<String, Double>>> widToEFFtrVect = new Vector<>();
    // Filter and add annotations found by the disambiguator
    if (includeSourceAnnotator) {
        for (String bold : filteredBolds) {
            if (spotToAnnotation.containsKey(bold)) {
                Annotation ann = spotToAnnotation.get(bold);
                HashMap<String, Double> ESFeatures = generateEntitySelectionFeaturesAnnotator(query,
                        resultsCount, ann, annInput, bingBoldsAndRankNS, additionalInfo);
                Tag tag = new Tag(ann.getConcept());
                widToEFFtrVect.add(new Pair<Tag, HashMap<String, Double>>(tag, ESFeatures));
            }
        }
    }

    // Filter and add entities found in the normal search
    if (includeSourceNormalSearch) {
        for (int rank : rankToIdNS.keySet()) {
            int wid = rankToIdNS.get(rank);
            HashMap<String, Double> ESFeatures = generateEntitySelectionFeaturesSearch(query, wid, rank,
                    webTotalNS, webTotalWS, bingBoldsAndRankNS, 2);
            Tag tag = new Tag(wid);
            widToEFFtrVect.add(new Pair<Tag, HashMap<String, Double>>(tag, ESFeatures));
        }
    }

    // Filter and add entities found in the WikipediaSearch
    if (includeSourceWikiSearch) {
        for (String annotatedTitleWS : annTitlesToIdAndRankWS.keySet()) {
            int wid = annTitlesToIdAndRankWS.get(annotatedTitleWS).first;
            int rank = annTitlesToIdAndRankWS.get(annotatedTitleWS).second;
            HashMap<String, Double> ESFeatures = generateEntitySelectionFeaturesSearch(query, wid, rank,
                    webTotalNS, webTotalWS, bingBoldsAndRankWS, 3);
            Tag tag = new Tag(wid);
            widToEFFtrVect.add(new Pair<Tag, HashMap<String, Double>>(tag, ESFeatures));
        }
    }

    // Filter and add entities found in the RelatedSearch
    if (includeSourceRelatedSearch) {
        for (String annotatedTitleRS : annTitlesToIdAndRankRS.keySet()) {
            int wid = annTitlesToIdAndRankRS.get(annotatedTitleRS).first;
            int rank = annTitlesToIdAndRankRS.get(annotatedTitleRS).second;
            HashMap<String, Double> ESFeatures = generateEntitySelectionFeaturesSearch(relatedSearch, wid,
                    rank, webTotalNS, webTotalRelatedSearch, bingBoldsAndRankRS, 5);
            Tag tag = new Tag(wid);
            widToEFFtrVect.add(new Pair<Tag, HashMap<String, Double>>(tag, ESFeatures));
        }
    }

    for (Pair<Tag, HashMap<String, Double>> tagAndFtrs : widToEFFtrVect) {
        Tag tag = tagAndFtrs.first;
        HashMap<String, Double> ftrs = tagAndFtrs.second;
        if (discardNE && !ERDDatasetFilter.EntityIsNE(wikiApi, wikiToFreeb, tag.getConcept()))
            continue;
        if (goldStandard.contains(tag))
            posEFVectors.add(LibSvmEntityFilter.featuresToFtrVectStatic(ftrs));
        else
            negEFVectors.add(LibSvmEntityFilter.featuresToFtrVectStatic(ftrs));
        System.out.printf("%d in query [%s] is a %s example.%n", tag.getConcept(), query,
                goldStandard.contains(tag) ? "positive" : "negative");
    }
}
From source file:it.acubelab.smaph.SmaphAnnotator.java
@Override
public HashSet<ScoredAnnotation> solveSa2W(String query) throws AnnotationException {
    if (debugger != null)
        debugger.addProcessedQuery(query);

    HashSet<ScoredAnnotation> annotations = new HashSet<>();
    try {
        /** Search the query on bing */
        List<Pair<String, Integer>> bingBoldsAndRankNS = null;
        List<String> urls = null;
        List<String> relatedSearchRes = null;
        Triple<Integer, Double, JSONObject> resCountAndWebTotalNS = null;
        int resultsCount = -1;
        double webTotalNS = Double.NaN;
        List<String> filteredBolds = null;
        HashMap<Integer, Integer> rankToIdNS = null;
        HashMap<Integer, HashSet<String>> rankToBoldsNS = null;
        List<Pair<String, Vector<Pair<Integer, Integer>>>> snippetsToBolds = null;
        if (includeSourceAnnotator || includeSourceWikiSearch || includeSourceRelatedSearch
                || includeSourceNormalSearch) {
            bingBoldsAndRankNS = new Vector<>();
            urls = new Vector<>();
            relatedSearchRes = new Vector<>();
            snippetsToBolds = new Vector<>();
            resCountAndWebTotalNS = takeBingData(query, bingBoldsAndRankNS, urls, relatedSearchRes,
                    snippetsToBolds, Integer.MAX_VALUE, false);
            resultsCount = resCountAndWebTotalNS.getLeft();
            webTotalNS = resCountAndWebTotalNS.getMiddle();
            filteredBolds = boldFilter.filterBolds(query, bingBoldsAndRankNS, resultsCount);
            rankToIdNS = urlsToRankID(urls);
            rankToBoldsNS = new HashMap<>();
            SmaphUtils.mapRankToBoldsLC(bingBoldsAndRankNS, rankToBoldsNS, null);
            if (debugger != null) {
                debugger.addBoldPositionEditDistance(query, bingBoldsAndRankNS);
                debugger.addSnippets(query, snippetsToBolds);
                debugger.addBoldFilterOutput(query, filteredBolds);
                debugger.addSource2SearchResult(query, rankToIdNS, urls);
                debugger.addBingResponseNormalSearch(query, resCountAndWebTotalNS.getRight());
            }
        }

        /** Do the WikipediaSearch on bing. */
        List<String> wikiSearchUrls = new Vector<>();
        List<Pair<String, Integer>> bingBoldsAndRankWS = new Vector<>();
        HashMap<String, Pair<Integer, Integer>> annTitlesToIdAndRankWS = null;
        Triple<Integer, Double, JSONObject> resCountAndWebTotalWS = null;
        HashMap<Integer, HashSet<String>> rankToBoldsWS = null;
        double webTotalWS = Double.NaN;
        if (includeSourceWikiSearch | includeSourceNormalSearch) {
            resCountAndWebTotalWS = takeBingData(query, bingBoldsAndRankWS, wikiSearchUrls, null, null,
                    topKWikiSearch, true);
            webTotalWS = resCountAndWebTotalWS.getMiddle();
            HashMap<Integer, Integer> rankToIdWikiSearch = urlsToRankID(wikiSearchUrls);
            rankToBoldsWS = new HashMap<>();
            SmaphUtils.mapRankToBoldsLC(bingBoldsAndRankWS, rankToBoldsWS, null);
            if (debugger != null) {
                debugger.addSource3SearchResult(query, rankToIdWikiSearch, wikiSearchUrls);
                debugger.addBingResponseWikiSearch(query, resCountAndWebTotalWS.getRight());
            }
            annTitlesToIdAndRankWS = adjustTitles(rankToIdWikiSearch);
        }

        /** Do the RelatedSearch on bing */
        String relatedSearch = null;
        List<String> relatedSearchUrls = null;
        List<Pair<String, Integer>> bingBoldsAndRankRS = null;
        HashMap<Integer, Integer> rankToIdRelatedSearch = null;
        HashMap<String, Pair<Integer, Integer>> annTitlesToIdAndRankRS = null;
        double webTotalRelatedSearch = Double.NaN;
        HashMap<Integer, HashSet<String>> rankToBoldsRS = null;
        if (includeSourceRelatedSearch) {
            relatedSearch = getRelatedSearch(relatedSearchRes, query);
            relatedSearchUrls = new Vector<>();
            bingBoldsAndRankRS = new Vector<>();
            Triple<Integer, Double, JSONObject> resCountAndWebTotalRS = takeBingData(query,
                    bingBoldsAndRankRS, relatedSearchUrls, null, null, topKRelatedSearch, false);
            webTotalRelatedSearch = resCountAndWebTotalRS.getMiddle();
            rankToIdRelatedSearch = urlsToRankID(relatedSearchUrls);
            annTitlesToIdAndRankRS = adjustTitles(rankToIdRelatedSearch);
            rankToBoldsRS = new HashMap<>();
            SmaphUtils.mapRankToBoldsLC(bingBoldsAndRankRS, rankToBoldsRS, null);
        }

        /** Annotate bolds on the annotator */
        Pair<HashMap<String, HashMap<String, Double>>, HashMap<String, Annotation>> infoAndAnnotations = null;
        HashMap<String, Annotation> spotToAnnotation = null;
        HashMap<String, HashMap<String, Double>> additionalInfo = null;
        Pair<String, HashSet<Mention>> annInput = null;
        if (includeSourceAnnotator) {
            annInput = concatenateBolds(filteredBolds);
            infoAndAnnotations = disambiguateBolds(annInput.first, annInput.second);
            spotToAnnotation = infoAndAnnotations.second;
            additionalInfo = infoAndAnnotations.first;
            if (debugger != null)
                debugger.addReturnedAnnotation(query, spotToAnnotation);
        }

        HashMap<String[], Tag> boldsToAcceptedEntity = new HashMap<>();

        // Filter and add annotations found by the disambiguator
        if (includeSourceAnnotator) {
            for (String bold : filteredBolds) {
                if (spotToAnnotation.containsKey(bold)) {
                    Annotation ann = spotToAnnotation.get(bold);
                    HashMap<String, Double> ESFeatures = generateEntitySelectionFeaturesAnnotator(query,
                            resultsCount, ann, annInput, bingBoldsAndRankNS, additionalInfo);
                    boolean accept = entityFilter.filterEntity(ESFeatures);
                    if (accept)
                        boldsToAcceptedEntity.put(new String[] { bold }, new Tag(ann.getConcept()));
                    if (debugger != null) {
                        HashSet<String> bolds = new HashSet<>();
                        bolds.add(bold);
                        debugger.addQueryCandidateBolds(query, "Source 1", ann.getConcept(), bolds);
                        debugger.addEntityFeaturesS1(query, bold, ann.getConcept(), ESFeatures, accept);
                        if (accept)
                            debugger.addResult(query, ann.getConcept());
                    }
                }
            }
        }

        // Filter and add entities found in the normal search
        if (includeSourceNormalSearch) {
            for (int rank : rankToIdNS.keySet()) {
                int wid = rankToIdNS.get(rank);
                HashMap<String, Double> ESFeatures = generateEntitySelectionFeaturesSearch(query, wid, rank,
                        webTotalNS, webTotalWS, bingBoldsAndRankNS, 2);
                HashSet<String> bolds = rankToBoldsNS.get(rank);
                boolean accept = entityFilter.filterEntity(ESFeatures);
                if (accept)
                    boldsToAcceptedEntity.put(bolds.toArray(new String[] {}), new Tag(wid));
                if (debugger != null) {
                    debugger.addQueryCandidateBolds(query, "Source 2", wid, bolds);
                    debugger.addEntityFeaturesS2(query, wid, ESFeatures, accept);
                    if (accept)
                        debugger.addResult(query, wid);
                }
            }
        }

        // Filter and add entities found in the WikipediaSearch
        if (includeSourceWikiSearch) {
            for (String annotatedTitleWS : annTitlesToIdAndRankWS.keySet()) {
                int wid = annTitlesToIdAndRankWS.get(annotatedTitleWS).first;
                int rank = annTitlesToIdAndRankWS.get(annotatedTitleWS).second;
                HashMap<String, Double> ESFeatures = generateEntitySelectionFeaturesSearch(query, wid, rank,
                        webTotalNS, webTotalWS, bingBoldsAndRankWS, 3);
                HashSet<String> bolds = rankToBoldsWS.get(rank);
                boolean accept = entityFilter.filterEntity(ESFeatures);
                if (accept)
                    boldsToAcceptedEntity.put(bolds.toArray(new String[] {}), new Tag(wid));
                if (debugger != null) {
                    debugger.addQueryCandidateBolds(query, "Source 3", wid, bolds);
                    debugger.addEntityFeaturesS3(query, wid, ESFeatures, accept);
                    if (accept)
                        debugger.addResult(query, wid);
                }
            }
        }

        // Filter and add entities found in the RelatedSearch
        if (includeSourceRelatedSearch) {
            for (String annotatedTitleRS : annTitlesToIdAndRankRS.keySet()) {
                int wid = annTitlesToIdAndRankRS.get(annotatedTitleRS).first;
                int rank = annTitlesToIdAndRankRS.get(annotatedTitleRS).second;
                HashMap<String, Double> ESFeatures = generateEntitySelectionFeaturesSearch(relatedSearch,
                        wid, rank, webTotalNS, webTotalRelatedSearch, bingBoldsAndRankRS, 5);
                HashSet<String> bolds = rankToBoldsRS.get(rank);
                boolean accept = entityFilter.filterEntity(ESFeatures);
                if (accept)
                    boldsToAcceptedEntity.put(bolds.toArray(new String[] {}), new Tag(wid));
            }
        }

        /** Link entities back to query mentions */
        annotations = linkBack.linkBack(query, boldsToAcceptedEntity);
    } catch (Exception e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    }
    SmaphAnnotatorDebugger.out.printf("*** END :%s ***%n", query);
    return annotations;
}
From source file:np2amr.StateTest.java
private List<Token> loadToks(Path path) throws IOException {
    Triple<List<List<Token>>, Map<Integer, Set<Concept>>, Set<Integer>> t = Io.loadAlignment(path);
    Config.conceptTable = t.getMiddle();
    return t.getLeft().get(0);
}
From source file:org.apache.calcite.rel.rules.AbstractMaterializedViewRule.java
/**
 * Rewriting logic is based on "Optimizing Queries Using Materialized Views:
 * A Practical, Scalable Solution" by Goldstein and Larson.
 *
 * <p>On the query side, the rule matches a Project-node chain or node, where node
 * is either an Aggregate or a Join. The subplan rooted at the node operator must
 * be composed of one or more of the following operators: TableScan, Project,
 * Filter, and Join.
 *
 * <p>For each join MV, we need to check the following:
 * <ol>
 * <li> The plan rooted at the Join operator in the view produces all rows
 * needed by the plan rooted at the Join operator in the query.</li>
 * <li> All columns required by compensating predicates, i.e., predicates that
 * need to be enforced over the view, are available at the view output.</li>
 * <li> All output expressions can be computed from the output of the view.</li>
 * <li> All output rows occur with the correct duplication factor. We might
 * rely on existing Unique-Key - Foreign-Key relationships to extract that
 * information.</li>
 * </ol>
 *
 * <p>In turn, for each aggregate MV, we need to check the following:
 * <ol>
 * <li> The plan rooted at the Aggregate operator in the view produces all rows
 * needed by the plan rooted at the Aggregate operator in the query.</li>
 * <li> All columns required by compensating predicates, i.e., predicates that
 * need to be enforced over the view, are available at the view output.</li>
 * <li> The grouping columns in the query are a subset of the grouping columns
 * in the view.</li>
 * <li> All columns required to perform further grouping are available in the
 * view output.</li>
 * <li> All columns required to compute output expressions are available in the
 * view output.</li>
 * </ol>
 */
protected void perform(RelOptRuleCall call, Project topProject, RelNode node) {
  final RexBuilder rexBuilder = node.getCluster().getRexBuilder();
  final RelMetadataQuery mq = RelMetadataQuery.instance();
  final RelOptPlanner planner = call.getPlanner();
  final RexSimplify simplify = new RexSimplify(rexBuilder, true,
      planner.getExecutor() != null ? planner.getExecutor() : RexUtil.EXECUTOR);
  final List<RelOptMaterialization> materializations = (planner instanceof VolcanoPlanner)
      ? ((VolcanoPlanner) planner).getMaterializations()
      : ImmutableList.<RelOptMaterialization>of();
  if (!materializations.isEmpty()) {
    // 1. Explore query plan to recognize whether preconditions to
    // try to generate a rewriting are met
    if (!isValidPlan(topProject, node, mq)) {
      return;
    }

    // Obtain applicable (filtered) materializations
    // TODO: Filtering of relevant materializations needs to be
    // improved so we gather only materializations that might
    // actually generate a valid rewriting.
    final List<RelOptMaterialization> applicableMaterializations = RelOptMaterializations
        .getApplicableMaterializations(node, materializations);
    if (!applicableMaterializations.isEmpty()) {
      // 2. Initialize all query related auxiliary data structures
      // that will be used throughout query rewriting process
      // Generate query table references
      final Set<RelTableRef> queryTableRefs = mq.getTableReferences(node);
      if (queryTableRefs == null) {
        // Bail out
        return;
      }

      // Extract query predicates
      final RelOptPredicateList queryPredicateList = mq.getAllPredicates(node);
      if (queryPredicateList == null) {
        // Bail out
        return;
      }
      final RexNode pred = simplify.simplify(
          RexUtil.composeConjunction(rexBuilder, queryPredicateList.pulledUpPredicates, false));
      final Triple<RexNode, RexNode, RexNode> queryPreds = splitPredicates(rexBuilder, pred);

      // Extract query equivalence classes. An equivalence class is a set
      // of columns in the query output that are known to be equal.
      final EquivalenceClasses qEC = new EquivalenceClasses();
      for (RexNode conj : RelOptUtil.conjunctions(queryPreds.getLeft())) {
        assert conj.isA(SqlKind.EQUALS);
        RexCall equiCond = (RexCall) conj;
        qEC.addEquivalenceClass((RexTableInputRef) equiCond.getOperands().get(0),
            (RexTableInputRef) equiCond.getOperands().get(1));
      }

      // 3. We iterate through all applicable materializations trying to
      // rewrite the given query
      for (RelOptMaterialization materialization : applicableMaterializations) {
        final Project topViewProject;
        final RelNode viewNode;
        if (materialization.queryRel instanceof Project) {
          topViewProject = (Project) materialization.queryRel;
          viewNode = topViewProject.getInput();
        } else {
          topViewProject = null;
          viewNode = materialization.queryRel;
        }

        // 3.1. View checks before proceeding
        if (!isValidPlan(topViewProject, viewNode, mq)) {
          // Skip it
          continue;
        }

        // 3.2. Initialize all query related auxiliary data structures
        // that will be used throughout query rewriting process
        // Extract view predicates
        final RelOptPredicateList viewPredicateList = mq.getAllPredicates(viewNode);
        if (viewPredicateList == null) {
          // Skip it
          continue;
        }
        final RexNode viewPred = simplify.simplify(
            RexUtil.composeConjunction(rexBuilder, viewPredicateList.pulledUpPredicates, false));
        final Triple<RexNode, RexNode, RexNode> viewPreds = splitPredicates(rexBuilder, viewPred);

        // Extract view table references
        final Set<RelTableRef> viewTableRefs = mq.getTableReferences(viewNode);
        if (viewTableRefs == null) {
          // Bail out
          return;
        }

        // Extract view tables
        MatchModality matchModality;
        Multimap<RexTableInputRef, RexTableInputRef> compensationEquiColumns =
            ArrayListMultimap.create();
        if (!queryTableRefs.equals(viewTableRefs)) {
          // We try to compensate, e.g., for join queries it might be
          // possible to join missing tables with view to compute result.
          // Two supported cases: query tables are subset of view tables (we need to
          // check whether they are cardinality-preserving joins), or view tables are
          // subset of query tables (add additional tables through joins if possible)
          if (viewTableRefs.containsAll(queryTableRefs)) {
            matchModality = MatchModality.QUERY_PARTIAL;
            final EquivalenceClasses vEC = new EquivalenceClasses();
            for (RexNode conj : RelOptUtil.conjunctions(viewPreds.getLeft())) {
              assert conj.isA(SqlKind.EQUALS);
              RexCall equiCond = (RexCall) conj;
              vEC.addEquivalenceClass((RexTableInputRef) equiCond.getOperands().get(0),
                  (RexTableInputRef) equiCond.getOperands().get(1));
            }
            if (!compensateQueryPartial(compensationEquiColumns, viewTableRefs, vEC, queryTableRefs)) {
              // Cannot rewrite, skip it
              continue;
            }
          } else if (queryTableRefs.containsAll(viewTableRefs)) {
            // TODO: implement latest case
            matchModality = MatchModality.VIEW_PARTIAL;
            continue;
          } else {
            // Skip it
            continue;
          }
        } else {
          matchModality = MatchModality.COMPLETE;
        }

        // 4. We map every table in the query to a view table with the same qualified
        // name.
        final Multimap<RelTableRef, RelTableRef> multiMapTables = ArrayListMultimap.create();
        for (RelTableRef queryTableRef : queryTableRefs) {
          for (RelTableRef viewTableRef : viewTableRefs) {
            if (queryTableRef.getQualifiedName().equals(viewTableRef.getQualifiedName())) {
              multiMapTables.put(queryTableRef, viewTableRef);
            }
          }
        }

        // If a table is used multiple times, we will create multiple mappings,
        // and we will try to rewrite the query using each of the mappings.
        // Then, we will try to map every source table (query) to a target
        // table (view), and if we are successful, we will try to create
        // compensation predicates to filter the view results further
        // (if needed).
        final List<BiMap<RelTableRef, RelTableRef>> flatListMappings =
            generateTableMappings(multiMapTables);
        for (BiMap<RelTableRef, RelTableRef> tableMapping : flatListMappings) {
          // 4.0. If compensation equivalence classes exist, we need to add
          // the mapping to the query mapping
          final EquivalenceClasses currQEC = EquivalenceClasses.copy(qEC);
          if (matchModality == MatchModality.QUERY_PARTIAL) {
            for (Entry<RexTableInputRef, RexTableInputRef> e : compensationEquiColumns.entries()) {
              // Copy origin
              RelTableRef queryTableRef = tableMapping.inverse().get(e.getKey().getTableRef());
              RexTableInputRef queryColumnRef = RexTableInputRef.of(queryTableRef,
                  e.getKey().getIndex(), e.getKey().getType());
              // Add to query equivalence classes and table mapping
              currQEC.addEquivalenceClass(queryColumnRef, e.getValue());
              tableMapping.put(e.getValue().getTableRef(), e.getValue().getTableRef()); // identity
            }
          }

          final RexNode compensationColumnsEquiPred;
          final RexNode compensationRangePred;
          final RexNode compensationResidualPred;

          // 4.1. Establish relationship between view and query equivalence classes.
          // If every view equivalence class is not a subset of a query
          // equivalence class, we bail out.
          // To establish relationship, we swap column references of the view predicates
          // to point to query tables. Then, we create the equivalence classes for the
          // view predicates and check that every view equivalence class is a subset of a
          // query equivalence class: if it is not, we bail out.
          final RexNode viewColumnsEquiPred = RexUtil.swapTableReferences(rexBuilder,
              viewPreds.getLeft(), tableMapping.inverse());
          final EquivalenceClasses queryBasedVEC = new EquivalenceClasses();
          for (RexNode conj : RelOptUtil.conjunctions(viewColumnsEquiPred)) {
            assert conj.isA(SqlKind.EQUALS);
            RexCall equiCond = (RexCall) conj;
            queryBasedVEC.addEquivalenceClass((RexTableInputRef) equiCond.getOperands().get(0),
                (RexTableInputRef) equiCond.getOperands().get(1));
          }
          compensationColumnsEquiPred = generateEquivalenceClasses(rexBuilder, currQEC, queryBasedVEC);
          if (compensationColumnsEquiPred == null) {
            // Skip it
            continue;
          }

          // 4.2. We check that range intervals for the query are contained in the view.
          // Compute compensating predicates.
          final RexNode queryRangePred = RexUtil.swapColumnReferences(rexBuilder,
              queryPreds.getMiddle(), currQEC.getEquivalenceClassesMap());
          final RexNode viewRangePred = RexUtil.swapTableColumnReferences(rexBuilder,
              viewPreds.getMiddle(), tableMapping.inverse(), currQEC.getEquivalenceClassesMap());
          compensationRangePred = SubstitutionVisitor.splitFilter(simplify, queryRangePred,
              viewRangePred);
          if (compensationRangePred == null) {
            // Skip it
            continue;
          }

          // 4.3. Finally, we check that residual predicates of the query are satisfied
          // within the view.
          // Compute compensating predicates.
          final RexNode queryResidualPred = RexUtil.swapColumnReferences(rexBuilder,
              queryPreds.getRight(), currQEC.getEquivalenceClassesMap());
          final RexNode viewResidualPred = RexUtil.swapTableColumnReferences(rexBuilder,
              viewPreds.getRight(), tableMapping.inverse(), currQEC.getEquivalenceClassesMap());
          compensationResidualPred = SubstitutionVisitor.splitFilter(simplify, queryResidualPred,
              viewResidualPred);
          if (compensationResidualPred == null) {
            // Skip it
            continue;
          }

          // 4.4. Final compensation predicate.
          RexNode compensationPred = RexUtil.composeConjunction(rexBuilder, ImmutableList
              .of(compensationColumnsEquiPred, compensationRangePred, compensationResidualPred), false);
          if (!compensationPred.isAlwaysTrue()) {
            // All columns required by compensating predicates must be contained
            // in the view output (condition 2).
            List<RexNode> viewExprs = extractExpressions(topViewProject, viewNode, rexBuilder);
            compensationPred = rewriteExpression(rexBuilder, viewNode, viewExprs, compensationPred,
                tableMapping, currQEC.getEquivalenceClassesMap(), mq);
            if (compensationPred == null) {
              // Skip it
              continue;
            }
          }

          // 4.5. Generate final rewriting if possible.
          // First, we add the compensation predicate (if any) on top of the view.
          // Then, we trigger the Aggregate unifying method. This method will either create
          // a Project or an Aggregate operator on top of the view. It will also compute the
          // output expressions for the query.
          RelBuilder builder = call.builder();
          builder.push(materialization.tableRel);
          if (!compensationPred.isAlwaysTrue()) {
            builder.filter(simplify.simplify(compensationPred));
          }
          RelNode result = unify(rexBuilder, builder, builder.build(), topProject, node,
              topViewProject, viewNode, tableMapping, currQEC.getEquivalenceClassesMap(), mq);
          if (result == null) {
            // Skip it
            continue;
          }
          call.transformTo(result);
        }
      }
    }
  }
}
From source file:org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveTableScan.java
private HiveTableScan(RelOptCluster cluster, RelTraitSet traitSet, RelOptHiveTable table, String alias,
        String concatQbIDAlias, RelDataType newRowtype, boolean useQBIdInDigest, boolean insideView) {
    super(cluster, TraitsUtil.getDefaultTraitSet(cluster), table);
    assert getConvention() == HiveRelNode.CONVENTION;
    this.tblAlias = alias;
    this.concatQbIDAlias = concatQbIDAlias;
    this.hiveTableScanRowType = newRowtype;
    Triple<ImmutableList<Integer>, ImmutableSet<Integer>, ImmutableSet<Integer>> colIndxPair =
            buildColIndxsFrmReloptHT(table, newRowtype);
    this.neededColIndxsFrmReloptHT = colIndxPair.getLeft();
    this.virtualOrPartColIndxsInTS = colIndxPair.getMiddle();
    this.virtualColIndxsInTS = colIndxPair.getRight();
    this.useQBIdInDigest = useQBIdInDigest;
    this.insideView = insideView;
}
From source file:org.apache.kylin.rest.util.AdHocUtil.java
static String restoreComputedColumnToExpr(String sql, CCInfo ccInfo) {
    String ccName = ccInfo.getComputedColumnDesc().getColumnName();
    List<Triple<Integer, Integer, String>> replacements = Lists.newArrayList();
    Matcher matcher = identifierInSqlPattern.matcher(sql);
    while (matcher.find()) {
        if (matcher.group(1) != null) { // with quote case: "TABLE"."COLUMN"
            String quotedColumnName = matcher.group(3);
            Preconditions.checkNotNull(quotedColumnName);
            String columnName = StringUtils.strip(quotedColumnName, "\"");
            if (!columnName.equalsIgnoreCase(ccName)) {
                continue;
            }
            if (matcher.group(2) != null) { // table name exists
                String quotedTableAlias = StringUtils.strip(matcher.group(2), ".");
                String tableAlias = StringUtils.strip(quotedTableAlias, "\"");
                replacements.add(Triple.of(matcher.start(1), matcher.end(1), replaceIdentifierInExpr(
                        ccInfo.getComputedColumnDesc().getExpression(), tableAlias, true)));
            } else { // only column
                if (endWithAsPattern.matcher(sql.substring(0, matcher.start(1))).find()) {
                    // select DEAL_AMOUNT as "deal_amount" case
                    continue;
                }
                replacements.add(Triple.of(matcher.start(1), matcher.end(1), replaceIdentifierInExpr(
                        ccInfo.getComputedColumnDesc().getExpression(), null, true)));
            }
        } else if (matcher.group(4) != null) { // without quote case: table.column or simply column
            String columnName = matcher.group(6);
            Preconditions.checkNotNull(columnName);
            if (!columnName.equalsIgnoreCase(ccName)) {
                continue;
            }
            if (matcher.group(5) != null) { // table name exists
                String tableAlias = StringUtils.strip(matcher.group(5), ".");
                replacements.add(Triple.of(matcher.start(4), matcher.end(4), replaceIdentifierInExpr(
                        ccInfo.getComputedColumnDesc().getExpression(), tableAlias, false)));
            } else { // only column
                if (endWithAsPattern.matcher(sql.substring(0, matcher.start(4))).find()) {
                    // select DEAL_AMOUNT as deal_amount case
                    continue;
                }
                replacements.add(Triple.of(matcher.start(4), matcher.end(4), replaceIdentifierInExpr(
                        ccInfo.getComputedColumnDesc().getExpression(), null, false)));
            }
        }
    }
    Collections.reverse(replacements);
    for (Triple<Integer, Integer, String> triple : replacements) {
        sql = sql.substring(0, triple.getLeft()) + "(" + triple.getRight() + ")"
                + sql.substring(triple.getMiddle());
    }
    return sql;
}
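In this method the Triple doubles as an offset-replacement record: getLeft and getMiddle hold a regex match's start and end offsets, and getRight holds the replacement text. Collections.reverse makes the splices run right-to-left, so rewriting one match never shifts the offsets of matches earlier in the string; the same reverse-then-splice pattern recurs in the two Kylin examples that follow. A minimal self-contained sketch of that pattern (the regex and SQL string below are illustrative, not taken from Kylin):

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.lang3.tuple.Triple;

public class ReverseSpliceSketch {
    public static void main(String[] args) {
        String sql = "select PRICE * QTY, PRICE from T";
        // Collect (start, end, replacement) for every occurrence to rewrite.
        List<Triple<Integer, Integer, String>> replacements = new ArrayList<>();
        Matcher m = Pattern.compile("PRICE").matcher(sql);
        while (m.find()) {
            replacements.add(Triple.of(m.start(), m.end(), "T.PRICE"));
        }
        // Apply right-to-left: splicing at a later offset leaves the
        // earlier, still-pending offsets valid.
        Collections.reverse(replacements);
        for (Triple<Integer, Integer, String> t : replacements) {
            sql = sql.substring(0, t.getLeft()) + t.getRight() + sql.substring(t.getMiddle());
        }
        System.out.println(sql); // select T.PRICE * QTY, T.PRICE from T
    }
}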
From source file:org.apache.kylin.rest.util.AdHocUtil.java
static String replaceIdentifierInExpr(String expr, String tableAlias, boolean quoted) {
    List<Triple<Integer, Integer, String>> replacements = Lists.newArrayList();
    Matcher matcher = identifierInExprPattern.matcher(expr);
    while (matcher.find()) {
        String t = tableAlias == null ? StringUtils.strip(matcher.group(3), ".") : tableAlias;
        String c = matcher.group(4);
        String replacement = quoted ? "\"" + t.toUpperCase() + "\".\"" + c.toUpperCase() + "\""
                : t + "." + c;
        replacements.add(Triple.of(matcher.start(1), matcher.end(1), replacement));
    }
    Collections.reverse(replacements);
    for (Triple<Integer, Integer, String> triple : replacements) {
        expr = expr.substring(0, triple.getLeft()) + triple.getRight()
                + expr.substring(triple.getMiddle());
    }
    return expr;
}
From source file:org.apache.kylin.rest.util.PushDownUtil.java
static String restoreComputedColumnToExpr(String sql, ComputedColumnDesc computedColumnDesc) {
    String ccName = computedColumnDesc.getColumnName();
    List<Triple<Integer, Integer, String>> replacements = Lists.newArrayList();
    Matcher matcher = identifierInSqlPattern.matcher(sql);
    while (matcher.find()) {
        if (matcher.group(1) != null) { // with quote case: "TABLE"."COLUMN"
            String quotedColumnName = matcher.group(3);
            Preconditions.checkNotNull(quotedColumnName);
            String columnName = StringUtils.strip(quotedColumnName, "\"");
            if (!columnName.equalsIgnoreCase(ccName)) {
                continue;
            }
            if (matcher.group(2) != null) { // table name exists
                String quotedTableAlias = StringUtils.strip(matcher.group(2), ".");
                String tableAlias = StringUtils.strip(quotedTableAlias, "\"");
                replacements.add(Triple.of(matcher.start(1), matcher.end(1),
                        replaceIdentifierInExpr(computedColumnDesc.getExpression(), tableAlias, true)));
            } else { // only column
                if (endWithAsPattern.matcher(sql.substring(0, matcher.start(1))).find()) {
                    // select DEAL_AMOUNT as "deal_amount" case
                    continue;
                }
                replacements.add(Triple.of(matcher.start(1), matcher.end(1),
                        replaceIdentifierInExpr(computedColumnDesc.getExpression(), null, true)));
            }
        } else if (matcher.group(4) != null) { // without quote case: table.column or simply column
            String columnName = matcher.group(6);
            Preconditions.checkNotNull(columnName);
            if (!columnName.equalsIgnoreCase(ccName)) {
                continue;
            }
            if (matcher.group(5) != null) { // table name exists
                String tableAlias = StringUtils.strip(matcher.group(5), ".");
                replacements.add(Triple.of(matcher.start(4), matcher.end(4),
                        replaceIdentifierInExpr(computedColumnDesc.getExpression(), tableAlias, false)));
            } else { // only column
                if (endWithAsPattern.matcher(sql.substring(0, matcher.start(4))).find()) {
                    // select DEAL_AMOUNT as deal_amount case
                    continue;
                }
                replacements.add(Triple.of(matcher.start(4), matcher.end(4),
                        replaceIdentifierInExpr(computedColumnDesc.getExpression(), null, false)));
            }
        }
    }
    Collections.reverse(replacements);
    for (Triple<Integer, Integer, String> triple : replacements) {
        sql = sql.substring(0, triple.getLeft()) + "(" + triple.getRight() + ")"
                + sql.substring(triple.getMiddle());
    }
    return sql;
}