List of usage examples for the org.antlr.v4.runtime.CommonTokenStream constructor
public CommonTokenStream(TokenSource tokenSource)
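Every example below follows the same pipeline: wrap the input in a CharStream, feed it to a generated lexer, buffer the lexer's output in a CommonTokenStream, and hand that stream to a generated parser. A minimal sketch of that pipeline, assuming a hypothetical grammar named Hello with a generated HelloLexer/HelloParser and a start rule named greeting:

import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.tree.ParseTree;

public class MinimalPipeline {
    public static void main(String[] args) {
        // characters -> tokens -> parse tree
        CharStream input = CharStreams.fromString("hello world");
        HelloLexer lexer = new HelloLexer(input);                 // hypothetical generated lexer
        CommonTokenStream tokens = new CommonTokenStream(lexer);  // buffers tokens for the parser
        HelloParser parser = new HelloParser(tokens);             // hypothetical generated parser
        ParseTree tree = parser.greeting();                       // invoke the start rule
        System.out.println(tree.toStringTree(parser));            // LISP-style dump of the tree
    }
}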
From source file:AST.DesignPatternDetection.java
private void btRunActionPerformed(ActionEvent e)
        throws FileNotFoundException, IOException, InterruptedException {
    if (tfProjectName.getText().equals("") || tfProjectName.getText().equals(null)) {
        JOptionPane.showMessageDialog(null, "You have to enter the Project's name!");
        return;
    }
    overlap = "";
    programPath = "";
    Cursor hourglassCursor = new Cursor(Cursor.WAIT_CURSOR);
    setCursor(hourglassCursor);
    // Initial parameters, these must always be set...
    Boolean sourceCodeGraph = true;
    Boolean sourceCodeGraphDetail = true;
    Boolean designPatternGraph = true;
    Boolean OnlyTerminalCommands = true;
    String designpatternName = cbSelectionDP.getSelectedItem().toString();
    String projectName = tfProjectName.getText();
    Double threshold = Double.parseDouble(tfThreshold.getText());
    if (chbOverlap.isSelected() == true) {
        overlap = " -overlap ";
    } else {
        overlap = "";
    }
    if (chbInnerClass.isSelected() == true) {
        includeInnerClasses = "Yes";
    } else {
        includeInnerClasses = "No";
    }
    programPath = tfProgramPath.getText();
    // create "project" directory
    String directoryNameProject = programPath + "/Projects/" + projectName + "/";
    File directoryProject = new File(String.valueOf(directoryNameProject));
    if (!directoryProject.exists()) {
        directoryProject.mkdir();
    }
    // create "source" directory
    String directoryName = programPath + "/Projects/" + projectName + "/source/";
    File directory = new File(String.valueOf(directoryName));
    if (!directory.exists()) {
        directory.mkdir();
    } else {
        FileUtils.deleteDirectory(new File(directoryName));
        directory.mkdir();
    }
    // create "inputs" directory
    String directoryName2 = programPath + "/Projects/" + projectName + "/inputs/";
    File directory2 = new File(String.valueOf(directoryName2));
    if (!directory2.exists()) {
        directory2.mkdir();
    }
    // create "outputs" directory
    String directoryName3 = programPath + "/Projects/" + projectName + "/outputs/";
    File directory3 = new File(String.valueOf(directoryName3));
    if (!directory3.exists()) {
        directory3.mkdir();
    }
    // create "batch" directory
    String directoryName4 = programPath + "/Projects/" + projectName + "/batch/";
    File directory4 = new File(String.valueOf(directoryName4));
    if (!directory4.exists()) {
        directory4.mkdir();
    } else {
        FileUtils.deleteDirectory(new File(directoryName4));
        directory4.mkdir();
    }
    // create "designpatternName+inputs" directory
    String directoryName5 = programPath + "/Projects/" + projectName + "/inputs/"
            + designpatternName + "_inputs/";
    File directory5 = new File(String.valueOf(directoryName5));
    if (!directory5.exists()) {
        directory5.mkdir();
    } else {
        FileUtils.deleteDirectory(new File(directoryName5));
        directory5.mkdir();
    }
    // create "designpatternName+outputs" directory
    String directoryName6 = programPath + "/Projects/" + projectName + "/outputs/"
            + designpatternName + "_outputs/";
    File directory6 = new File(String.valueOf(directoryName6));
    if (!directory6.exists()) {
        directory6.mkdir();
    } else {
        FileUtils.deleteDirectory(new File(directoryName6));
        directory6.mkdir();
    }
    File dir = new File(tfPath.getText());
    FileWalker fw = new FileWalker();
    List<String> directoryListing = new ArrayList<String>();
    directoryListing.clear();
    directoryListing = fw.displayDirectoryContents(dir);
    // File[] directoryListing = dir.listFiles();
    // 1. visit
    if (directoryListing != null) {
        for (String child : directoryListing) {
            if (child.toString().contains(".java") && !child.toString().contains("package-info")) {
                System.out.println(child);
                ANTLRInputStream input = new ANTLRInputStream(new FileInputStream(child)); // we'll parse this file
                JavaLexer lexer = new JavaLexer(input);
                CommonTokenStream tokens = new CommonTokenStream(lexer);
                JavaParser parser = new JavaParser(tokens);
                ParseTree tree = parser.compilationUnit(); // see the grammar
                MyVisitorBase visitorbase = new MyVisitorBase(); // extends JavaBaseVisitor<Void> and
                                                                 // overrides the methods you're interested in
                visitorbase.visit(tree);
            }
        }
    } else {
        JOptionPane.showMessageDialog(null, "Could not find the path...");
        return;
    }
    // 2. visit
    if (directoryListing != null) {
        for (String child : directoryListing) {
            if (child.toString().contains(".java") && !child.toString().contains("package-info")) {
                //System.out.println(child);
                ANTLRInputStream input = new ANTLRInputStream(new FileInputStream(child)); // we'll parse this file
                JavaLexer lexer = new JavaLexer(input);
                CommonTokenStream tokens = new CommonTokenStream(lexer);
                JavaParser parser = new JavaParser(tokens);
                ParseTree tree = parser.compilationUnit(); // see the grammar
                MyVisitor visitor = new MyVisitor(); // extends JavaBaseVisitor<Void> and
                                                     // overrides the methods you're interested in
                visitor.includeInnerClasses = includeInnerClasses;
                visitor.modifiersSet();
                visitor.visit(tree);
            }
        }
    }
    // 3. visit
    if (directoryListing != null) {
        for (String child : directoryListing) {
            if (child.toString().contains(".java") && !child.toString().contains("package-info")) {
                //System.out.println(child);
                ANTLRInputStream input = new ANTLRInputStream(new FileInputStream(child)); // we'll parse this file
                JavaLexer lexer = new JavaLexer(input);
                CommonTokenStream tokens = new CommonTokenStream(lexer);
                JavaParser parser = new JavaParser(tokens);
                ParseTree tree = parser.compilationUnit(); // see the grammar
                MyVisitor2 visitor2 = new MyVisitor2();
                visitor2.modifiersSet();
                visitor2.visit(tree);
            }
        }
    }
    try {
        Proba p = new Proba();
        p.start(sourceCodeGraph, sourceCodeGraphDetail, designPatternGraph, designpatternName,
                OnlyTerminalCommands, projectName, threshold, overlap, programPath);
    } catch (IOException e1) {
        // TODO Auto-generated catch block
        e1.printStackTrace();
    }
    taInfo.setText("----------" + designpatternName + " PATTERN (" + projectName + ")-------------" + "\n");
    taInfo.append("1. Project's classes ASTs created." + "\n");
    taInfo.append("2. After treewalk of ASTs, Graph Model is created.(/Projects/" + projectName + "/source)" + "\n");
    taInfo.append("3. Pool of Design Pattern Templates is created.(/Projects/" + projectName + "/inputs/"
            + designpatternName + "_inputs)" + "\n");
    taInfo.append("4. Heuristic function of shell script file is created.(/Projects/" + projectName + "/batch)" + "\n");
    //p.start2();
    Cursor normalCursor = new Cursor(Cursor.DEFAULT_CURSOR);
    setCursor(normalCursor);
    vertices.clear();
    dugumler.clear();
    methods.clear();
    JOptionPane.showMessageDialog(null, "Graph Model is successfully completed!");
}
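Note that the method above lexes and parses every file three times, once per visitor. Since visiting an ANTLR ParseTree does not consume it, the three passes could share a single parse. A sketch of that rework (not part of the original source), assuming the same MyVisitorBase/MyVisitor/MyVisitor2 classes and the enclosing method's IOException handling:

// parse each file once and run all three visitors over the same tree
for (String child : directoryListing) {
    if (child.contains(".java") && !child.contains("package-info")) {
        ANTLRInputStream input = new ANTLRInputStream(new FileInputStream(child));
        JavaParser parser = new JavaParser(new CommonTokenStream(new JavaLexer(input)));
        ParseTree tree = parser.compilationUnit(); // the tree is reusable; visit it as often as needed
        new MyVisitorBase().visit(tree);
        MyVisitor visitor = new MyVisitor();
        visitor.includeInnerClasses = includeInnerClasses;
        visitor.modifiersSet();
        visitor.visit(tree);
        MyVisitor2 visitor2 = new MyVisitor2();
        visitor2.modifiersSet();
        visitor2.visit(tree);
    }
}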
From source file:at.ac.tuwien.dsg.cloud.salsa.engine.smartdeployment.main.SmartDeploymentService.java
License:Apache License
private String enrich_CAMF_CSAR_Process(String csarTmp, String serviceName) {
    String extractedFolder = csarTmp + ".extracted";
    String toscaFile = extractedFolder + "/Definitions/Application.tosca";
    String scriptDir = extractedFolder + "/Scripts/";
    try {
        // extract CSAR
        CSARParser.extractCsar(new File(csarTmp), extractedFolder);
        // enrich with QUELLE
        String toscaXML = FileUtils.readFileToString(new File(toscaFile));
        EngineLogger.logger.debug("Read tosca string done. 100 first characters: {}", toscaXML);
        EngineLogger.logger.debug("Now trying to enrich with QUELLE....");
        //enrichCAMFToscaWithQuelle(toscaXML, serviceName, new String[]{EnrichFunctions.QuelleCloudServiceRecommendation.toString(), EnrichFunctions.SalsaInfoCompletion.toString()});
        SmartDeploymentService sds = new SmartDeploymentService();
        String result = sds.enrichCAMFToscaWithQuelle(toscaXML, serviceName,
                new String[] { EnrichFunctions.QuelleCloudServiceRecommendation.toString() });
        EngineLogger.logger.debug("After enrich with QUELLE, the result is: {}", result);
        // write back to right place
        FileUtils.writeStringToFile(new File(toscaFile), result);
        // read software requirement in TOSCA for each node, put in a map + artifact:
        // a map between node ID and full requirement in Tag
        Map<String, String> allRequirements = new HashMap<>();
        TDefinitions def = ToscaXmlProcess.readToscaFile(toscaFile);
        for (TNodeTemplate node : ToscaStructureQuery.getNodeTemplateList(def)) {
            EngineLogger.logger.debug("Checking node: {}", node.getId());
            String policiesStr = new String();
            if (node.getPolicies() != null) {
                EngineLogger.logger.debug("Found policies of node: " + node.getId() + "/" + node.getName());
                List<TPolicy> policies = node.getPolicies().getPolicy();
                for (TPolicy p : policies) {
                    if (p.getPolicyType().getLocalPart().equals("Requirement")
                            && p.getPolicyType().getPrefix().equals("SmartDeployment")) {
                        if (p.getName().startsWith("CONSTRAINT")) {
                            // TODO: parse SYBL policies
                        } else {
                            policiesStr += p.getName().trim();
                            if (!p.getName().trim().endsWith(";")) {
                                policiesStr += ";";
                                EngineLogger.logger.debug("policiesStr = {}", policiesStr);
                            }
                        }
                    }
                }
            }
            EngineLogger.logger.debug("Collected policies for node {} is : {}", node.getId(), policiesStr);
            allRequirements.put(node.getId(), policiesStr);
        }
        EngineLogger.logger.debug("In total, we got following requirements: " + allRequirements.toString());
        // Load dependency graph knowledge base
        String dependencyDataFile = SmartDeploymentService.class
                .getResource("/data/salsa.dependencygraph.xml").getFile();
        SalsaStackDependenciesGraph depGraph = SalsaStackDependenciesGraph
                .fromXML(FileUtils.readFileToString(new File(dependencyDataFile)));
        // ENRICH SCRIPT
        // extract all the requirements, put into the hashmap
        for (Map.Entry<String, String> entry : allRequirements.entrySet()) {
            EngineLogger.logger.debug("Analyzing node: {}. Full policies string is: *** {} ***",
                    entry.getKey(), entry.getValue());
            // extract CARL Strings
            CharStream stream = new ANTLRInputStream(entry.getValue());
            CARLLexer lexer = new CARLLexer(stream);
            CommonTokenStream tokens = new CommonTokenStream(lexer);
            CARLParser parser = new CARLParser(tokens);
            RequirementsContext requirementsContext = parser.requirements();
            ParseTreeWalker walker = new ParseTreeWalker(); // create standard walker
            CARLProgramListener extractor = new CARLProgramListener(parser);
            walker.walk(extractor, requirementsContext); // initiate walk of tree with listener
            org.eclipse.camf.carl.model.Requirements requirements = extractor.getRequirements();
            HashMap<String, String> allReqsOfNode = new HashMap<>();
            ArrayList<String> checkList = new ArrayList<>();
            // os=Ubuntu; os:ver=12.04; sw=jre:1.7 ==> os=Ubuntu,
            // here flatten all the requirements of the node
            for (IRequirement req : requirements.getRequirements()) {
                EngineLogger.logger.debug("Irequirement: " + req.toString());
                if (req.getCategory().equals(RequirementCategory.SOFTWARE)) {
                    SoftwareRequirement swr = (SoftwareRequirement) req;
                    allReqsOfNode.put("sw", removeQuote(swr.getName()));
                    allReqsOfNode.put(removeQuote(swr.getName()) + ":ver", swr.getVersion().getVersion());
                    checkList.add(swr.getName());
                } else {
                    if (req.getCategory().equals(RequirementCategory.OPERATING_SYSTEM)) {
                        // the system part is generated by quelle
                        OSRequirement osReq = (OSRequirement) req;
                        if (osReq.getName() != null) {
                            allReqsOfNode.put("os", removeQuote(osReq.getName()));
                        }
                        if (osReq.getVersion() != null) {
                            allReqsOfNode.put("os:ver", osReq.getVersion().getVersion());
                        }
                    }
                }
            }
            // find all the deployment scripts of all "sw" requirements
            LinkedList<String> listOfScripts = new LinkedList<>();
            EngineLogger.logger.debug("The node {} will be enriched based-on the requirements: {}",
                    entry.getKey(), checkList.toString());
            for (String swReq : checkList) {
                EngineLogger.logger.debug("Searching deployment script for software req: {}", swReq);
                SalsaStackDependenciesGraph theNode = depGraph.findNodeByName(swReq);
                EngineLogger.logger.debug("Node found: {}", theNode.getName());
                EngineLogger.logger.debug("All requirements: {}", allReqsOfNode.toString());
                LinkedList<String> tmp = theNode.searchDeploymentScriptTemplate(allReqsOfNode);
                if (tmp != null) {
                    listOfScripts.addAll(tmp);
                }
            }
            EngineLogger.logger.debug(listOfScripts.toString());
            // create a script to solve all dependencies first
            String nodeID = entry.getKey();
            String theDependencyScript = "#!/bin/bash \n\n######## Generated by the Decision Module to solve the software dependencies ######## \n\n";
            for (String appendScript : listOfScripts) {
                String theAppend = SmartDeploymentService.class.getResource("/scriptRepo/" + appendScript).getFile();
                String stringToAppend = FileUtils.readFileToString(new File(theAppend));
                theDependencyScript += stringToAppend + "\n";
            }
            theDependencyScript += "######## End of generated script ########";
            String tmpScriptFile = scriptDir + "/" + nodeID + ".salsatmp";
            // read original script, remove the #!/bin/bash if present
            String originalScriptFile = null;
            TNodeTemplate node = ToscaStructureQuery.getNodetemplateById(nodeID, def);
            EngineLogger.logger.debug("Getting artifact template of node: {}", node.getId());
            for (TDeploymentArtifact art : node.getDeploymentArtifacts().getDeploymentArtifact()) {
                EngineLogger.logger.debug("Checking art.Name: {}, type: {}", art.getName(),
                        art.getArtifactType().getLocalPart());
                if (art.getArtifactType().getLocalPart().equals("ScriptArtifactPropertiesType")) {
                    String artTemplateID = art.getArtifactRef().getLocalPart();
                    TArtifactTemplate artTemplate = ToscaStructureQuery.getArtifactTemplateById(artTemplateID, def);
                    if (artTemplate != null) {
                        originalScriptFile = artTemplate.getArtifactReferences().getArtifactReference().get(0)
                                .getReference();
                        originalScriptFile = extractedFolder + "/" + originalScriptFile;
                    }
                }
            }
            if (originalScriptFile != null) {
                String originalScript = FileUtils.readFileToString(new File(originalScriptFile));
                originalScript = originalScript.replace("#!/bin/bash", "");
                originalScript = originalScript.replace("#!/bin/sh", "");
                theDependencyScript += originalScript;
                FileUtils.writeStringToFile(new File(tmpScriptFile), theDependencyScript);
                EngineLogger.logger.debug("originalScript: {}, moveto: {}", originalScriptFile,
                        originalScriptFile + ".original");
                FileUtils.moveFile(FileUtils.getFile(originalScriptFile),
                        FileUtils.getFile(originalScriptFile + ".original"));
                FileUtils.moveFile(FileUtils.getFile(tmpScriptFile), FileUtils.getFile(originalScriptFile));
            } else {
                // TODO: there is no original script, just add new template, add tmpScript into that
            }
        } // end for each node in allRequirements analysis
        // repack the CSAR
        FileUtils.deleteQuietly(FileUtils.getFile(csarTmp));
        File directory = new File(extractedFolder);
        File[] fList = directory.listFiles();
        //CSARParser.buildCSAR(fList, csarTmp);
        String builtCSAR = SalsaConfiguration.getToscaTemplateStorage() + "/" + serviceName + ".csar";
        CSARParser.buildCSAR(extractedFolder, builtCSAR);
    } catch (IOException ex) {
        EngineLogger.logger.error("Error when enriching CSAR: " + csarTmp, ex);
        return "Error";
    } catch (JAXBException ex) {
        EngineLogger.logger.error("Cannot parse the Tosca definition in CSAR file: " + toscaFile, ex);
        return "Error";
    }
    // return the link to the CSAR
    String csarURLReturn = SalsaConfiguration.getSalsaCenterEndpoint() + "/rest/smart/CAMFTosca/enrich/CSAR/"
            + serviceName;
    EngineLogger.logger.info("Enrich CSAR done. URL to download is: {}", csarURLReturn);
    return csarURLReturn;
}
From source file:at.ac.tuwien.dsg.cloud.salsa.engine.smartdeployment.main.SmartDeploymentService.java
License:Apache License
private List<Requirement> CAMFPolicyToQuelleRequirement(String theReq, TNodeTemplate node, TPolicy p) {
    List<Requirement> listReq = new ArrayList<>();
    // extract CARL Strings
    CharStream stream = new ANTLRInputStream(theReq);
    CARLLexer lexer = new CARLLexer(stream);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    CARLParser parser = new CARLParser(tokens);
    RequirementsContext requirementsContext = parser.requirements();
    ParseTreeWalker walker = new ParseTreeWalker(); // create standard walker
    CARLProgramListener extractor = new CARLProgramListener(parser);
    walker.walk(extractor, requirementsContext); // initiate walk of tree with listener
    org.eclipse.camf.carl.model.Requirements requirements = extractor.getRequirements();
    HashMap<String, String> allReqsOfNode = new HashMap<>();
    // os=Ubuntu; os:ver=12.04; sw=jre:1.7 ==> os=Ubuntu,
    // here flatten all the requirements of the node
    for (IRequirement req : requirements.getRequirements()) {
        EngineLogger.logger.debug("Irequirement: " + req.toString());
        // QUELLE only cares about system requirements
        if (req.getCategory().equals(RequirementCategory.SYSTEM)) {
            SystemRequirement sys = (SystemRequirement) req;
            RangeAttribute range = sys.getRange();
            String metric;
            if (sys instanceof CPURequirement) {
                metric = "VCPU";
            } else if (sys instanceof DiskRequirement) {
                metric = "StorageDisks";
            } else if (sys instanceof MemoryRequirement) {
                metric = "Memory";
            } else if (sys instanceof NetworkRequirement) {
                metric = "NetworkPerformance";
            } else {
                metric = "unknown";
            }
            EngineLogger.logger.debug("Parsing CAMF, mapping to metric: {}", metric);
            Condition.Type type;
            if (range != null) {
                int fromRange = range.from();
                int toRange = range.to();
                EngineLogger.logger.debug("We are parsing a range value: [{} --> {}]", fromRange, toRange);
                Requirement quellReq = new Requirement(node.getName() + "_req_" + metric);
                quellReq.addTargetServiceID(node.getId());
                EngineLogger.logger.debug("Before setting metric. Metric: " + quellReq.getMetric());
                quellReq.setMetric(new Metric(metric));
                EngineLogger.logger.debug("After setting metric. Metric: " + quellReq.getMetric());
                quellReq.addCondition(new Condition(Condition.Type.GREATER_EQUAL, null, new MetricValue(fromRange)));
                quellReq.addCondition(new Condition(Condition.Type.LESS_EQUAL, null, new MetricValue(toRange)));
                listReq.add(quellReq);
            } else {
                EngineLogger.logger.debug("We are parsing equal, there is no range value !");
                switch (sys.getOperator()) {
                case "lt":
                    type = Condition.Type.LESS_THAN;
                    break;
                case "le":
                    type = Condition.Type.LESS_EQUAL;
                    break;
                case "gt":
                    type = Condition.Type.GREATER_THAN;
                    break;
                case "ge":
                    type = Condition.Type.GREATER_EQUAL;
                    break;
                case "eq":
                    type = Condition.Type.EQUAL;
                    break;
                default:
                    type = Condition.Type.EQUAL;
                    break;
                }
                Requirement quellReq = new Requirement(node.getName() + "_req_" + metric);
                quellReq.addTargetServiceID(node.getId());
                EngineLogger.logger.debug("Before setting metric. Metric: " + quellReq.getMetric());
                quellReq.setMetric(new Metric(metric));
                EngineLogger.logger.debug("After setting metric. Metric: " + quellReq.getMetric());
                quellReq.addCondition(new Condition(type, null, new MetricValue(sys.getValue())));
                listReq.add(quellReq);
            }
        }
    }
    return listReq;
}
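This method repeats the CARL parsing boilerplate from enrich_CAMF_CSAR_Process verbatim. A hypothetical helper (not in the original source) that both methods could share, wrapping the CharStream -> CARLLexer -> CommonTokenStream -> CARLParser -> ParseTreeWalker sequence:

// Sketch of a shared helper; names and placement are assumptions, not original code.
private static org.eclipse.camf.carl.model.Requirements parseCarl(String carlSource) {
    CharStream stream = new ANTLRInputStream(carlSource);
    CARLLexer lexer = new CARLLexer(stream);
    CommonTokenStream tokens = new CommonTokenStream(lexer);   // buffer tokens between lexer and parser
    CARLParser parser = new CARLParser(tokens);
    RequirementsContext requirementsContext = parser.requirements();
    CARLProgramListener extractor = new CARLProgramListener(parser);
    new ParseTreeWalker().walk(extractor, requirementsContext); // fire listener callbacks over the tree
    return extractor.getRequirements();
}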
From source file:at.plechinger.scrapeql.parser.QueryParser.java
License:Open Source License
public Query parse(String query) {
    ScrapeQLLexer lexer = new ScrapeQLLexer(new ANTLRInputStream(query));
    ScrapeQLParser parser = new ScrapeQLParser(new CommonTokenStream(lexer));
    ParseTree tree = parser.parse();
    QueryParseTreeListener listener = new QueryParseTreeListener();
    ParseTreeWalker.DEFAULT.walk(listener, tree);
    Query resultQuery = listener.getQuery();
    return resultQuery;
}
From source file:bacter.ConversionGraph.java
License:Open Source License
/**
 * Read in an ACG from a string in extended newick format. Assumes
 * that the network is stored with exactly the same metadata as written
 * by the getExtendedNewick() method.
 *
 * @param string extended newick representation of ACG
 */
public void fromExtendedNewick(String string) {
    // Spin up ANTLR
    ANTLRInputStream input = new ANTLRInputStream(string);
    ExtendedNewickLexer lexer = new ExtendedNewickLexer(input);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    ExtendedNewickParser parser = new ExtendedNewickParser(tokens);
    ParseTree parseTree = parser.tree();

    Map<String, Conversion> convIDMap = new HashMap<>();
    Node root = new ExtendedNewickBaseVisitor<Node>() {

        /**
         * Convert branch lengths to node heights for all nodes in clade.
         *
         * @param node clade parent
         * @return minimum height assigned in clade.
         */
        private double branchLengthsToHeights(Node node) {
            if (node.isRoot())
                node.setHeight(0.0);
            else
                node.setHeight(node.getParent().getHeight() - node.getHeight());
            double minHeight = node.getHeight();
            for (Node child : node.getChildren()) {
                minHeight = Math.min(minHeight, branchLengthsToHeights(child));
            }
            return minHeight;
        }

        /**
         * Remove height offset from all nodes in clade
         * @param node parent of clade
         * @param offset offset to remove
         */
        private void removeOffset(Node node, double offset) {
            node.setHeight(node.getHeight() - offset);
            for (Node child : node.getChildren())
                removeOffset(child, offset);
        }

        private Node getTrueNode(Node node) {
            if (node.isLeaf()) {
                assert !convIDMap.containsKey(node.getID());
                return node;
            }
            if (convIDMap.containsKey(node.getID()))
                return getTrueNode(node.getChild(0));
            int hybridIdx = -1;
            int nonHybridIdx = -1;
            for (int i = 0; i < node.getChildCount(); i++) {
                if (node.getChild(i).isLeaf() && convIDMap.containsKey(node.getChild(i).getID()))
                    hybridIdx = i;
                else
                    nonHybridIdx = i;
            }
            if (hybridIdx > 0)
                return getTrueNode(node.getChild(nonHybridIdx));
            return node;
        }

        /**
         * Traverse the newly constructed tree looking for
         * hybrid nodes and using these to set the heights of
         * Conversion objects.
         *
         * @param node parent of clade
         */
        private void findConversionAttachments(Node node) {
            if (convIDMap.containsKey(node.getID())) {
                Conversion conv = convIDMap.get(node.getID());
                if (node.isLeaf()) {
                    conv.setHeight1(node.getHeight());
                    conv.setHeight2(node.getParent().getHeight());
                    conv.setNode2(getTrueNode(node.getParent()));
                } else
                    conv.setNode1(getTrueNode(node));
            }
            for (Node child : node.getChildren())
                findConversionAttachments(child);
        }

        /**
         * Remove all conversion-associated nodes, leaving only
         * the clonal frame.
         *
         * @param node parent of clade
         * @return new parent of same clade
         */
        private Node stripHybridNodes(Node node) {
            Node trueNode = getTrueNode(node);
            List<Node> trueChildren = new ArrayList<>();
            for (Node child : trueNode.getChildren()) {
                trueChildren.add(stripHybridNodes(child));
            }
            trueNode.removeAllChildren(false);
            for (Node trueChild : trueChildren)
                trueNode.addChild(trueChild);
            return trueNode;
        }

        private int numberInternalNodes(Node node, int nextNr) {
            if (node.isLeaf())
                return nextNr;
            for (Node child : node.getChildren())
                nextNr = numberInternalNodes(child, nextNr);
            node.setNr(nextNr);
            return nextNr + 1;
        }

        @Override
        public Node visitTree(@NotNull ExtendedNewickParser.TreeContext ctx) {
            Node root = visitNode(ctx.node());
            double minHeight = branchLengthsToHeights(root);
            removeOffset(root, minHeight);
            findConversionAttachments(root);
            root = stripHybridNodes(root);
            root.setParent(null);
            numberInternalNodes(root, root.getAllLeafNodes().size());
            return root;
        }

        @Override
        public Node visitNode(@NotNull ExtendedNewickParser.NodeContext ctx) {
            Node node = new Node();
            if (ctx.post().hybrid() != null) {
                String convID = ctx.post().hybrid().getText();
                node.setID(convID);
                Conversion conv;
                if (convIDMap.containsKey(convID))
                    conv = convIDMap.get(convID);
                else {
                    conv = new Conversion();
                    convIDMap.put(convID, conv);
                }
                if (ctx.node().isEmpty()) {
                    String locusID;
                    for (ExtendedNewickParser.AttribContext attribCtx : ctx.post().meta().attrib()) {
                        switch (attribCtx.attribKey.getText()) {
                        case "region":
                            conv.setStartSite(Integer
                                    .parseInt(attribCtx.attribValue().vector().attribValue(0).getText()));
                            conv.setEndSite(Integer
                                    .parseInt(attribCtx.attribValue().vector().attribValue(1).getText()));
                            break;
                        case "locus":
                            locusID = attribCtx.attribValue().getText();
                            if (locusID.startsWith("\""))
                                locusID = locusID.substring(1, locusID.length() - 1);
                            Locus locus = null;
                            for (Locus thisLocus : getLoci()) {
                                if (thisLocus.getID().equals(locusID))
                                    locus = thisLocus;
                            }
                            if (locus == null)
                                throw new IllegalArgumentException(
                                        "Locus with ID " + locusID + " not found.");
                            conv.setLocus(locus);
                            break;
                        default:
                            break;
                        }
                    }
                }
            }
            for (ExtendedNewickParser.NodeContext childCtx : ctx.node())
                node.addChild(visitNode(childCtx));
            if (ctx.post().label() != null) {
                node.setID(ctx.post().label().getText());
                node.setNr(Integer.parseInt(ctx.post().label().getText()) - taxaTranslationOffset);
            }
            node.setHeight(Double.parseDouble(ctx.post().length.getText()));
            return node;
        }
    }.visit(parseTree);

    m_nodes = root.getAllChildNodes().toArray(m_nodes);
    nodeCount = m_nodes.length;
    leafNodeCount = root.getAllLeafNodes().size();
    setRoot(root);
    initArrays();
    for (Locus locus : getLoci())
        convs.get(locus).clear();
    for (Conversion conv : convIDMap.values())
        addConversion(conv);
}
From source file:bam.web.dtc.compiler.Compiler.java
public void compile() throws Exception {
    outputDirectory.mkdirs();
    String src = new String(Files.readAllBytes(typescriptDefinitionFile.toPath()));
    TypeScriptLexer lexer = new TypeScriptLexer(new ANTLRInputStream(src));
    TypeScriptParser parser = new TypeScriptParser(new CommonTokenStream(lexer));
    ParseTreeWalker walker = new ParseTreeWalker(); // create standard walker
    CodeModelGenerator generator = new CodeModelGenerator();
    walker.walk(generator, parser.definitionsFile()); // initiate walk of tree with listener

    // TypeScriptDefinitionsLexer lexer = new TypeScriptDefinitionsLexer(new ANTLRInputStream(src));
    // TypeScriptDefinitionsParser parser = new TypeScriptDefinitionsParser(new CommonTokenStream(lexer));
    // TypeScriptDefinitionsParser.DtsContext dts = parser.dts();
    //
    // JCodeModel model = new JCodeModel();
    // new CodeModelProcessor().appendDefinitionFile(dts, model, "jsn.core");
    // try (FileOutputStream fileOutputStream = new FileOutputStream(new File("output"))) {
    //     CodeWriter cw = new SingleStreamCodeWriter(fileOutputStream);
    //     model.build(cw);
    // }
}
From source file:basicintast.Run.java
/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws IOException {
    ANTLRInputStream input = new ANTLRFileStream("input.basic");
    BasicLexer lexer = new BasicLexer(input);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    BasicParser parser = new BasicParser(tokens);
    ParseTree tree = parser.program();
    BasicVisitor eval = new BasicVisitorImpl();
    eval.visit(tree);
}
From source file:bayesiannetwork.BayesianNetwork.java
/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    // TODO code application logic here
    File inputFile = new File("input.txt");
    FileManager manager = new FileManager();
    String in;
    in = manager.leerArchivo(inputFile);
    //System.out.println(in);
    CharStream cs = new ANTLRInputStream(in);
    BayesGrammarLexer lexer = new BayesGrammarLexer(cs);
    //lexer.removeErrorListeners();
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    BayesGrammarParser parser = new BayesGrammarParser(tokens);
    BayesGrammarParser.ProgramContext contexto = parser.program();
    ParseTree tree = contexto;
    int errorsCount = parser.getNumberOfSyntaxErrors();
    System.out.println(errorsCount);
    if (errorsCount == 0) {
        System.out.println("Parseo Exitoso");
        BayesVisitor visitor = new BayesVisitor();
        visitor.visit(tree);
        BayesStructureVisitor structure = new BayesStructureVisitor();
        structure.visit(tree);
        String pTotal = structure.totalExpression();
        structure.completeTable();
        System.out.println(pTotal + " pTotal");
        System.out.println("");
        boolean valid = manager.getCantLines() == visitor.validateNetwork();
        boolean validR = visitor.validateUnique(in);
        System.out.println(visitor.getNetwork());
        GraphViz graphViz = new GraphViz(visitor.getNetwork());
        ArrayList<Node> network = visitor.getNetwork();
        ArrayList<Node> completeNetwork = structure.getStructure();
        if (valid && validR) {
            JFrame frame = new JFrame("Console Demo");
            frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
            frame.setSize(800, 600);
            Console console = new Console(Color.BLACK, Color.GREEN,
                    new Font(Font.MONOSPACED, Font.BOLD, 14), "$ ");
            console.setPreferredSize(new Dimension(800, 600));
            console.setCompletionSource(new DefaultCompletionSource("help", "list", "die", "dinosaurs"));
            console.setProcessor(new InputProcessor() {
                public void process(String expression, Console console) {
                    System.out.println("You typed: '" + expression + "'");
                    expression = expression.toUpperCase();
                    CharStream cs = new ANTLRInputStream(expression);
                    BayesGrammarLexer lexer = new BayesGrammarLexer(cs);
                    CommonTokenStream tokens = new CommonTokenStream(lexer);
                    BayesGrammarParser parser = new BayesGrammarParser(tokens);
                    BayesGrammarParser.CliBayesContext context = parser.cliBayes();
                    ParseTree tree = context;
                    int errorsCount = parser.getNumberOfSyntaxErrors();
                    if (errorsCount > 0) {
                        JOptionPane.showMessageDialog(null, "Expresión mal ingresada", "Error",
                                JOptionPane.ERROR_MESSAGE);
                    } else {
                        EnumerationVisitor enumeration = new EnumerationVisitor();
                        enumeration.visit(tree);
                        boolean validVars = enumeration.validateExpression(visitor.getNetwork());
                        if (!validVars) {
                            JOptionPane.showMessageDialog(null, "Hay una variable no existente", "Error",
                                    JOptionPane.ERROR_MESSAGE);
                        } else {
                            enumeration.getHiddenVars(network);
                            // pTotal = enumeration.includeExpression(pTotal, expression);
                            double answer = enumeration.enumerate(pTotal, completeNetwork, visitor.getNetwork());
                            // JOptionPane.showMessageDialog(null, expression + " = " + answer);
                            console.write("> " + answer + "\n");
                        }
                    }
                }
            });
            frame.add(console);
            frame.pack();
            frame.setVisible(true);
            /*while (true) {
                try {
                    System.out.println("");
                    System.out.println("Enter expression: ");
                    // with a text box
                    String expression = JOptionPane.showInputDialog(null, "Expression to evaluate", "",
                            JOptionPane.QUESTION_MESSAGE); // the icon will be a question mark
                    expression = expression.toUpperCase();
                    cs = new ANTLRInputStream(expression);
                    lexer = new BayesGrammarLexer(cs);
                    tokens = new CommonTokenStream(lexer);
                    parser = new BayesGrammarParser(tokens);
                    BayesGrammarParser.CliBayesContext context = parser.cliBayes();
                    tree = context;
                    errorsCount = parser.getNumberOfSyntaxErrors();
                    if (errorsCount > 0) {
                        JOptionPane.showMessageDialog(null, "Expresión mal ingresada", "Error",
                                JOptionPane.ERROR_MESSAGE);
                    } else {
                        EnumerationVisitor enumeration = new EnumerationVisitor();
                        enumeration.visit(tree);
                        boolean validVars = enumeration.validateExpression(visitor.getNetwork());
                        if (!validVars) {
                            JOptionPane.showMessageDialog(null, "Hay una variable no existente", "Error",
                                    JOptionPane.ERROR_MESSAGE);
                        } else {
                            enumeration.getHiddenVars(network);
                            // pTotal = enumeration.includeExpression(pTotal, expression);
                            double answer = enumeration.enumerate(pTotal, completeNetwork, visitor.getNetwork());
                            JOptionPane.showMessageDialog(null, expression + " = " + answer);
                        }
                    }
                } catch (Exception e) {
                    break;
                }
            }*/
        } else {
            JOptionPane.showMessageDialog(null, "Error: not valid", "", JOptionPane.ERROR_MESSAGE);
        }
    }
}
From source file:bayesnetwork.BayesNetwork.java
public static void main(String[] args) {
    FIleManager fileMan = new FIleManager();
    String networkStr = fileMan.leerArchivo(new File("network.txt"));
    CharStream stream = new ANTLRInputStream(networkStr);
    // Lexer
    grammarBayesLexer bayesLexer = new grammarBayesLexer(stream);
    // Tokens (lexer)
    CommonTokenStream bayesTokens = new CommonTokenStream(bayesLexer);
    // Parser (lexer, tokens)
    grammarBayesParser bayesParser = new grammarBayesParser(bayesTokens);
    grammarBayesParser.ProgramContext networkCtx = bayesParser.program();
    // Semantic tree
    ParseTree networkParseTree = networkCtx;
    // total number of syntax errors in the semantic tree (whether it is well written and makes sense)
    int errorsCount = bayesParser.getNumberOfSyntaxErrors();
    System.out.println("Errors: " + errorsCount);
    // Grammar validation ends here.
    // If there are no errors, the network is well described
    if (errorsCount == 0) {
        // Validate the Bayes network that was entered
        BayesNetworkValidator validate = new BayesNetworkValidator();
        validate.visit(networkParseTree);
        // Generate the node table (variables in the Bayesian network)
        BayesNetworkTable table = new BayesNetworkTable();
        table.visit(networkParseTree);
        table.generateAllPossibilitiesTable();
        // Get the full string describing the entire Bayesian network
        String redBayesianaProbs = table.getTableExp();
        // Print the node table
        System.out.println("**************************************" + "\nPrint tabla de nodos");
        for (BayesNode bn : validate.getNetwork()) {
            System.out.println(bn.specialToString());
        }
        System.out.println("**************************************\n");
        // Print the complete probability table
        System.out.println("**************************************" + "\nPrint tabla de probabilidades completa");
        for (BayesNode bn : table.getBayesTable()) {
            System.out.println(bn.toDisplay());
        }
        System.out.println("**************************************\n");
        // Validate that the number of parsed probabilities matches the number of probabilities entered
        boolean validCantProbs = fileMan.getCantLines() == validate.validNetwork();
        System.out.println("Is valid " + (validCantProbs ? "Si" : "No"));
        // Validate that the Bayesian network satisfies the probabilities entered
        boolean validProbsEquality = validate.validateUnique(networkStr);
        System.out.println("Is valid " + (validProbsEquality ? "Si" : "No"));
        // network: nodes of the Bayesian network
        ArrayList<BayesNode> network = validate.getNetwork();
        // probs: nodes with the probabilities (probability table)
        ArrayList<BayesNode> completeNetwork = table.getBayesTable();
        // While the table is complete with all probabilities and the probabilities are verified
        while (validCantProbs && validProbsEquality) {
            String expression = JOptionPane
                    .showInputDialog(null, "Ingrese expresion a calcular", "", JOptionPane.QUESTION_MESSAGE)
                    .toUpperCase();
            // Only used to signal an error by default
            errorsCount = 1;
            if (!expression.isEmpty()) {
                System.out.println("Expresión a calcular: " + expression);
                // Validate that the expression to evaluate is well formed
                stream = new ANTLRInputStream(expression);
                bayesLexer = new grammarBayesLexer(stream);
                bayesTokens = new CommonTokenStream(bayesLexer);
                bayesParser = new grammarBayesParser(bayesTokens);
                grammarBayesParser.CliBayesContext context = bayesParser.cliBayes();
                networkParseTree = context;
                errorsCount = bayesParser.getNumberOfSyntaxErrors();
                // validation done
            }
            // If the expression has errors (badly written), ask again
            if (errorsCount > 0) {
                JOptionPane.showMessageDialog(null, "Expresión mal ingresada", "Error",
                        JOptionPane.ERROR_MESSAGE);
            } else {
                BayesNetworkEnumeration enumeration = new BayesNetworkEnumeration();
                enumeration.visit(networkParseTree);
                enumeration.getHiddenVars(network);
                // pTotal = enumeration.includeExpression(pTotal, expression);
                double answer = enumeration.calcEnumerationTotal(
                        enumeration.includeExpression(redBayesianaProbs, expression), completeNetwork);
                JOptionPane.showMessageDialog(null, expression + " = " + answer);
            }
        }
    }
}
From source file:beast.util.TreeParser.java
License:Open Source License
/**
 * Parse a newick-ish string and generate the BEAST tree it describes.
 *
 * @param newick string to parse
 * @return root node of tree
 */
public Node parseNewick(String newick) {
    CharStream charStream = CharStreams.fromString(newick);

    // Custom parser/lexer error listener
    BaseErrorListener errorListener = new BaseErrorListener() {
        @Override
        public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol, int line,
                int charPositionInLine, String msg, RecognitionException e) {
            throw new TreeParsingException(msg, charPositionInLine, line);
        }
    };

    // Use lexer to produce token stream
    NewickLexer lexer = new NewickLexer(charStream);
    lexer.removeErrorListeners();
    lexer.addErrorListener(errorListener);
    CommonTokenStream tokens = new CommonTokenStream(lexer);

    // Parse token stream to produce parse tree
    NewickParser parser = new NewickParser(tokens);
    parser.removeErrorListeners();
    parser.addErrorListener(errorListener);
    ParseTree parseTree = parser.tree();

    // Traverse parse tree, constructing BEAST tree along the way
    NewickASTVisitor visitor = new NewickASTVisitor();
    return visitor.visit(parseTree);
}
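This is the only example above that builds its CharStream with CharStreams.fromString; the others use ANTLRInputStream, which the ANTLR runtime deprecated in 4.7 in favour of the CharStreams factory methods. A sketch of the modern equivalents (file names are placeholders; fromFileName and fromStream throw IOException):

// Assumes: import org.antlr.v4.runtime.CharStream;
//          import org.antlr.v4.runtime.CharStreams;
CharStream fromText = CharStreams.fromString("(A:1.0,B:2.0);");                    // replaces new ANTLRInputStream(String)
CharStream fromFile = CharStreams.fromFileName("tree.newick");                     // replaces new ANTLRFileStream(String)
CharStream fromStrm = CharStreams.fromStream(new FileInputStream("tree.newick"));  // replaces new ANTLRInputStream(InputStream)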