Example usage for org.antlr.v4.runtime CommonTokenStream CommonTokenStream

List of usage examples for org.antlr.v4.runtime CommonTokenStream CommonTokenStream

Introduction

On this page you can find example usages of the org.antlr.v4.runtime CommonTokenStream constructor.

Prototype

public CommonTokenStream(TokenSource tokenSource) 

Document

Constructs a new CommonTokenStream using the specified token source and the default token channel (Token#DEFAULT_CHANNEL).
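
The examples below come from real projects and rely on project-specific grammars. As a minimal, self-contained sketch of the pipeline this constructor anchors (a lexer feeds the CommonTokenStream, which in turn feeds a parser), consider the following. Here MyGrammarLexer, MyGrammarParser, and startRule are placeholders for your own ANTLR-generated classes and start rule; note also that CharStreams.fromString is the modern replacement for the deprecated ANTLRInputStream that several examples below still use.

import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;

// Placeholder pipeline: substitute your own generated lexer and parser classes.
CharStream input = CharStreams.fromString("1 + 2");
MyGrammarLexer lexer = new MyGrammarLexer(input);
CommonTokenStream tokens = new CommonTokenStream(lexer); // reads Token.DEFAULT_CHANNEL
MyGrammarParser parser = new MyGrammarParser(tokens);
System.out.println(parser.startRule().toStringTree(parser));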

Usage

From source file:gov.nasa.jpf.inspector.client.parser.CommandParserFactory.java

License:Apache License

/**
 * Package-private method that creates an ANTLR parser from a command string to be parsed.
 * @param expr Command as a string (that should be parsed).
 * @return The ANTLR parser.
 */
static ConsoleGrammarParser getParser(String expr) {
    org.antlr.v4.runtime.Lexer lexer = new ConsoleGrammarLexer(new ANTLRInputStream(expr));
    lexer.removeErrorListeners();
    lexer.addErrorListener(ThrowingErrorListener.getInstance());
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    ConsoleGrammarParser parser = new ConsoleGrammarParser(tokens);
    parser.removeErrorListeners();
    parser.addErrorListener(ThrowingErrorListener.getInstance());
    return parser;
}
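
ThrowingErrorListener above is project-specific, but the pattern is a common one: replace ANTLR's default console listeners on both the lexer and the parser with one that throws, so a malformed command fails fast instead of being recovered from silently. A minimal sketch of such a listener, assuming only the stock ANTLR runtime, might look like this:

import org.antlr.v4.runtime.BaseErrorListener;
import org.antlr.v4.runtime.RecognitionException;
import org.antlr.v4.runtime.Recognizer;
import org.antlr.v4.runtime.misc.ParseCancellationException;

// Hypothetical stand-in for the ThrowingErrorListener used above.
public class ThrowingErrorListener extends BaseErrorListener {
    private static final ThrowingErrorListener INSTANCE = new ThrowingErrorListener();

    public static ThrowingErrorListener getInstance() {
        return INSTANCE;
    }

    @Override
    public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol,
            int line, int charPositionInLine, String msg, RecognitionException e) {
        // Abort lexing/parsing on the first reported error.
        throw new ParseCancellationException("line " + line + ":" + charPositionInLine + " " + msg);
    }
}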

From source file:greycat.language.Model.java

License:Open Source License

private void internal_parse(CharStream in, Resolver resolver) {
    if (md == null) {
        try {
            md = MessageDigest.getInstance("SHA-1");
        } catch (NoSuchAlgorithmException e) {
            e.printStackTrace();
        }
    }
    if (in.size() == 0) {
        return;
    }

    String sha1 = convertToHex(md.digest(in.getText(new Interval(0, in.size())).getBytes()));
    if (alreadyLoaded.contains(sha1)) {
        return;
    } else {
        //circular dependency protection
        alreadyLoaded.add(sha1);
    }
    BufferedTokenStream tokens = new CommonTokenStream(new GreyCatModelLexer(in));
    GreyCatModelParser parser = new GreyCatModelParser(tokens);
    GreyCatModelParser.ModelDclContext modelDclCtx = parser.modelDcl();
    //first subImport
    modelDclCtx.importDcl().forEach(importDclContext -> {
        String subPath = cleanString(importDclContext.path);
        CharStream subStream = resolver.resolver(subPath);
        if (subStream == null) {
            throw new RuntimeException("Import not resolved " + subPath);
        }
        try {
            internal_parse(subStream, resolver);
        } catch (Exception e) {
            throw new RuntimeException("Parse Error while parsing " + subPath, e);
        }
    });
    // constants
    for (GreyCatModelParser.ConstDclContext constDclCtx : modelDclCtx.constDcl()) {
        String const_name = constDclCtx.name.getText();
        Constant c = constants.get(const_name);
        if (c == null) {
            c = new Constant(const_name);
            constants.put(const_name, c);
        }
        c.setType(constDclCtx.type.getText());
        String value = null;
        if (constDclCtx.constValueDcl() != null) {
            if (constDclCtx.constValueDcl().simpleValueDcl() != null) {
                GreyCatModelParser.SimpleValueDclContext sVc = constDclCtx.constValueDcl().simpleValueDcl();
                if (sVc.STRING() != null) {
                    value = cleanString(sVc.STRING().getSymbol());
                } else {
                    value = constDclCtx.constValueDcl().simpleValueDcl().getText();
                }
            } else if (constDclCtx.constValueDcl().taskValueDcl() != null) {
                GreyCatModelParser.TaskValueDclContext taskDcl = constDclCtx.constValueDcl().taskValueDcl();
                value = taskDcl.getText();
            }
        }
        c.setValue(value);
    }
    // classes
    for (GreyCatModelParser.ClassDclContext classDclCtx : modelDclCtx.classDcl()) {
        String classFqn = classDclCtx.name.getText();
        Class newClass = getOrAddClass(classFqn);
        // parents
        if (classDclCtx.parentDcl() != null) {
            final Class parentClass = getOrAddClass(classDclCtx.parentDcl().IDENT().getText());
            newClass.setParent(parentClass);
        }
        // annotations
        for (GreyCatModelParser.AnnotationDclContext annotationDcl : classDclCtx.annotationDcl()) {
            addAnnotation(newClass, annotationDcl);
        }
        // attributes
        for (GreyCatModelParser.AttributeDclContext attDcl : classDclCtx.attributeDcl()) {
            addAttribute(newClass, attDcl);
        }
        // relations
        for (GreyCatModelParser.RelationDclContext relDclCtx : classDclCtx.relationDcl()) {
            addRelation(newClass, relDclCtx);
        }
        // references
        for (GreyCatModelParser.ReferenceDclContext refDclCtx : classDclCtx.referenceDcl()) {
            addReference(newClass, refDclCtx);
        }
        // local indexes
        for (GreyCatModelParser.LocalIndexDclContext localIndexDclCtx : classDclCtx.localIndexDcl()) {
            addLocalIndex(newClass, localIndexDclCtx);
        }
        // local constants
        for (GreyCatModelParser.ConstDclContext constDclCtx : classDclCtx.constDcl()) {
            addLocalConstant(newClass, constDclCtx);
        }
    }

    // opposite management
    for (GreyCatModelParser.ClassDclContext classDclCtx : modelDclCtx.classDcl()) {
        String classFqn = classDclCtx.name.getText();
        Class classType = classes.get(classFqn);

        // relations
        for (GreyCatModelParser.RelationDclContext relDclCtx : classDclCtx.relationDcl()) {
            linkOppositeRelations(classType, relDclCtx);
        }
        // references
        for (GreyCatModelParser.ReferenceDclContext refDclCtx : classDclCtx.referenceDcl()) {
            linkOppositeReferences(classType, refDclCtx);
        }
        // local indexes
        for (GreyCatModelParser.LocalIndexDclContext idxDclCtx : classDclCtx.localIndexDcl()) {
            linkOppositeLocalIndexes(classType, idxDclCtx);
        }
    }

    // global indexes
    for (GreyCatModelParser.GlobalIndexDclContext globalIdxDclContext : modelDclCtx.globalIndexDcl()) {
        final String name = globalIdxDclContext.name.getText();
        final String type = globalIdxDclContext.type.getText();
        final Index index = getOrAddGlobalIndex(name, type);
        final Class indexedClass = getOrAddClass(index.type());
        for (TerminalNode idxDclIdent : globalIdxDclContext.indexAttributesDcl().IDENT()) {
            index.addAttributeRef(new AttributeRef(indexedClass.getOrCreateAttribute(idxDclIdent.getText())));
        }
    }

    // custom types
    for (GreyCatModelParser.CustomTypeDclContext customTypeDclCtx : modelDclCtx.customTypeDcl()) {
        String customTypeName = customTypeDclCtx.name.getText();
        final CustomType newCustomType = getOrAddCustomType(customTypeName);
        // parents
        if (customTypeDclCtx.parentDcl() != null) {
            final CustomType parentCustomType = getOrAddCustomType(
                    customTypeDclCtx.parentDcl().IDENT().getText());
            newCustomType.setParent(parentCustomType);
        }
        // attributes
        for (GreyCatModelParser.AttributeDclContext attDcl : customTypeDclCtx.attributeDcl()) {
            addAttribute(newCustomType, attDcl);
        }
        //            // relations
        //            for (GreyCatModelParser.RelationDclContext relDcl : customTypeDclCtx.relationDcl()) {
        //                addRelation(newCustomType, relDcl);
        //            }
        //            // references
        //            for (GreyCatModelParser.ReferenceDclContext refDcl : customTypeDclCtx.referenceDcl()) {
        //                addReference(newCustomType, refDcl);
        //            }
        //            // local indexes
        //            for (GreyCatModelParser.LocalIndexDclContext localIndexDcl : customTypeDclCtx.localIndexDcl()) {
        //                addLocalIndex(newCustomType, localIndexDcl);
        //            }
        // local constants
        for (GreyCatModelParser.ConstDclContext constDclCtx : customTypeDclCtx.constDcl()) {
            addLocalConstant(newCustomType, constDclCtx);
        }
        //            // opposite management
        //            for (GreyCatModelParser.CustomTypeDclContext typeDclCtx : modelDclCtx.customTypeDcl()) {
        //                String typeFqn = typeDclCtx.name.getText();
        //                CustomType customType = customTypes.get(typeFqn);
        //
        //                // relations
        //                for (GreyCatModelParser.RelationDclContext relDclCtx : typeDclCtx.relationDcl()) {
        //                    linkOppositeRelations(customType, relDclCtx);
        //                }
        //                // references

        //                for (GreyCatModelParser.ReferenceDclContext refDclCtx : typeDclCtx.referenceDcl()) {
        //                    linkOppositeReferences(customType, refDclCtx);
        //                }
        //                // local indexes
        //                for (GreyCatModelParser.LocalIndexDclContext idxDclCtx : typeDclCtx.localIndexDcl()) {
        //                    linkOppositeLocalIndexes(customType, idxDclCtx);
        //                }
        //            }

    }
}

From source file:groovy.ui.text.SmartDocumentFilter.java

License:Apache License

private void parseDocument() throws BadLocationException {
    GroovyLangLexer lexer;
    try {
        lexer = createLexer(styledDocument.getText(0, styledDocument.getLength()));
    } catch (IOException e) {
        e.printStackTrace();
        return;
    }

    CommonTokenStream tokenStream = new CommonTokenStream(lexer);

    try {
        tokenStream.fill();
    } catch (LexerNoViableAltException | GroovySyntaxError e) {
        // ignore
        return;
    } catch (Exception e) {
        e.printStackTrace();
        return;
    }

    List<Token> tokenList = tokenStream.getTokens();
    List<Token> tokenListToRender = findTokensToRender(tokenList);

    for (Token token : tokenListToRender) {
        int tokenType = token.getType();

        //                if (token instanceof CommonToken) {
        //                    System.out.println(((CommonToken) token).toString(lexer));
        //                }

        if (EOF == tokenType) {
            continue;
        }

        int tokenStartIndex = token.getStartIndex();
        int tokenStopIndex = token.getStopIndex();
        int tokenLength = tokenStopIndex - tokenStartIndex + 1;

        styledDocument.setCharacterAttributes(tokenStartIndex, tokenLength, findStyleByTokenType(tokenType),
                true);

        if (GStringBegin == tokenType || GStringPart == tokenType) {
            styledDocument.setCharacterAttributes(tokenStartIndex + tokenLength - 1, 1, defaultStyle, true);
        }
    }

    this.latestTokenList = tokenList;
}
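
One detail worth calling out in this example: CommonTokenStream is lazy and normally pulls tokens from the lexer only as a parser consumes them. Calling tokenStream.fill() forces the entire input to be tokenized up front, which is exactly what a syntax highlighter needs, since no parser drives the stream. A minimal lexer-only sketch of the same pattern (MyGrammarLexer again stands in for a generated lexer, with the usual org.antlr.v4.runtime imports assumed):

CommonTokenStream tokens = new CommonTokenStream(new MyGrammarLexer(CharStreams.fromString(source)));
tokens.fill(); // tokenize everything now; no parser will pull tokens later
for (Token t : tokens.getTokens()) {
    System.out.println(t.getType() + " [" + t.getStartIndex() + ".." + t.getStopIndex() + "]");
}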

From source file:gsp.GSPMain.java

/**
 * @param args the command line arguments
 * @throws java.io.IOException
 */
public static void main(String[] args) throws IOException {
    // TODO code application logic here

    CharStream cs = CharStreams.fromFileName("inputBlocks.txt");

    gspGrammarLexer lexer = new gspGrammarLexer(cs);
    //lexer.removeErrorListeners();

    CommonTokenStream tokens = new CommonTokenStream(lexer);
    gspGrammarParser parser = new gspGrammarParser(tokens);
    gspGrammarParser.ProgramContext contexto = parser.program();

    ParseTree tree = contexto;

    int errorsCount = parser.getNumberOfSyntaxErrors();
    System.out.println(errorsCount);
    if (errorsCount == 0) {
        ParserGSP gsp = new ParserGSP();
        gsp.visit(tree);

        System.out.println(gsp.getPredicates());
        System.out.println("Actions");
        for (Action a : gsp.getActions()) {
            System.out.println("----------");
            System.out.println(a);
            System.out.println("----------");
        }

        GSP algorithm = new GSP(gsp.getInitialState(), gsp.getGoalState(), gsp.getActions());
    }
}

From source file:gui.Interfaz.java

private void Correr(String texto) {
    CharStream input = new ANTLRInputStream(texto);
    SqlLexer lexer = new SqlLexer(input);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    SqlParser parser = new SqlParser(tokens);

    //parser.removeErrorListeners();
    //ContextListener errorListener = new ContextListener();
    //parser.addErrorListener(errorListener);

    // Parse once; calling parser.program() a second time would try to parse
    // again from an already-consumed token stream.
    SqlParser.ProgramContext arbol = parser.program();
    ParseTree tree = arbol;

    System.out.println(tree.toStringTree(parser));
    recorrido.visitProgram(arbol);

    //String t =arbol.toStringTree(parser);
    //System.out.print(t);

    //show AST in GUI
    JFrame frame = new JFrame("Antlr AST");
    JPanel panel = new JPanel();
    TreeViewer viewer = new TreeViewer(Arrays.asList(parser.getRuleNames()), tree);
    viewer.setScale(1.5); // scale a little
    panel.add(viewer);
    frame.add(panel);
    //frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
    frame.setSize(250, 300);
    frame.setVisible(true);

    //String err = errorListener.getError();
    //this.salidaError.setText(err);
}
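
A side note on the GUI portion: TreeViewer lives in org.antlr.v4.gui, which ships with the complete ANTLR tool jar rather than the runtime-only artifact. For quick debugging, the same package offers a one-line alternative (a sketch, assuming the complete jar is on the classpath):

// Opens a Swing window showing the parse tree; returns a Future<JFrame>.
org.antlr.v4.gui.Trees.inspect(tree, parser);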

From source file:hydrograph.engine.expression.api.ValidationAPI.java

License:Apache License

private ExpressionEditorParser.BlockContext generateAntlrTree() {
    ANTLRInputStream stream = new ANTLRInputStream(expr);
    ExpressionEditorLexer lexer = new ExpressionEditorLexer(stream);
    CommonTokenStream tokenStream = new CommonTokenStream(lexer);
    ExpressionEditorParser parser = new ExpressionEditorParser(tokenStream);
    parser.removeErrorListeners();
    parser.addErrorListener(DescriptiveErrorListener.INSTANCE);
    lexer.removeErrorListeners();
    lexer.addErrorListener(DescriptiveErrorListener.INSTANCE);
    return parser.block();
}

From source file:hydrograph.server.service.HydrographService.java

License:Apache License

private void start() {
    int portNumber = Constants.DEFAULT_PORT_NUMBER;
    try {
        portNumber = Integer
                .parseInt(ServiceUtilities.getServiceConfigResourceBundle().getString(Constants.PORT_ID));
        LOG.debug("Port number '" + portNumber + "' fetched from properties file");
    } catch (Exception e) {
        LOG.error("Error fetching port number. Defaulting to " + Constants.DEFAULT_PORT_NUMBER, e);
    }

    /**
     * Setting Port number to the server
     */
    Spark.port(portNumber);

    /**
     * Creating Websocket on Server for Execution tracking service.
     */
    webSocket("/executionTracking", ExecutionTrackingWebsocketHandler.class);

    Spark.post("/getConnectionStatus", new Route() {
        @SuppressWarnings({ "unchecked", "rawtypes", "unused" })
        @Override
        public Object handle(Request request, Response response)
                throws InstantiationException, IllegalAccessException, ClassNotFoundException, JSONException {
            LOG.info("************************getConnectionStatus endpoint - started************************");
            LOG.info("+++ Start: " + new Timestamp((new Date()).getTime()));
            ObjectMapper objectMapper = new ObjectMapper();
            String requestParameters = request.queryParams(Constants.REQUEST_PARAMETERS), dbClassName = null,
                    objectAsString = null;
            JSONObject requestParameterValues = new JSONObject(requestParameters);
            Map metadataProperties = extractingJsonObjects(requestParameterValues);
            String dbTypeToTest = metadataProperties
                    .getOrDefault(Constants.dbType,
                            new ParamsCannotBeNullOrEmpty(Constants.dbType + " Cannot be null or empty"))
                    .toString();
            switch (dbTypeToTest.toLowerCase()) {
            case Constants.ORACLE:
                try {
                    if (ServiceUtilities.getConnectionStatus(metadataProperties,
                            Constants.ORACLE_JDBC_CLASSNAME, Constants.QUERY_TO_TEST)) {
                        LOG.trace("Connection Successful");
                        objectAsString = objectMapper
                                .writeValueAsString("Connection To Oracle database is Successful");
                    } else {
                        LOG.trace("Connection UnSuccessful");
                        objectAsString = objectMapper
                                .writeValueAsString("Connection To Oracle database UnSuccessful");
                    }
                } catch (Exception e) {
                    LOG.error("Connection fails with exception : " + e);
                    objectAsString = e.getLocalizedMessage();
                }
                break;
            case Constants.MYSQL:
                try {
                    if (ServiceUtilities.getConnectionStatus(metadataProperties, Constants.MYSQL_JDBC_CLASSNAME,
                            Constants.QUERY_TO_TEST)) {
                        LOG.trace("Connection Successful");
                        objectAsString = objectMapper
                                .writeValueAsString("Connection To MySQL database is Successful");
                    } else {
                        LOG.trace("Connection UnSuccessful");
                        objectAsString = objectMapper
                                .writeValueAsString("Connection To MySQL database UnSuccessful");
                    }
                } catch (Exception e) {
                    LOG.error("Connection fails with exception : " + e);
                    objectAsString = e.getLocalizedMessage();
                }
                break;

            case Constants.REDSHIFT:
                try {
                    if (ServiceUtilities.getConnectionStatus(metadataProperties,
                            Constants.REDSHIFT_JDBC_CLASSNAME, Constants.DEFAULT_REDRESHIFT_QUERY_TO_TEST)) {
                        LOG.trace("Connection Successful");
                        objectAsString = objectMapper
                                .writeValueAsString("Connection To Redshift database is Successful");
                    } else {
                        LOG.trace("Connection UnSuccessful");
                        objectAsString = objectMapper
                                .writeValueAsString("Connection To Redshift database UnSuccessful");
                    }
                } catch (Exception e) {
                    LOG.error("Connection fails with exception : " + e);
                    objectAsString = e.getLocalizedMessage();
                }
                break;
            case Constants.TERADATA:
                try {
                    if (ServiceUtilities.getConnectionStatus(metadataProperties,
                            Constants.TERADATA_JDBC_CLASSNAME, Constants.QUERY_TO_TEST_TERADATA)) {
                        LOG.trace("Connection Successful");
                        objectAsString = objectMapper
                                .writeValueAsString("Connection To Teradata database is Successful");
                    } else {
                        LOG.trace("Connection UnSuccessful");
                        objectAsString = objectMapper
                                .writeValueAsString("Connection To Teradata database UnSuccessful");
                    }
                } catch (Exception e) {
                    LOG.error("Connection fails with exception : " + e);
                    objectAsString = e.getLocalizedMessage();
                }
                break;
            }
            return objectAsString;
        }

        @SuppressWarnings({ "unchecked", "rawtypes" })
        private Map extractingJsonObjects(JSONObject requestParameterValues) throws JSONException {

            String dbType = null, userId = null, password = null, host = null, port = null, sid = null,
                    driverType = null, query = null, tableName = null, database = null;
            Map metadataProperties = new HashMap();
            if (!requestParameterValues.isNull(Constants.dbType)) {
                dbType = requestParameterValues.getString(Constants.dbType);
                metadataProperties.put(Constants.dbType, dbType);
            }
            if (!requestParameterValues.isNull(Constants.USERNAME)) {
                userId = requestParameterValues.getString(Constants.USERNAME);
                metadataProperties.put(Constants.USERNAME, userId);
            }
            if (!requestParameterValues.isNull(Constants.SERVICE_PWD)) {
                password = requestParameterValues.getString(Constants.SERVICE_PWD);
                metadataProperties.put(Constants.SERVICE_PWD, password);
            }
            if (!requestParameterValues.isNull(Constants.HOST_NAME)) {
                host = requestParameterValues.getString(Constants.HOST_NAME);
                metadataProperties.put(Constants.HOST_NAME, host);
            }
            if (!requestParameterValues.isNull(Constants.PORT_NUMBER)) {
                port = requestParameterValues.getString(Constants.PORT_NUMBER);
                metadataProperties.put(Constants.PORT_NUMBER, port);
            } else {
                if (metadataProperties.get(Constants.dbType).toString().equalsIgnoreCase("mysql")) {
                    port = Constants.MYSQL_DEFAULT_PORT;
                    metadataProperties.put(Constants.PORT_NUMBER, port);

                } else if (metadataProperties.get(Constants.dbType).toString().equalsIgnoreCase("oracle")) {
                    port = Constants.ORACLE_DEFAULT_PORT;
                    metadataProperties.put(Constants.PORT_NUMBER, port);
                }
                LOG.info("Connecting " + dbType + " port is not provided using default port : " + port);
            }
            if (!requestParameterValues.isNull(Constants.SID)) {
                sid = requestParameterValues.getString(Constants.SID);
                metadataProperties.put(Constants.SID, sid);
            }
            if (!requestParameterValues.isNull(Constants.DRIVER_TYPE)) {
                driverType = requestParameterValues.getString(Constants.DRIVER_TYPE);
                metadataProperties.put(Constants.DRIVER_TYPE, driverType);
            }
            if (!requestParameterValues.isNull(Constants.QUERY)) {
                query = requestParameterValues.getString(Constants.QUERY);
                metadataProperties.put(Constants.QUERY, query);
            }
            if (!requestParameterValues.isNull(Constants.TABLENAME)) {
                tableName = requestParameterValues.getString(Constants.TABLENAME);
                metadataProperties.put(Constants.TABLENAME, tableName);
            }
            if (!requestParameterValues.isNull(Constants.DATABASE_NAME)) {
                database = requestParameterValues.getString(Constants.DATABASE_NAME);
                metadataProperties.put(Constants.DATABASE_NAME, database);
            }

            LOG.info("Fetched request parameters are: " + Constants.dbType + " => " + dbType + " "
                    + Constants.USERNAME + " => " + userId + " " + Constants.HOST_NAME + " => " + host + " "
                    + Constants.PORT_NUMBER + " => " + port + " " + Constants.SID + " => " + sid + " "
                    + Constants.DRIVER_TYPE + " => " + driverType + " " + Constants.QUERY + " => " + query + " "
                    + Constants.TABLENAME + " => " + tableName + " " + Constants.DATABASE_NAME + " => "
                    + database + " ");
            return metadataProperties;
        }
    });

    Spark.post("readFromMetastore", new Route() {

        @Override
        public Object handle(Request request, Response response)
                throws ParamsCannotBeNullOrEmpty, ClassNotFoundException, IllegalAccessException, JSONException,
                JsonProcessingException, TableOrQueryParamNotFound, SQLException, InstantiationException {
            LOG.info("************************readFromMetastore endpoint - started************************");
            LOG.info("+++ Start: " + new Timestamp((new Date()).getTime()));
            ObjectMapper objectMapper = new ObjectMapper();
            String requestParameters = request.queryParams(Constants.REQUEST_PARAMETERS), objectAsString = null,
                    dbClassName = null;
            JSONObject requestParameterValues = new JSONObject(requestParameters);
            // Method to extracting request parameter details from input
            // json.
            Map metadataProperties = extractingJsonObjects(requestParameterValues);

            String dbType = metadataProperties
                    .getOrDefault(Constants.dbType,
                            new ParamsCannotBeNullOrEmpty(Constants.dbType + " Cannot be null or empty"))
                    .toString();
            LOG.info("Retrieving schema for " + dbType + " Database.");
            try {
                switch (dbType.toLowerCase()) {
                case Constants.ORACLE:
                    dbClassName = Constants.oracle;
                    OracleMetadataStrategy oracleMetadataHelper = (OracleMetadataStrategy) Class
                            .forName(dbClassName).newInstance();
                    oracleMetadataHelper.setConnection(metadataProperties);
                    objectAsString = objectMapper
                            .writeValueAsString(oracleMetadataHelper.fillComponentSchema(metadataProperties));
                    LOG.trace("Schema json for oracle : " + objectAsString);
                    LOG.info("+++ Stop: " + new Timestamp((new Date()).getTime()));
                    break;
                case Constants.HIVE:
                    dbClassName = Constants.hive;
                    HiveMetadataStrategy hiveMetadataHelper = (HiveMetadataStrategy) Class.forName(dbClassName)
                            .newInstance();
                    hiveMetadataHelper.setConnection(metadataProperties);
                    objectAsString = objectMapper
                            .writeValueAsString(hiveMetadataHelper.fillComponentSchema(metadataProperties));
                    LOG.trace("Schema json for hive : " + objectAsString);
                    LOG.info("+++ Stop: " + new Timestamp((new Date()).getTime()));
                    break;
                case Constants.REDSHIFT:
                    dbClassName = Constants.redshift;
                    RedshiftMetadataStrategy redShiftMetadataHelper = (RedshiftMetadataStrategy) Class
                            .forName(dbClassName).newInstance();
                    redShiftMetadataHelper.setConnection(metadataProperties);
                    objectAsString = objectMapper
                            .writeValueAsString(redShiftMetadataHelper.fillComponentSchema(metadataProperties));
                    LOG.trace("Schema json for redshift : " + objectAsString);
                    LOG.info("+++ Stop: " + new Timestamp((new Date()).getTime()));
                    break;
                case Constants.MYSQL:
                    dbClassName = Constants.mysql;
                    MysqlMetadataStrategy mysqlMetadataHelper = (MysqlMetadataStrategy) Class
                            .forName(dbClassName).newInstance();
                    mysqlMetadataHelper.setConnection(metadataProperties);
                    objectAsString = objectMapper
                            .writeValueAsString(mysqlMetadataHelper.fillComponentSchema(metadataProperties));
                    LOG.trace("Schema json for mysql : " + objectAsString);
                    LOG.info("+++ Stop: " + new Timestamp((new Date()).getTime()));
                    break;
                case Constants.TERADATA:
                    dbClassName = Constants.teradata;
                    TeradataMetadataStrategy teradataMetadataHelper = (TeradataMetadataStrategy) Class
                            .forName(dbClassName).newInstance();
                    teradataMetadataHelper.setConnection(metadataProperties);
                    objectAsString = objectMapper
                            .writeValueAsString(teradataMetadataHelper.fillComponentSchema(metadataProperties));
                    LOG.trace("Schema json for teradata : " + objectAsString);
                    LOG.info("+++ Stop: " + new Timestamp((new Date()).getTime()));
                    break;
                }
            } catch (Exception e) {
                LOG.error("Metadata read for database  '" + dbType + "' not completed.");
                LOG.error("Exception : " + e);
                response.status(400);
                return "Metadata read for database '" + dbType + "' not completed.";
            }
            LOG.info("Class Name used for " + dbType + " Is : " + dbClassName);
            LOG.debug("Json for " + dbType + " : " + objectAsString);
            return objectAsString;
        }

        @SuppressWarnings({ "unchecked", "rawtypes" })
        private Map extractingJsonObjects(JSONObject requestParameterValues) throws JSONException {

            String dbType = null, userId = null, password = null, host = null, port = null, sid = null,
                    driverType = null, query = null, tableName = null, database = null;
            Map metadataProperties = new HashMap();
            if (!requestParameterValues.isNull(Constants.dbType)) {
                dbType = requestParameterValues.getString(Constants.dbType);
                metadataProperties.put(Constants.dbType, dbType);
            }
            if (!requestParameterValues.isNull(Constants.USERNAME)) {
                userId = requestParameterValues.getString(Constants.USERNAME);
                metadataProperties.put(Constants.USERNAME, userId);
            }
            if (!requestParameterValues.isNull(Constants.SERVICE_PWD)) {
                password = requestParameterValues.getString(Constants.SERVICE_PWD);
                metadataProperties.put(Constants.SERVICE_PWD, password);
            }
            if (!requestParameterValues.isNull(Constants.HOST_NAME)) {
                host = requestParameterValues.getString(Constants.HOST_NAME);
                metadataProperties.put(Constants.HOST_NAME, host);
            }
            if (!requestParameterValues.isNull(Constants.PORT_NUMBER)) {
                port = requestParameterValues.getString(Constants.PORT_NUMBER);
                metadataProperties.put(Constants.PORT_NUMBER, port);
            } else {
                if (metadataProperties.get(Constants.dbType).toString().equalsIgnoreCase("mysql")) {
                    port = Constants.MYSQL_DEFAULT_PORT;
                    metadataProperties.put(Constants.PORT_NUMBER, port);

                } else if (metadataProperties.get(Constants.dbType).toString().equalsIgnoreCase("oracle")) {
                    port = Constants.ORACLE_DEFAULT_PORT;
                    metadataProperties.put(Constants.PORT_NUMBER, port);
                }
                LOG.info("Connecting " + dbType + " port is not provided using default port : " + port);
            }
            if (!requestParameterValues.isNull(Constants.SID)) {
                sid = requestParameterValues.getString(Constants.SID);
                metadataProperties.put(Constants.SID, sid);
            }
            if (!requestParameterValues.isNull(Constants.DRIVER_TYPE)) {
                driverType = requestParameterValues.getString(Constants.DRIVER_TYPE);
                metadataProperties.put(Constants.DRIVER_TYPE, driverType);
            }
            if (!requestParameterValues.isNull(Constants.QUERY)) {
                query = requestParameterValues.getString(Constants.QUERY);
                metadataProperties.put(Constants.QUERY, query);
            }
            if (!requestParameterValues.isNull(Constants.TABLENAME)) {
                tableName = requestParameterValues.getString(Constants.TABLENAME);
                metadataProperties.put(Constants.TABLENAME, tableName);
            }
            if (!requestParameterValues.isNull(Constants.DATABASE_NAME)) {
                database = requestParameterValues.getString(Constants.DATABASE_NAME);
                metadataProperties.put(Constants.DATABASE_NAME, database);
            }

            LOG.info("Fetched request parameters are: " + Constants.dbType + " => " + dbType + " "
                    + Constants.USERNAME + " => " + userId + " " + Constants.HOST_NAME + " => " + host + " "
                    + Constants.PORT_NUMBER + " => " + port + " " + Constants.SID + " => " + sid + " "
                    + Constants.DRIVER_TYPE + " => " + driverType + " " + Constants.QUERY + " => " + query + " "
                    + Constants.TABLENAME + " => " + tableName + " " + Constants.DATABASE_NAME + " => "
                    + database + " ");
            return metadataProperties;
        }
    });

    Spark.post("/read", new Route() {
        @Override
        public Object handle(Request request, Response response) {
            LOG.info("************************read endpoint - started************************");
            LOG.info("+++ Start: " + new Timestamp((new Date()).getTime()));
            String jobId = request.queryParams(Constants.JOB_ID);
            String componentId = request.queryParams(Constants.COMPONENT_ID);
            String socketId = request.queryParams(Constants.SOCKET_ID);
            String basePath = request.queryParams(Constants.BASE_PATH);

            // String host = request.queryParams(Constants.HOST);
            String userID = request.queryParams(Constants.USER_ID);
            String password = request.queryParams(Constants.SERVICE_PWD);

            double sizeOfData = Double.parseDouble(request.queryParams(Constants.FILE_SIZE)) * 1024 * 1024;
            LOG.info("Base Path: {}, Job Id: {}, Component Id: {}, Socket ID: {}, User ID:{}, DataSize:{}",
                    basePath, jobId, componentId, socketId, userID, sizeOfData);

            String batchID = jobId + "_" + componentId + "_" + socketId;
            String tempLocationPath = ServiceUtilities.getServiceConfigResourceBundle()
                    .getString(Constants.TEMP_LOCATION_PATH);
            String filePath = tempLocationPath + "/" + batchID + ".csv";
            try {
                readFileFromHDFS(basePath + "/debug/" + jobId + "/" + componentId + "_" + socketId, sizeOfData,
                        filePath, userID, password);
                LOG.info("+++ Stop: " + new Timestamp((new Date()).getTime()));
            } catch (Exception e) {
                LOG.error("Error in reading debug files", e);
                return "error";
            }
            return filePath;
        }

        /**
         * This method will read the HDFS file, fetch the records from it
         * and write its records to a local file on edge node with size <=
         * {@code sizeOfData} passed in parameter.
         *
         * @param hdfsFilePath   path of HDFS file from where records to be read
         * @param sizeOfData     defines the size of data (in bytes) to be read from
         *                       HDFS file
         * @param remoteFileName after reading the data of {@code sizeOfData} bytes
         *                       from HDFS file, it will be written to local file on
         *                       edge node with file name {@code remoteFileName}
         * @param userId
         * @param password
         */
        private void readFileFromHDFS(String hdfsFilePath, double sizeOfData, String remoteFileName,
                String userId, String password) {
            try {
                Path path = new Path(hdfsFilePath);
                LOG.debug("Reading Debug file:" + hdfsFilePath);
                Configuration conf = new Configuration();

                // load hdfs-site.xml and core-site.xml
                String hdfsConfigPath = ServiceUtilities.getServiceConfigResourceBundle()
                        .getString(Constants.HDFS_SITE_CONFIG_PATH);
                String coreSiteConfigPath = ServiceUtilities.getServiceConfigResourceBundle()
                        .getString(Constants.CORE_SITE_CONFIG_PATH);
                LOG.debug("Loading hdfs-site.xml:" + hdfsConfigPath);
                conf.addResource(new Path(hdfsConfigPath));
                LOG.debug("Loading hdfs-site.xml:" + coreSiteConfigPath);
                conf.addResource(new Path(coreSiteConfigPath));

                KerberosUtilities kerberosUtilities = new KerberosUtilities();
                // apply kerberos token
                kerberosUtilities.applyKerberosToken(userId, password, conf);

                listAndWriteFiles(remoteFileName, path, conf, sizeOfData);
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }

        /**
         * This method will list all files for {@code path}, read all files
         * and writes its data to a local file on edge node with size <=
         * {@code sizeOfData} passed in parameter.
         *
         * @param remoteFileName
         * @param path
         * @param conf
         * @param sizeOfData
         * @throws IOException
         */
        private void listAndWriteFiles(String remoteFileName, Path path, Configuration conf, double sizeOfData)
                throws IOException {
            FileSystem fs = FileSystem.get(conf);
            FileStatus[] status = fs.listStatus(path);
            File remoteFile = new File(remoteFileName);

            OutputStream os = new FileOutputStream(remoteFileName);
            try {

                int numOfBytes = 0;
                for (int i = 0; i < status.length; i++) {
                    BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(status[i].getPath())));
                    String line = "";
                    line = br.readLine();
                    if (line != null) {
                        // header will only get fetch from first part file
                        // and it
                        // will skip header from remaining files
                        if (numOfBytes == 0) {
                            os.write((line + "\n").toString().getBytes());
                            numOfBytes += line.toString().length();
                        }
                        while ((line = br.readLine()) != null) {
                            numOfBytes += line.toString().length();
                            // line = br.readLine();
                            if (numOfBytes <= sizeOfData) {
                                os.write((line + "\n").toString().getBytes());
                            } else {
                                break;
                            }
                        }
                    }
                    br.close();
                    remoteFile.setReadable(true, false);
                }
            } catch (Exception e) {
                throw new RuntimeException(e);
            } finally {
                os.close();
                fs.close();
            }
        }

    });

    Spark.post("/delete", new Route() {
        @Override
        public Object handle(Request request, Response response) {
            LOG.info("************************delete endpoint - started************************");
            LOG.info("+++ Start: " + new Timestamp((new Date()).getTime()));
            response.type("text/json");
            String jobId = request.queryParams(Constants.JOB_ID);
            String basePath = request.queryParams(Constants.BASE_PATH);
            String componentId = request.queryParams(Constants.COMPONENT_ID);
            String socketId = request.queryParams(Constants.SOCKET_ID);
            String userID = request.queryParams(Constants.USER_ID);
            String password = request.queryParams(Constants.SERVICE_PWD);

            LOG.info("Base Path: {}, Job Id: {}, Component Id: {}, Socket ID: {}, User ID:{}", basePath, jobId,
                    componentId, socketId, userID);

            try {
                removeDebugFiles(basePath, jobId, componentId, socketId, userID, password);
                LOG.info("+++ Stop: " + new Timestamp((new Date()).getTime()));
                return "success";
            } catch (Exception e) {
                LOG.error("Error in deleting debug files", e);
                return "error";
            }
        }

        private void removeDebugFiles(String basePath, String jobId, String componentId, String socketId,
                String userID, String password) {
            try {
                // DebugFilesReader debugFilesReader = new
                // DebugFilesReader(basePath, jobId, componentId, socketId,
                // userID,
                // password);
                delete(basePath, jobId, componentId, socketId, userID, password);
            } catch (Exception e) {
                LOG.error("Error while deleting the debug file", e);
                throw new RuntimeException(e);
            }
        }

        /**
         * Deletes the jobId directory
         *
         * @param password
         * @param userID
         * @param socketId
         * @param componentId
         * @param jobId
         * @param basePath
         * @throws IOException
         */
        public void delete(String basePath, String jobId, String componentId, String socketId, String userID,
                String password) throws IOException {
            LOG.trace("Entering method delete()");
            String deletePath = basePath + "/debug/" + jobId;
            Configuration configuration = new Configuration();
            FileSystem fileSystem = FileSystem.get(configuration);
            Path deletingFilePath = new Path(deletePath);
            if (!fileSystem.exists(deletingFilePath)) {
                throw new PathNotFoundException(deletingFilePath.toString());
            } else {
                // Delete file
                fileSystem.delete(deletingFilePath, true);
                LOG.info("Deleted path : " + deletePath);
            }
            fileSystem.close();
        }
    });

    Spark.post("/deleteLocalDebugFile", new Route() {
        @Override
        public Object handle(Request request, Response response) {
            String error = "";
            LOG.info("+++ Start: " + new Timestamp((new Date()).getTime()));
            LOG.info("************************deleteLocalDebugFile endpoint - started************************");
            try {
                String jobId = request.queryParams(Constants.JOB_ID);
                String componentId = request.queryParams(Constants.COMPONENT_ID);
                String socketId = request.queryParams(Constants.SOCKET_ID);
                String batchID = jobId + "_" + componentId + "_" + socketId;
                String tempLocationPath = ServiceUtilities.getServiceConfigResourceBundle()
                        .getString(Constants.TEMP_LOCATION_PATH);

                LOG.info("Job Id: {}, Component Id: {}, Socket ID: {}, TemporaryPath: {}", jobId, componentId,
                        socketId, tempLocationPath);
                LOG.debug("File to be deleted: " + tempLocationPath + "/" + batchID + ".csv");
                File file = new File(tempLocationPath + "/" + batchID + ".csv");
                file.delete();
                LOG.trace("Local debug file deleted successfully.");
                return "Success";
            } catch (Exception e) {
                LOG.error("Error in deleting local debug file.", e);
                error = e.getMessage();
            }
            LOG.info("+++ Stop: " + new Timestamp((new Date()).getTime()));
            return "Local file delete failed. Error: " + error;
        }
    });

    // TODO : Keep this for test
    Spark.post("/post", new Route() {

        @Override
        public Object handle(Request request, Response response) {
            LOG.info("****TEST SPARK POST STARTED**********");
            response.type("text/json");
            return "calling post...";
        }
    });

    // TODO : Keep this for test
    Spark.get("/test", new Route() {

        @Override
        public Object handle(Request request, Response response) {
            LOG.info("****TEST SPARK GET STARTED**********");
            response.type("text/json");
            response.status(200);
            response.body("Test successful!");
            return "Test successful!";
        }
    });

    Spark.post("/filter", new Route() {
        @Override
        public Object handle(Request request, Response response) {

            LOG.info("************************filter - started************************");
            LOG.info("+++ Start: " + new Timestamp((new Date()).getTime()));

            Gson gson = new Gson();
            String json = request.queryParams(Constants.REQUEST_PARAMETERS);
            RemoteFilterJson remoteFilterJson = gson.fromJson(json, RemoteFilterJson.class);

            String jobId = remoteFilterJson.getJobDetails().getUniqueJobID();
            String componentId = remoteFilterJson.getJobDetails().getComponentID();
            String socketId = remoteFilterJson.getJobDetails().getComponentSocketID();
            String basePath = remoteFilterJson.getJobDetails().getBasepath();
            String username = remoteFilterJson.getJobDetails().getUsername();
            String password = remoteFilterJson.getJobDetails().getService_pwd();
            double outputFileSizeInMB = remoteFilterJson.getFileSize();
            double sizeOfDataInByte = outputFileSizeInMB * 1024 * 1024;

            String condition = parseSQLQueryToLingualQuery(remoteFilterJson);

            LOG.info("Base Path: {}, Job Id: {}, Component Id: {}, Socket ID: {}, User ID:{}, DataSize:{}",
                    basePath, jobId, componentId, socketId, username, sizeOfDataInByte);

            String batchID = jobId + "_" + componentId + "_" + socketId;

            String tempLocationPath = ServiceUtilities.getServiceConfigResourceBundle()
                    .getString(Constants.TEMP_LOCATION_PATH);

            String filePath = tempLocationPath + "/" + batchID + ".csv";
            String uuid = generateUUID();
            String uniqueId = batchID + "_" + uuid;
            String lingualMetaDataPath = basePath + "/filter/" + uuid;

            String[] fieldNames = getHeader(basePath + "/debug/" + jobId + "/" + componentId + "_" + socketId,
                    username, password);
            try {
                HashMap<String, Type> fieldNameAndDatatype = getFieldNameAndType(remoteFilterJson);
                Type[] fieldTypes = getFieldTypeFromMap(fieldNames, fieldNameAndDatatype);
                Configuration conf = getConfiguration(username, password);

                new LingualFilter().filterData(lingualMetaDataPath, uniqueId,
                        basePath + "/debug/" + jobId + "/" + componentId + "_" + socketId, sizeOfDataInByte,
                        filePath, condition, fieldNames, fieldTypes, conf);

                LOG.info("debug output path : " + filePath);
                LOG.info("+++ Stop: " + new Timestamp((new Date()).getTime()));
            } catch (Exception e) {
                LOG.error("Error in reading debug files", e);
                return "error";
            } finally {
                try {
                    System.gc();
                    deleteLingualResult(lingualMetaDataPath);
                } catch (Exception e) {
                    LOG.error("Error in deleting lingual result", e);
                    return "Error in deleting lingual result: " + e.getMessage();
                }
            }

            return filePath;
        }

        private Type[] getFieldTypeFromMap(String[] fieldNames, HashMap<String, Type> fieldNameAndDatatype) {
            Type[] type = new Type[fieldNameAndDatatype.size()];
            int i = 0;
            for (String eachFieldName : fieldNames) {
                type[i++] = fieldNameAndDatatype.get(eachFieldName);
            }
            return type;
        }

        private String[] getHeader(String path, String username, String password) {
            String[] header = readFile(path, username, password);
            return header;
        }

        private String[] readFile(String hdfsFilePath, String username, String password) {
            String[] header = null;
            try {
                Path path = new Path(hdfsFilePath);
                LOG.debug("Reading Debug file:" + hdfsFilePath);
                Configuration conf = getConfiguration(username, password);

                header = getHeaderArray(path, conf);
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
            return header;
        }

        private Path filterOutSuccessFile(FileStatus[] fileStatus) {
            for (FileStatus status : fileStatus) {
                if (status.getPath().getName().toUpperCase().contains("_SUCCESS"))
                    continue;
                else
                    return status.getPath();
            }
            return null;
        }

        private String[] getHeaderArray(Path path, Configuration conf) throws IOException {
            FileSystem fs = FileSystem.get(conf);
            FileStatus[] status = fs.listStatus(path);
            String line = "";
            try {
                BufferedReader br = new BufferedReader(
                        new InputStreamReader(fs.open(filterOutSuccessFile(status))));

                line = br.readLine();
                br.close();

            } catch (Exception e) {
                throw new RuntimeException(e);
            } finally {
                fs.close();
            }
            return line.split(",");
        }

        private Configuration getConfiguration(String userId, String password)
                throws LoginException, IOException {
            Configuration conf = new Configuration();

            // load hdfs-site.xml and core-site.xml
            String hdfsConfigPath = ServiceUtilities.getServiceConfigResourceBundle()
                    .getString(Constants.HDFS_SITE_CONFIG_PATH);
            String coreSiteConfigPath = ServiceUtilities.getServiceConfigResourceBundle()
                    .getString(Constants.CORE_SITE_CONFIG_PATH);
            LOG.debug("Loading hdfs-site.xml:" + hdfsConfigPath);
            conf.addResource(new Path(hdfsConfigPath));
            LOG.debug("Loading hdfs-site.xml:" + coreSiteConfigPath);
            conf.addResource(new Path(coreSiteConfigPath));

            KerberosUtilities kerberosUtilities = new KerberosUtilities();
            // apply kerberos token
            kerberosUtilities.applyKerberosToken(userId, password, conf);
            return conf;
        }

        private void deleteLingualResult(String deletePath) throws IOException {
            Configuration configuration = new Configuration();
            FileSystem fileSystem = FileSystem.get(configuration);
            Path deletingFilePath = new Path(deletePath);

            if (!fileSystem.exists(deletingFilePath)) {
                throw new PathNotFoundException(deletingFilePath.toString());
            } else {
                boolean isDeleted = fileSystem.delete(deletingFilePath, true);
                if (isDeleted) {
                    fileSystem.deleteOnExit(deletingFilePath);
                }
                LOG.info("Deleted path : " + deletePath);
            }

            fileSystem.close();
        }

        private String generateUUID() {
            return String.valueOf(UUID.randomUUID());
        }

        private String parseSQLQueryToLingualQuery(RemoteFilterJson remoteFilterJson) {
            ANTLRInputStream stream = new ANTLRInputStream(remoteFilterJson.getCondition());
            QueryParserLexer lexer = new QueryParserLexer(stream);
            CommonTokenStream tokenStream = new CommonTokenStream(lexer);
            QueryParserParser parser = new QueryParserParser(tokenStream);
            parser.removeErrorListeners();
            LingualQueryCreator customVisitor = new LingualQueryCreator(remoteFilterJson.getSchema());
            String condition = customVisitor.visit(parser.eval());
            return condition;
        }

        private HashMap<String, Type> getFieldNameAndType(RemoteFilterJson remoteFilterJson)
                throws ClassNotFoundException {
            HashMap<String, Type> fieldDataTypeMap = new HashMap<>();
            Type type;
            for (int i = 0; i < remoteFilterJson.getSchema().size(); i++) {
                Class clazz = Class.forName(remoteFilterJson.getSchema().get(i).getDataTypeValue());
                if (clazz.getSimpleName().equalsIgnoreCase("Date")) {
                    type = new SQLTimestampCoercibleType();
                } else {
                    type = clazz;
                }
                fieldDataTypeMap.put(remoteFilterJson.getSchema().get(i).getFieldName(), type);
            }
            return fieldDataTypeMap;
        }

    });
}

From source file:ic.doc.ipandora.parser.fol.AstGenerator.java

License:Open Source License

public static FirstOrderLogicFormula generateAst(String line) {
    ANTLRInputStream input = new ANTLRInputStream(line);
    FormulaLexer lexer = new FormulaLexer(input);
    CommonTokenStream tokens = new CommonTokenStream(lexer);

    FormulaParser parser = new FormulaParser(tokens);
    parser.setErrorHandler(new BailErrorStrategy());

    ParseTree tree;

    try {
        tree = parser.prog();
    } catch (Exception e) {
        return null;
    }

    return new AstGenerator().visit(tree);
}
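
The BailErrorStrategy set here makes the parser abandon error recovery and throw on the first syntax error; concretely it throws a ParseCancellationException wrapping the underlying RecognitionException, which is why the parse is wrapped in a try/catch that returns null. Catching that specific type, as sketched below, is slightly tighter than catching Exception:

import org.antlr.v4.runtime.misc.ParseCancellationException;

try {
    tree = parser.prog();
} catch (ParseCancellationException e) {
    return null; // the first syntax error aborts the parse
}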

From source file:ic.doc.ipandora.parser.justification.JustificationGenerator.java

License:Open Source License

public static Justification generateJustification(MainProof proof, String justification) {
    ANTLRInputStream input = new ANTLRInputStream(justification);
    JustificationLexer lexer = new JustificationLexer(input);
    CommonTokenStream tokens = new CommonTokenStream(lexer);

    JustificationParser parser = new JustificationParser(tokens);
    parser.setErrorHandler(new BailErrorStrategy());

    ParseTree tree;

    try {
        tree = parser.prog();
    } catch (Exception e) {
        return null;
    }

    return new JustificationGenerator(proof).visit(tree);
}

From source file:illarion.easynpc.Parser.java

License:Open Source License

@Nonnull
private static ParsedNpc parseScript(@Nonnull CharStream stream) {
    EasyNpcLexer lexer = new EasyNpcLexer(stream);
    EasyNpcParser parser = new EasyNpcParser(new CommonTokenStream(lexer));

    ParsedNpcVisitor visitor = new ParsedNpcVisitor();
    lexer.removeErrorListeners();
    lexer.addErrorListener(visitor);
    EasyNpcParser.ScriptContext context = parser.script();

    context.accept(visitor);

    return visitor.getParsedNpc();
}