Example usage for org.apache.spark.launcher SparkAppHandle getState

List of usage examples for org.apache.spark.launcher SparkAppHandle getState

Introduction

On this page you can find an example usage of org.apache.spark.launcher SparkAppHandle getState.

Prototype

State getState();

Source Link

Document

Returns the current application state.

Usage

From source file: org.flowable.decision.DecisionAnalysisService.java

License: Apache License

/**
 * Submits one Spark ML application per matching user task, passing the task's outcome
 * and variable metadata to the app as a single '#'-separated argument string
 * (see {@link #buildAppArguments}).
 *
 * <p>For demo purposes the apps are run sequentially: this method blocks (polling every
 * 5 seconds) until each submitted app reaches a terminal state before submitting the next,
 * so the console output of the apps is not interleaved.
 *
 * @param processDefinition   process definition the tasks belong to
 * @param outcomesMap         task id -> possible outcome values for that task
 * @param matchingUserTasks   tasks for which a Spark app should be submitted
 * @param possibleValueCounts task id -> (variable name -> possible values for that variable)
 */
private void submitSparkAppsForTasks(ProcessDefinition processDefinition, Map<String, List<String>> outcomesMap,
        List<UserTask> matchingUserTasks, Map<String, Map<String, List<String>>> possibleValueCounts) {
    for (UserTask matchingUserTask : matchingUserTasks) {
        LOGGER.info("Submitting Spark ML app for task {}...", matchingUserTask.getId());
        try {
            String arguments = buildAppArguments(processDefinition, outcomesMap, possibleValueCounts,
                    matchingUserTask);
            LOGGER.info("Arguments for Spark app: {}", arguments);

            SparkAppHandle sparkAppHandle = new SparkLauncher().setSparkHome(System.getProperty("sparkHome"))
                    .setAppResource(System.getProperty("appResource"))
                    .setMainClass("org.flowable.AnalyseDecisions").setMaster("local[4]")
                    .addAppArgs(arguments).redirectOutput(Redirect.INHERIT)
                    .startApplication(new SparkAppHandle.Listener() {

                        @Override
                        public void stateChanged(SparkAppHandle handle) {
                            LOGGER.info("Spark app state changed: {}", handle.getState());
                        }

                        @Override
                        public void infoChanged(SparkAppHandle handle) {
                            LOGGER.info("Spark app info changed, state: {}", handle.getState());
                        }
                    });

            // For demo: process tasks sequentially so the console output is not mixed for all tasks.
            // isFinal() covers FINISHED, FAILED, KILLED and LOST; checking only FINISHED/FAILED
            // (as before) would spin forever if the app is killed or the handle loses contact.
            while (!sparkAppHandle.getState().isFinal()) {
                Thread.sleep(5000L);
            }

        } catch (IOException e) {
            LOGGER.error("Could not submit app to Spark", e);
        } catch (InterruptedException e) {
            // Restore the interrupt flag (it is cleared when InterruptedException is thrown)
            // and stop submitting further apps instead of silently continuing.
            Thread.currentThread().interrupt();
            LOGGER.error("Interrupted while waiting for Spark app to finish", e);
            return;
        }

    }
}

/**
 * Builds the argument string handed to the Spark app for one user task.
 *
 * <p>Not so pretty: the metadata is smuggled through one long argument string; a
 * persistent store would be a cleaner hand-off. Format ('#'-separated fields):
 * <ol>
 *   <li>process definition id</li>
 *   <li>task key</li>
 *   <li>outcome variable name</li>
 *   <li>outcome possibilities, ';'-separated</li>
 *   <li>variable names, ';'-separated</li>
 *   <li>variable possibilities, ';'-separated per variable, '&amp;'-separated within one</li>
 * </ol>
 *
 * @return the '#'-separated argument string for {@code matchingUserTask}
 */
private String buildAppArguments(ProcessDefinition processDefinition, Map<String, List<String>> outcomesMap,
        Map<String, Map<String, List<String>>> possibleValueCounts, UserTask matchingUserTask) {
    List<String> outcomes = outcomesMap.get(matchingUserTask.getId());
    Map<String, List<String>> variableToPotentialValues = possibleValueCounts.get(matchingUserTask.getId());
    List<String> variableNames = new ArrayList<>(variableToPotentialValues.keySet());

    // '&'-joined possibilities per variable, kept in the same order as variableNames
    List<String> joinedPossibilities = new ArrayList<>(variableNames.size());
    for (String variableName : variableNames) {
        joinedPossibilities.add(String.join("&", variableToPotentialValues.get(variableName)));
    }

    return processDefinition.getId() + "#" // process definition id
            + matchingUserTask.getId() + "#" // task key
            + "form_" + matchingUserTask.getFormKey() + "_outcome" + "#" // outcome variable
            + String.join(";", outcomes) + "#" // outcome possibilities
            + String.join(";", variableNames) + "#" // variable names
            + String.join(";", joinedPossibilities); // variable possibilities
}