Example usage for org.apache.spark SparkStageInfo numCompletedTasks

List of usage examples for org.apache.spark SparkStageInfo numCompletedTasks

Introduction

In this page you can find the example usage for org.apache.spark SparkStageInfo numCompletedTasks.

Prototype

int numCompletedTasks();

Source Link

Usage

From source file: cn.com.bsfit.frms.spark.StatusTrackerDemo.java

License: Apache License

/**
 * Demonstrates polling job/stage progress for an asynchronously submitted Spark job.
 *
 * <p>Submits a small 5-partition job via {@code collectAsync()} and, once per second
 * until it finishes, prints the task counts of the job's first stage obtained from
 * the status tracker. Finally prints the collected results and shuts Spark down.
 *
 * @param args unused command-line arguments
 * @throws Exception if interrupted while sleeping or if the job future fails
 */
public static void main(String[] args) throws Exception {
    SparkSession spark = SparkSession.builder().appName(APP_NAME).getOrCreate();

    // try-with-resources guarantees the context is closed even if polling throws;
    // JavaSparkContext implements Closeable.
    try (JavaSparkContext jsc = new JavaSparkContext(spark.sparkContext())) {

        // Example of implementing a progress reporter for a simple job.
        JavaRDD<Integer> rdd = jsc.parallelize(Arrays.asList(1, 2, 3, 4, 5), 5)
                .map(new IdentityWithDelay<Integer>());
        JavaFutureAction<List<Integer>> jobFuture = rdd.collectAsync();
        while (!jobFuture.isDone()) {
            Thread.sleep(1000); // poll once per second
            List<Integer> jobIds = jobFuture.jobIds();
            if (jobIds.isEmpty()) {
                // The scheduler has not registered the job yet; try again next tick.
                continue;
            }
            int currentJobId = jobIds.get(jobIds.size() - 1);
            SparkJobInfo jobInfo = jsc.statusTracker().getJobInfo(currentJobId);
            SparkStageInfo stageInfo = jsc.statusTracker().getStageInfo(jobInfo.stageIds()[0]);
            System.out.println(stageInfo.numTasks() + " tasks total: " + stageInfo.numActiveTasks() + " active, "
                    + stageInfo.numCompletedTasks() + " complete");
        }

        System.out.println("Job results are: " + jobFuture.get());
    } finally {
        spark.stop();
    }
}

From source file: com.andado.spark.examples.JavaStatusTrackerDemo.java

License: Apache License

/**
 * Progress-reporter demo: runs a small asynchronous Spark job and prints
 * per-stage task counts once per second until the job completes.
 *
 * @param args unused
 * @throws Exception if interrupted while polling or if the job fails
 */
public static void main(String[] args) throws Exception {
    SparkSession spark = SparkSession.builder().appName(APP_NAME).getOrCreate();

    final JavaSparkContext context = new JavaSparkContext(spark.sparkContext());

    // Submit the job asynchronously so we can observe it while it runs.
    JavaRDD<Integer> numbers = context.parallelize(Arrays.asList(1, 2, 3, 4, 5), 5)
            .map(new IdentityWithDelay<Integer>());
    JavaFutureAction<List<Integer>> future = numbers.collectAsync();

    // Poll every second; report the first stage of the most recent job id.
    while (!future.isDone()) {
        Thread.sleep(1000); // 1 second
        List<Integer> ids = future.jobIds();
        if (ids.isEmpty()) {
            continue; // job not yet registered with the scheduler
        }
        int latestJobId = ids.get(ids.size() - 1);
        SparkJobInfo job = context.statusTracker().getJobInfo(latestJobId);
        SparkStageInfo stage = context.statusTracker().getStageInfo(job.stageIds()[0]);
        System.out.println(stage.numTasks() + " tasks total: " + stage.numActiveTasks() + " active, "
                + stage.numCompletedTasks() + " complete");
    }

    System.out.println("Job results are: " + future.get());
    spark.stop();
}

From source file: com.dmall.order.realtime.tacking.function.JavaStatusTrackerDemo.java

License: Apache License

/**
 * Progress-reporter demo against a standalone cluster: submits a small
 * asynchronous job and prints stage task counts once per second.
 *
 * NOTE(review): master URL and jar path are hard-coded for a specific
 * development machine — adjust before running elsewhere.
 *
 * @param args unused
 * @throws Exception if interrupted while polling or if the job fails
 */
public static void main(String[] args) throws Exception {
    SparkSession spark = SparkSession.builder().appName(APP_NAME).master("spark://192.168.184.128:7077")
            .getOrCreate();

    final JavaSparkContext context = new JavaSparkContext(spark.sparkContext());
    // Ship the application jar to the executors on the remote cluster.
    context.addJar("D:\\learn\\java\\learn-spark\\target\\spark.jar");

    // Submit asynchronously so the job can be observed while it runs.
    JavaRDD<Integer> numbers = context.parallelize(Arrays.asList(1, 2, 3, 4, 5), 5)
            .map(new IdentityWithDelay<Integer>());
    JavaFutureAction<List<Integer>> future = numbers.collectAsync();
    while (!future.isDone()) {
        Thread.sleep(1000); // 1 second between progress samples
        List<Integer> ids = future.jobIds();
        if (ids.isEmpty()) {
            continue; // scheduler has not registered the job yet
        }
        int latestJobId = ids.get(ids.size() - 1);
        SparkJobInfo job = context.statusTracker().getJobInfo(latestJobId);
        SparkStageInfo stage = context.statusTracker().getStageInfo(job.stageIds()[0]);
        System.out.println(stage.numTasks() + " tasks total: " + stage.numActiveTasks() + " active, "
                + stage.numCompletedTasks() + " complete");
    }

    System.out.println("Job results are: " + future.get());
    spark.stop();
}

From source file: com.hxr.bigdata.spark.example141.JavaStatusTrackerDemo.java

License: Apache License

/**
 * Progress-reporter demo using the classic {@code SparkConf}/{@code JavaSparkContext}
 * entry point (no {@code SparkSession}): runs a small asynchronous job and prints
 * per-stage task counts once per second until it completes.
 *
 * @param args unused
 * @throws Exception if interrupted while polling or if the job fails
 */
public static void main(String[] args) throws Exception {
    SparkConf conf = new SparkConf().setAppName(APP_NAME);
    final JavaSparkContext context = new JavaSparkContext(conf);

    // Submit asynchronously so progress can be observed while the job runs.
    JavaRDD<Integer> numbers = context.parallelize(Arrays.asList(1, 2, 3, 4, 5), 5)
            .map(new IdentityWithDelay<Integer>());
    JavaFutureAction<List<Integer>> future = numbers.collectAsync();

    // Sample progress once per second until completion.
    while (!future.isDone()) {
        Thread.sleep(1000);
        List<Integer> ids = future.jobIds();
        if (ids.isEmpty()) {
            continue; // job not yet registered with the scheduler
        }
        int latestJobId = ids.get(ids.size() - 1);
        SparkJobInfo job = context.statusTracker().getJobInfo(latestJobId);
        SparkStageInfo stage = context.statusTracker().getStageInfo(job.stageIds()[0]);
        System.out.println(stage.numTasks() + " tasks total: " + stage.numActiveTasks() + " active, "
                + stage.numCompletedTasks() + " complete");
    }

    System.out.println("Job results are: " + future.get());
    context.stop();
}

From source file: gtl.spark.java.example.apache.JavaStatusTrackerDemo.java

License: Apache License

/**
 * Progress-reporter demo: submits a small job asynchronously and, once per
 * second until it finishes, prints the task counts of its first stage.
 *
 * @param args unused
 * @throws Exception if interrupted while polling or if the job fails
 */
public static void main(String[] args) throws Exception {
    SparkSession spark = SparkSession.builder().appName(APP_NAME).getOrCreate();

    JavaSparkContext context = new JavaSparkContext(spark.sparkContext());

    // Asynchronous collect lets us poll the scheduler while the job runs.
    JavaRDD<Integer> numbers = context.parallelize(Arrays.asList(1, 2, 3, 4, 5), 5).map(new IdentityWithDelay<>());
    JavaFutureAction<List<Integer>> future = numbers.collectAsync();
    while (!future.isDone()) {
        Thread.sleep(1000); // one-second sampling interval
        List<Integer> ids = future.jobIds();
        if (ids.isEmpty()) {
            continue; // scheduler has not registered the job yet
        }
        int latestJobId = ids.get(ids.size() - 1);
        SparkJobInfo job = context.statusTracker().getJobInfo(latestJobId);
        SparkStageInfo stage = context.statusTracker().getStageInfo(job.stageIds()[0]);
        System.out.println(stage.numTasks() + " tasks total: " + stage.numActiveTasks() + " active, "
                + stage.numCompletedTasks() + " complete");
    }

    System.out.println("Job results are: " + future.get());
    spark.stop();
}

From source file: org.apache.hadoop.hive.ql.exec.spark.status.impl.LocalSparkJobStatus.java

License: Apache License

@Override
public Map<String, SparkStageProgress> getSparkStageProgress() {
    // Snapshot per-stage task counts for every stage of this job. Keys are
    // "<stageId>_<attemptId>" so retried stage attempts are reported separately.
    Map<String, SparkStageProgress> stageProgresses = new HashMap<>();
    for (int stageId : getStageIds()) {
        SparkStageInfo sparkStageInfo = getStageInfo(stageId);
        // Stage info can be null if the tracker has not seen the stage yet
        // (or it has already been evicted); skip those stages.
        if (sparkStageInfo != null) {
            SparkStageProgress sparkStageProgress = new SparkStageProgress(
                    sparkStageInfo.numTasks(),
                    sparkStageInfo.numCompletedTasks(),
                    sparkStageInfo.numActiveTasks(),
                    sparkStageInfo.numFailedTasks());
            // int stageId concatenates directly; String.valueOf is unnecessary.
            stageProgresses.put(sparkStageInfo.stageId() + "_" + sparkStageInfo.currentAttemptId(),
                    sparkStageProgress);
        }
    }
    return stageProgresses;
}

From source file: org.apache.hadoop.hive.ql.exec.spark.status.impl.RemoteSparkJobStatus.java

License: Apache License

@Override
public Map<String, SparkStageProgress> getSparkStageProgress() throws HiveException {
    // Snapshot per-stage task counts for every stage of this remote job. Keys
    // are "<stageId>_<attemptId>" so retried stage attempts stay distinct.
    Map<String, SparkStageProgress> stageProgresses = new HashMap<>();
    for (int stageId : getStageIds()) {
        SparkStageInfo sparkStageInfo = getSparkStageInfo(stageId);
        // Skip stages the remote tracker has no (complete) info for yet; the
        // name() null-check guards against partially populated stage info.
        if (sparkStageInfo != null && sparkStageInfo.name() != null) {
            SparkStageProgress sparkStageProgress = new SparkStageProgress(
                    sparkStageInfo.numTasks(),
                    sparkStageInfo.numCompletedTasks(),
                    sparkStageInfo.numActiveTasks(),
                    sparkStageInfo.numFailedTasks());
            // int stageId concatenates directly; String.valueOf is unnecessary.
            stageProgresses.put(sparkStageInfo.stageId() + "_" + sparkStageInfo.currentAttemptId(),
                    sparkStageProgress);
        }
    }
    return stageProgresses;
}