com.sina.dip.twill.HelloWorldMultipleRunnablesAnyOrder.java Source code

Introduction

Here is the source code for com.sina.dip.twill.HelloWorldMultipleRunnablesAnyOrder.java. The example uses Apache Twill to run a TwillApplication containing two runnables ("hello1" and "hello2") on a YARN cluster; anyOrder() lets YARN start the runnables without any ordering constraint between them.

Source

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.sina.dip.twill;

import java.io.PrintWriter;
import java.net.URL;
import java.util.List;
import java.util.concurrent.ExecutionException;

import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.twill.api.AbstractTwillRunnable;
import org.apache.twill.api.ClassAcceptor;
import org.apache.twill.api.ResourceSpecification;
import org.apache.twill.api.ResourceSpecification.SizeUnit;
import org.apache.twill.api.TwillApplication;
import org.apache.twill.api.TwillController;
import org.apache.twill.api.TwillRunnerService;
import org.apache.twill.api.TwillSpecification;
import org.apache.twill.api.logging.PrinterLogHandler;
import org.apache.twill.yarn.YarnTwillRunnerService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.base.Splitter;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.util.concurrent.Futures;

/**
 * Hello World example using twill-yarn to run a TwillApplication with two
 * runnables over YARN, without any start-order constraint between them.
 */
public class HelloWorldMultipleRunnablesAnyOrder {

    public static final Logger LOG = LoggerFactory.getLogger(HelloWorldMultipleRunnablesAnyOrder.class);

    /**
     * Hello World runnable that is provided to TwillRunnerService to be run.
     */
    private static class HelloWorldRunnable extends AbstractTwillRunnable {

        @Override
        public void run() {
            LOG.info("hello1 world runnable.");
        }

        @Override
        public void stop() {
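            // Nothing to do; run() completes as soon as it has logged its message.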
        }

    }

    /**
 * Second Hello World runnable that is provided to TwillRunnerService to be run.
     */
    private static class HelloWorldRunnable2 extends AbstractTwillRunnable {

        @Override
        public void run() {
            LOG.info("hello2 world runnable.");
        }

        @Override
        public void stop() {
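            // Nothing to do; run() completes as soon as it has logged its message.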
        }

    }

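    /**
     * TwillApplication that bundles the two runnables together with their
     * resource requirements; anyOrder() places no start-order constraint
     * between "hello1" and "hello2".
     */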
    private static class HelloWorldApplication implements TwillApplication {

        @Override
        public TwillSpecification configure() {
            return TwillSpecification.Builder.with()
                    .setName("HelloWorldApplication")
                    .withRunnable()
                    .add("hello1", new HelloWorldRunnable(),
                            ResourceSpecification.Builder.with()
                                    .setVirtualCores(1).setMemory(2, SizeUnit.GIGA).setInstances(2).build())
                    .noLocalFiles()
                    .add("hello2", new HelloWorldRunnable2(),
                            ResourceSpecification.Builder.with()
                                    .setVirtualCores(1).setMemory(1, SizeUnit.GIGA).setInstances(4).build())
                    .noLocalFiles()
                    .anyOrder()
                    .build();
        }

    }

    public static void main(String[] args) {
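        // ZooKeeper connection string used by Twill to track the running application;
        // point this at the ZooKeeper quorum of your cluster.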
        String zkStr = "localhost:2181";

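        // YarnConfiguration picks up yarn-site.xml / core-site.xml from the classpath.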
        YarnConfiguration yarnConfiguration = new YarnConfiguration();

        final TwillRunnerService twillRunner = new YarnTwillRunnerService(yarnConfiguration, zkStr);

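        // Start the runner service so applications can be prepared and launched.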
        twillRunner.start();

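        // Resolve the YARN application classpath, falling back to common Hadoop installation paths.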
        String yarnClasspath = yarnConfiguration.get(YarnConfiguration.YARN_APPLICATION_CLASSPATH,
                "/usr/lib/hadoop/*,/usr/lib/hadoop-0.20-mapreduce/*,/usr/lib/hadoop-hdfs/*,/usr/lib/hadoop-mapreduce/*,/usr/lib/hadoop-yarn/*");

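        // Split the comma-separated classpath into individual entries for Twill.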
        List<String> applicationClassPaths = Lists.newArrayList();

        Iterables.addAll(applicationClassPaths, Splitter.on(",").split(yarnClasspath));

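        // Prepare and launch the application: print container logs to stdout,
        // pass along the application classpath, and keep Hadoop classes out of
        // the application bundle since the cluster already provides them.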
        final TwillController controller = twillRunner.prepare(new HelloWorldApplication())
                .addLogHandler(new PrinterLogHandler(new PrintWriter(System.out, true)))
                .withApplicationClassPaths(applicationClassPaths)
                .withBundlerClassAcceptor(new HadoopClassExcluder()).start();

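        // Terminate the application and stop the runner service when the JVM shuts down.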
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                try {
                    Futures.getUnchecked(controller.terminate());
                } finally {
                    twillRunner.stop();
                }
            }
        });

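        // Block until the application terminates.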
        try {
            controller.awaitTerminated();
        } catch (ExecutionException e) {
                LOG.error("Application terminated with an error.", e);
        }
    }

    static class HadoopClassExcluder extends ClassAcceptor {

        @Override
        public boolean accept(String className, URL classUrl, URL classPathUrl) {
            // exclude hadoop but not hbase package
            return !(className.startsWith("org.apache.hadoop") && !className.startsWith("org.apache.hadoop.hbase"));
        }

    }

}