Example usage for com.google.common.base Preconditions checkArgument

Introduction

This page collects usage examples for com.google.common.base.Preconditions.checkArgument, drawn from open-source projects.

Prototype

public static void checkArgument(boolean expression, @Nullable Object errorMessage) 

Document

Ensures the truth of an expression involving one or more parameters to the calling method. If the expression is false, an IllegalArgumentException is thrown whose message is the string form (String.valueOf) of errorMessage.
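
Before the project examples, here is a minimal, self-contained sketch of the contract (illustrative names only, not taken from any of the projects below):

import com.google.common.base.Preconditions;

public class CheckArgumentSketch {

    // Rejects non-positive timeouts before any work is done.
    static void setTimeout(long millis) {
        Preconditions.checkArgument(millis > 0, "timeout must be positive, was " + millis);
        // ... proceed with a valid timeout ...
    }

    public static void main(String[] args) {
        setTimeout(500); // passes silently
        setTimeout(-1);  // throws IllegalArgumentException: timeout must be positive, was -1
    }
}

As a rule of thumb: checkArgument validates parameters passed by the caller, checkState validates the receiver's own state, and checkNotNull (which returns its argument) is the idiomatic null check.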

Usage

From source file:org.apache.fluo.command.FluoInit.java

public static void main(String[] args) {

    InitOptions opts = InitOptions.parse(args);
    File applicationPropsFile = new File(opts.getAppPropsPath());
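    // Fail fast with an IllegalArgumentException if the application properties file is missing.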
    Preconditions.checkArgument(applicationPropsFile.exists(), opts.getAppPropsPath() + " does not exist");

    FluoConfiguration config = CommandUtil.resolveFluoConfig();
    config.load(applicationPropsFile);
    config.setApplicationName(opts.getApplicationName());
    opts.overrideFluoConfig(config);

    String propKey = opts.getRetrieveProperty();
    if (propKey != null && !propKey.isEmpty()) {
        if (config.containsKey(propKey)) {
            System.out.println(config.getString(propKey));
        }
        System.exit(0);
    }

    if (!config.hasRequiredAdminProps()) {
        System.err.println("Error - Required properties are not set in " + opts.getAppPropsPath());
        System.exit(-1);
    }
    try {
        config.validate();
    } catch (Exception e) {
        System.err.println("Error - Invalid configuration due to " + e.getMessage());
        System.exit(-1);
    }

    try (FluoAdminImpl admin = new FluoAdminImpl(config)) {

        if (admin.applicationRunning()) {
            System.err.println("Error - The Fluo '" + config.getApplicationName() + "' application"
                    + " is already running and must be stopped before running 'fluo init'. "
                    + " Aborted initialization.");
            System.exit(-1);
        }

        FluoAdmin.InitializationOptions initOpts = new FluoAdmin.InitializationOptions();

        if (opts.getUpdate()) {
            System.out.println("Updating configuration for the Fluo '" + config.getApplicationName()
                    + "' application in Zookeeper using " + opts.getAppPropsPath());
            admin.updateSharedConfig();
            System.out.println("Update is complete.");
            System.exit(0);
        }

        if (opts.getForce()) {
            initOpts.setClearZookeeper(true).setClearTable(true);
        } else {
            if (opts.getClearZookeeper()) {
                initOpts.setClearZookeeper(true);
            } else if (admin.zookeeperInitialized()) {
                System.out.print("A Fluo '" + config.getApplicationName()
                        + "' application is already initialized in Zookeeper at " + config.getAppZookeepers()
                        + " - Would you like to clear and reinitialize Zookeeper"
                        + " for this application (y/n)? ");
                if (readYes()) {
                    initOpts.setClearZookeeper(true);
                } else {
                    System.out.println("Aborted initialization.");
                    System.exit(-1);
                }
            }

            if (opts.getClearTable()) {
                initOpts.setClearTable(true);
            } else if (admin.accumuloTableExists()) {
                System.out.print("The Accumulo table '" + config.getAccumuloTable()
                        + "' already exists - Would you like to drop and recreate this table (y/n)? ");
                if (readYes()) {
                    initOpts.setClearTable(true);
                } else {
                    System.out.println("Aborted initialization.");
                    System.exit(-1);
                }
            }
        }

        System.out.println("Initializing Fluo '" + config.getApplicationName() + "' application using "
                + opts.getAppPropsPath());
        try {
            admin.initialize(initOpts);
        } catch (Exception e) {
            System.out.println("Initialization failed due to the following exception:");
            e.printStackTrace();
            System.exit(-1);
        }
        System.out.println("Initialization is complete.");
    }
}
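
A note on the message argument: in the example above, and in several below, the error message is built eagerly with string concatenation even when the check passes. Guava also provides a template overload, checkArgument(boolean, String errorMessageTemplate, Object... errorMessageArgs), that defers formatting to the failure path. A minimal sketch (illustrative names, not from the Fluo code):

import com.google.common.base.Preconditions;

public class MessageOverloadSketch {
    static void setPort(int port) {
        // The %s placeholder is substituted only if the check fails,
        // so the happy path pays no formatting cost.
        Preconditions.checkArgument(port > 0 && port <= 65535, "invalid port: %s", port);
    }
}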

From source file:org.apache.gobblin.kafka.tool.SimpleKafkaConsumer.java

public static void main(String[] args) throws IOException {
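    // Validate the command line before doing any work: at least the properties file must be given.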
    Preconditions.checkArgument(args.length >= 1,
            "Usage: java " + SimpleKafkaConsumer.class.getName() + " <properties_file> <checkpoint_file>");
    String fileName = args[0];
    Properties props = new Properties();
    props.load(new FileInputStream(new File(fileName)));

    KafkaCheckpoint checkpoint = KafkaCheckpoint.emptyCheckpoint();
    File checkpointFile = null;
    if (args.length > 1) {
        try {
            checkpointFile = new File(args[1]);
            if (checkpointFile.exists()) {
                FileInputStream fis = null;
                try {
                    fis = new FileInputStream(checkpointFile);
                    checkpoint = KafkaCheckpoint.deserialize(fis);
                } finally {
                    if (fis != null)
                        fis.close();
                }
            } else {
                log.info("Checkpoint doesn't exist, we will start with an empty one and store it here.");
            }
        } catch (IOException e) {
            log.warn("Could not deserialize the previous checkpoint. Starting with empty", e);
            if (!checkpoint.isEmpty()) {
                checkpoint = KafkaCheckpoint.emptyCheckpoint();
            }
        }
    }

    final SimpleKafkaConsumer consumer = new SimpleKafkaConsumer(props, checkpoint);
    Runtime.getRuntime().addShutdownHook(new Thread() {
        @Override
        public void run() {
            log.info("Shutting down...");
            consumer.shutdown();
        }
    });
    consumer.printLoop(checkpoint, checkpointFile);
}

From source file:org.apache.hadoop.hbase.rest.RESTServer.java

/**
 * The main method for the HBase rest server.
 * @param args command-line arguments
 * @throws Exception exception
 */
public static void main(String[] args) throws Exception {
    Log LOG = LogFactory.getLog("RESTServer");

    VersionInfo.logVersion();
    FilterHolder authFilter = null;
    Configuration conf = HBaseConfiguration.create();
    Class<? extends ServletContainer> containerClass = ServletContainer.class;
    UserProvider userProvider = UserProvider.instantiate(conf);
    // login the server principal (if using secure Hadoop)
    if (userProvider.isHadoopSecurityEnabled() && userProvider.isHBaseSecurityEnabled()) {
        String machineName = Strings.domainNamePointerToHostName(DNS.getDefaultHost(
                conf.get(REST_DNS_INTERFACE, "default"), conf.get(REST_DNS_NAMESERVER, "default")));
        String keytabFilename = conf.get(REST_KEYTAB_FILE);
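        // With security enabled, both the keytab file and the Kerberos principal must be configured.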
        Preconditions.checkArgument(keytabFilename != null && !keytabFilename.isEmpty(),
                REST_KEYTAB_FILE + " should be set if security is enabled");
        String principalConfig = conf.get(REST_KERBEROS_PRINCIPAL);
        Preconditions.checkArgument(principalConfig != null && !principalConfig.isEmpty(),
                REST_KERBEROS_PRINCIPAL + " should be set if security is enabled");
        userProvider.login(REST_KEYTAB_FILE, REST_KERBEROS_PRINCIPAL, machineName);
        if (conf.get(REST_AUTHENTICATION_TYPE) != null) {
            containerClass = RESTServletContainer.class;
            authFilter = new FilterHolder();
            authFilter.setClassName(AuthFilter.class.getName());
            authFilter.setName("AuthenticationFilter");
        }
    }

    UserGroupInformation realUser = userProvider.getCurrent().getUGI();
    RESTServlet servlet = RESTServlet.getInstance(conf, realUser);

    Options options = new Options();
    options.addOption("p", "port", true, "Port to bind to [default: 8080]");
    options.addOption("ro", "readonly", false,
            "Respond only to GET HTTP " + "method requests [default: false]");
    options.addOption(null, "infoport", true, "Port for web UI");

    CommandLine commandLine = null;
    try {
        commandLine = new PosixParser().parse(options, args);
    } catch (ParseException e) {
        LOG.error("Could not parse: ", e);
        printUsageAndExit(options, -1);
    }

    // check for user-defined port setting, if so override the conf
    if (commandLine != null && commandLine.hasOption("port")) {
        String val = commandLine.getOptionValue("port");
        servlet.getConfiguration().setInt("hbase.rest.port", Integer.valueOf(val));
        LOG.debug("port set to " + val);
    }

    // check if server should only process GET requests, if so override the conf
    if (commandLine != null && commandLine.hasOption("readonly")) {
        servlet.getConfiguration().setBoolean("hbase.rest.readonly", true);
        LOG.debug("readonly set to true");
    }

    // check for user-defined info server port setting, if so override the conf
    if (commandLine != null && commandLine.hasOption("infoport")) {
        String val = commandLine.getOptionValue("infoport");
        servlet.getConfiguration().setInt("hbase.rest.info.port", Integer.valueOf(val));
        LOG.debug("Web UI port set to " + val);
    }

    @SuppressWarnings("unchecked")
    List<String> remainingArgs = commandLine != null ? commandLine.getArgList() : new ArrayList<String>();
    if (remainingArgs.size() != 1) {
        printUsageAndExit(options, 1);
    }

    String command = remainingArgs.get(0);
    if ("start".equals(command)) {
        // continue and start container
    } else if ("stop".equals(command)) {
        System.exit(1);
    } else {
        printUsageAndExit(options, 1);
    }

    // set up the Jersey servlet container for Jetty
    ServletHolder sh = new ServletHolder(containerClass);
    sh.setInitParameter("com.sun.jersey.config.property.resourceConfigClass",
            ResourceConfig.class.getCanonicalName());
    sh.setInitParameter("com.sun.jersey.config.property.packages", "jetty");
    // The servlet holder below is instantiated to only handle the case
    // of the /status/cluster returning arrays of nodes (live/dead). Without
    // this servlet holder, the problem is that the node arrays in the response
    // are collapsed to single nodes. We want to be able to treat the
    // node lists as POJO in the response to /status/cluster servlet call,
    // but not change the behavior for any of the other servlets
    // Hence we don't use the servlet holder for all servlets / paths
    ServletHolder shPojoMap = new ServletHolder(containerClass);
    @SuppressWarnings("unchecked")
    Map<String, String> shInitMap = sh.getInitParameters();
    for (Entry<String, String> e : shInitMap.entrySet()) {
        shPojoMap.setInitParameter(e.getKey(), e.getValue());
    }
    shPojoMap.setInitParameter(JSONConfiguration.FEATURE_POJO_MAPPING, "true");

    // set up Jetty and run the embedded server

    Server server = new Server();

    Connector connector = new SelectChannelConnector();
    if (conf.getBoolean(REST_SSL_ENABLED, false)) {
        SslSelectChannelConnector sslConnector = new SslSelectChannelConnector();
        String keystore = conf.get(REST_SSL_KEYSTORE_STORE);
        String password = conf.get(REST_SSL_KEYSTORE_PASSWORD);
        String keyPassword = conf.get(REST_SSL_KEYSTORE_KEYPASSWORD, password);
        sslConnector.setKeystore(keystore);
        sslConnector.setPassword(password);
        sslConnector.setKeyPassword(keyPassword);
        connector = sslConnector;
    }
    connector.setPort(servlet.getConfiguration().getInt("hbase.rest.port", 8080));
    connector.setHost(servlet.getConfiguration().get("hbase.rest.host", "0.0.0.0"));

    server.addConnector(connector);

    // Set the default max thread number to 100 to limit
    // the number of concurrent requests, so that the REST server doesn't OOM easily.
    // Jetty defaults to a maximum of 250 threads if we don't set it.
    //
    // Our default min thread number of 2 matches Jetty's default.
    int maxThreads = servlet.getConfiguration().getInt("hbase.rest.threads.max", 100);
    int minThreads = servlet.getConfiguration().getInt("hbase.rest.threads.min", 2);
    QueuedThreadPool threadPool = new QueuedThreadPool(maxThreads);
    threadPool.setMinThreads(minThreads);
    server.setThreadPool(threadPool);

    server.setSendServerVersion(false);
    server.setSendDateHeader(false);
    server.setStopAtShutdown(true);
    // set up context
    Context context = new Context(server, "/", Context.SESSIONS);
    context.addServlet(shPojoMap, "/status/cluster");
    context.addServlet(sh, "/*");
    if (authFilter != null) {
        context.addFilter(authFilter, "/*", 1);
    }

    // Load filters from configuration.
    String[] filterClasses = servlet.getConfiguration().getStrings(FILTER_CLASSES,
            ArrayUtils.EMPTY_STRING_ARRAY);
    for (String filter : filterClasses) {
        filter = filter.trim();
        context.addFilter(Class.forName(filter), "/*", 0);
    }
    HttpServerUtil.constrainHttpMethods(context);

    // Put up info server.
    int port = conf.getInt("hbase.rest.info.port", 8085);
    if (port >= 0) {
        conf.setLong("startcode", System.currentTimeMillis());
        String a = conf.get("hbase.rest.info.bindAddress", "0.0.0.0");
        InfoServer infoServer = new InfoServer("rest", a, port, false, conf);
        infoServer.setAttribute("hbase.conf", conf);
        infoServer.start();
    }

    // start server
    server.start();
    server.join();
}

From source file:edu.byu.nlp.data.app.AnnotationStream2Annotators.java

public static void main(String[] args) throws IOException {
    // parse CLI arguments
    new ArgumentParser(AnnotationStream2Annotators.class).parseArgs(args);
    Preconditions.checkNotNull(jsonStream, "You must provide a valid --json-stream!");
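    // Reject out-of-range command-line values before any expensive work starts.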
    Preconditions.checkArgument(smooth >= 0, "invalid smoothing value=" + smooth);
    Preconditions.checkArgument(k > 0, "invalid number of clusters=" + k);

    // compile annotation stream data into a dataset
    RandomGenerator rnd = new MersenneTwister(seed);
    Dataset data = readData(jsonStream);

    // create confusion matrices for each annotator wrt some truth
    int[][][] confusionMatrices; // confusionMatrices[annotator][true label][annotation] = count
    logger.info("dataset=" + data);
    switch (confusionMatrixTruth) {
    case GOLD:
        confusionMatrices = Datasets.confusionMatricesWrtGoldLabels(data);
        break;
    case MAJORITY:
        confusionMatrices = Datasets.confusionMatricesWrtMajorityVoteLabels(data, rnd);
        break;
    default:
        throw new IllegalArgumentException(
                "unknown truth standard for constructing confusion matrices: " + confusionMatrixTruth);
    }

    // aggregate annotators based on their confusion matrices
    double[][][] annotatorParameters = confusionMatrices2AnnotatorParameters(confusionMatrices);
    int[] clusterAssignments = clusterAnnotatorParameters(annotatorParameters, aggregate, k, maxIterations,
            smooth, rnd);
    double[][][] clusteredAnnotatorParameters = aggregateAnnotatorParameterClusters(annotatorParameters,
            clusterAssignments);

    // aggregate annotator rates
    double[] annotationRates = new double[clusteredAnnotatorParameters.length];
    for (int j = 0; j < confusionMatrices.length; j++) {
        long numAnnotationsPerJ = Matrices.sum(confusionMatrices[j]);
        // add this annotator's annotation count to the cluster total
        annotationRates[clusterAssignments[j]] += numAnnotationsPerJ;
    }
    DoubleArrays.normalizeToSelf(annotationRates);

    // output to console 
    logger.info("aggregated annotators=\n" + Matrices.toString(clusteredAnnotatorParameters, 10, 10, 20, 3));
    for (int c = 0; c < clusteredAnnotatorParameters.length; c++) {
        logger.info("aggregated annotator #" + c + " accuracy=" + accuracyOf(clusteredAnnotatorParameters[c]));
        logger.info("aggregated annotator #" + c + " rate=" + annotationRates[c]);
    }

    // output to file 
    if (output != null) {
        List<SimulatedAnnotator> annotators = SimulatedAnnotators.from(clusteredAnnotatorParameters,
                annotationRates);
        Files2.write(SimulatedAnnotators.serialize(annotators), output);
    }
}

From source file:tachyon.yarn.ApplicationMaster.java

/**
 * @param args Command line arguments to launch application master
 */
public static void main(String[] args) {
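    // Guard the required positional arguments (note that fewer than three arguments would fail
    // with an ArrayIndexOutOfBoundsException before these checks run).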
    Preconditions.checkArgument(args[1] != null, "Tachyon home cannot be null");
    Preconditions.checkArgument(args[2] != null, "Address of Tachyon master cannot be null");
    try {
        LOG.info("Starting Application Master with args " + Arrays.toString(args));
        final int numWorkers = Integer.parseInt(args[0]);
        final String tachyonHome = args[1];
        final String masterAddress = args[2];
        ApplicationMaster applicationMaster = new ApplicationMaster(numWorkers, tachyonHome, masterAddress);
        applicationMaster.start();
        applicationMaster.requestContainers();
        applicationMaster.stop();
    } catch (Exception ex) {
        LOG.error("Error running Application Master " + ex);
        System.exit(1);
    }
}

From source file:io.warp10.standalone.Warp.java

public static void main(String[] args) throws Exception {

    System.setProperty("java.awt.headless", "true");

    setProperties(args[0]);

    boolean nullbackend = "true".equals(WarpConfig.getProperties().getProperty(NULL));

    boolean plasmabackend = "true".equals(WarpConfig.getProperties().getProperty(Configuration.PURE_PLASMA));

    boolean inmemory = "true".equals(WarpConfig.getProperties().getProperty(Configuration.IN_MEMORY));

    Properties properties = getProperties();

    for (String property : REQUIRED_PROPERTIES) {
        // Don't check LEVELDB_HOME when in-memory
        if (inmemory && Configuration.LEVELDB_HOME.equals(property)) {
            continue;
        }
        Preconditions.checkNotNull(properties.getProperty(property),
                "Property '" + property + "' MUST be set.");
    }

    //
    // Initialize KeyStore
    //

    KeyStore keystore;

    if (properties.containsKey(Configuration.OSS_MASTER_KEY)) {
        keystore = new OSSKeyStore(properties.getProperty(Configuration.OSS_MASTER_KEY));
    } else {
        keystore = new UnsecureKeyStore();
    }

    extractKeys(keystore, properties);

    keystore.setKey(KeyStore.SIPHASH_CLASS,
            keystore.decodeKey(properties.getProperty(Configuration.WARP_HASH_CLASS)));
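    // Each SipHash key decoded from the configuration must be exactly 128 bits (16 bytes) long.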
    Preconditions.checkArgument(16 == keystore.getKey(KeyStore.SIPHASH_CLASS).length,
            Configuration.WARP_HASH_CLASS + " MUST be 128 bits long.");
    keystore.setKey(KeyStore.SIPHASH_LABELS,
            keystore.decodeKey(properties.getProperty(Configuration.WARP_HASH_LABELS)));
    Preconditions.checkArgument(16 == keystore.getKey(KeyStore.SIPHASH_LABELS).length,
            Configuration.WARP_HASH_LABELS + " MUST be 128 bits long.");

    //
    // Generate secondary keys. We use the ones' complement of the primary keys
    //

    keystore.setKey(KeyStore.SIPHASH_CLASS_SECONDARY,
            CryptoUtils.invert(keystore.getKey(KeyStore.SIPHASH_CLASS)));
    keystore.setKey(KeyStore.SIPHASH_LABELS_SECONDARY,
            CryptoUtils.invert(keystore.getKey(KeyStore.SIPHASH_LABELS)));

    keystore.setKey(KeyStore.SIPHASH_INDEX,
            keystore.decodeKey(properties.getProperty(Configuration.CONTINUUM_HASH_INDEX)));
    Preconditions.checkArgument(16 == keystore.getKey(KeyStore.SIPHASH_INDEX).length,
            Configuration.CONTINUUM_HASH_INDEX + " MUST be 128 bits long.");
    keystore.setKey(KeyStore.SIPHASH_TOKEN,
            keystore.decodeKey(properties.getProperty(Configuration.WARP_HASH_TOKEN)));
    Preconditions.checkArgument(16 == keystore.getKey(KeyStore.SIPHASH_TOKEN).length,
            Configuration.WARP_HASH_TOKEN + " MUST be 128 bits long.");
    keystore.setKey(KeyStore.SIPHASH_APPID,
            keystore.decodeKey(properties.getProperty(Configuration.WARP_HASH_APP)));
    Preconditions.checkArgument(16 == keystore.getKey(KeyStore.SIPHASH_APPID).length,
            Configuration.WARP_HASH_APP + " MUST be 128 bits long.");
    keystore.setKey(KeyStore.AES_TOKEN,
            keystore.decodeKey(properties.getProperty(Configuration.WARP_AES_TOKEN)));
    Preconditions.checkArgument(
            (16 == keystore.getKey(KeyStore.AES_TOKEN).length)
                    || (24 == keystore.getKey(KeyStore.AES_TOKEN).length)
                    || (32 == keystore.getKey(KeyStore.AES_TOKEN).length),
            Configuration.WARP_AES_TOKEN + " MUST be 128, 192 or 256 bits long.");
    keystore.setKey(KeyStore.AES_SECURESCRIPTS,
            keystore.decodeKey(properties.getProperty(Configuration.WARP_AES_SCRIPTS)));
    Preconditions.checkArgument(
            (16 == keystore.getKey(KeyStore.AES_SECURESCRIPTS).length)
                    || (24 == keystore.getKey(KeyStore.AES_SECURESCRIPTS).length)
                    || (32 == keystore.getKey(KeyStore.AES_SECURESCRIPTS).length),
            Configuration.WARP_AES_SCRIPTS + " MUST be 128, 192 or 256 bits long.");

    if (null != properties.getProperty(Configuration.WARP_AES_LOGGING,
            Configuration.WARP_DEFAULT_AES_LOGGING)) {
        keystore.setKey(KeyStore.AES_LOGGING, keystore.decodeKey(properties
                .getProperty(Configuration.WARP_AES_LOGGING, Configuration.WARP_DEFAULT_AES_LOGGING)));
        Preconditions.checkArgument(
                (16 == keystore.getKey(KeyStore.AES_LOGGING).length)
                        || (24 == keystore.getKey(KeyStore.AES_LOGGING).length)
                        || (32 == keystore.getKey(KeyStore.AES_LOGGING).length),
                Configuration.WARP_AES_LOGGING + " MUST be 128, 192 or 256 bits long.");
    }

    setKeyStore(keystore);

    //
    // Initialize levelDB
    //

    Options options = new Options();

    options.createIfMissing(false);

    if (properties.containsKey(Configuration.LEVELDB_MAXOPENFILES)) {
        int maxOpenFiles = Integer.parseInt(properties.getProperty(Configuration.LEVELDB_MAXOPENFILES));
        options.maxOpenFiles(maxOpenFiles);
    }

    if (null != properties.getProperty(Configuration.LEVELDB_CACHE_SIZE)) {
        options.cacheSize(Long.parseLong(properties.getProperty(Configuration.LEVELDB_CACHE_SIZE)));
    }

    if (null != properties.getProperty(Configuration.LEVELDB_COMPRESSION_TYPE)) {
        if ("snappy".equalsIgnoreCase(properties.getProperty(Configuration.LEVELDB_COMPRESSION_TYPE))) {
            options.compressionType(CompressionType.SNAPPY);
        } else {
            options.compressionType(CompressionType.NONE);
        }
    }

    //
    // Attempt to load JNI library, fallback to pure java in case of error
    //

    if (!inmemory && !nullbackend && !plasmabackend) {
        try {
            db = JniDBFactory.factory.open(new File(properties.getProperty(Configuration.LEVELDB_HOME)),
                    options);
        } catch (UnsatisfiedLinkError ule) {
            System.out.println("WARNING: falling back to pure java implementation of LevelDB.");
            db = Iq80DBFactory.factory.open(new File(properties.getProperty(Configuration.LEVELDB_HOME)),
                    options);
        }
    }

    // Register shutdown hook to close the DB.
    Runtime.getRuntime().addShutdownHook(new Thread(new Warp()));

    //
    // Initialize the backup manager
    //

    if (null != db) {
        String triggerPath = properties.getProperty(Configuration.STANDALONE_SNAPSHOT_TRIGGER);
        String signalPath = properties.getProperty(Configuration.STANDALONE_SNAPSHOT_SIGNAL);

        if (null != triggerPath && null != signalPath) {
            Thread backupManager = new StandaloneSnapshotManager(triggerPath, signalPath);
            backupManager.setDaemon(true);
            backupManager.setName("[Snapshot Manager]");
            backupManager.start();
        }
    }

    WarpScriptLib.registerExtensions();

    //
    // Initialize ThrottlingManager
    //

    ThrottlingManager.init();

    //
    // Create Jetty server
    //

    Server server = new Server();

    int acceptors = Integer.valueOf(properties.getProperty(Configuration.STANDALONE_ACCEPTORS));
    int selectors = Integer.valueOf(properties.getProperty(Configuration.STANDALONE_SELECTORS));
    port = Integer.valueOf(properties.getProperty(Configuration.STANDALONE_PORT));
    host = properties.getProperty(Configuration.STANDALONE_HOST);

    ServerConnector connector = new ServerConnector(server, acceptors, selectors);

    connector.setPort(port);
    if (null != host) {
        connector.setHost(host);
    }

    String idle = properties.getProperty(Configuration.STANDALONE_IDLE_TIMEOUT);

    if (null != idle) {
        connector.setIdleTimeout(Long.parseLong(idle));
    }

    connector.setName("Continuum Standalone Egress");

    server.setConnectors(new Connector[] { connector });

    HandlerList handlers = new HandlerList();

    Handler cors = new CORSHandler();
    handlers.addHandler(cors);

    StandaloneDirectoryClient sdc = null;
    StoreClient scc = null;

    if (inmemory) {
        sdc = new StandaloneDirectoryClient(null, keystore);
        scc = new StandaloneMemoryStore(keystore,
                Long.valueOf(WarpDist.getProperties().getProperty(Configuration.IN_MEMORY_DEPTH,
                        Long.toString(60 * 60 * 1000 * Constants.TIME_UNITS_PER_MS))),
                Long.valueOf(
                        WarpDist.getProperties().getProperty(Configuration.IN_MEMORY_HIGHWATERMARK, "100000")),
                Long.valueOf(
                        WarpDist.getProperties().getProperty(Configuration.IN_MEMORY_LOWWATERMARK, "80000")));
        ((StandaloneMemoryStore) scc).setDirectoryClient((StandaloneDirectoryClient) sdc);
        if ("true".equals(WarpDist.getProperties().getProperty(Configuration.IN_MEMORY_EPHEMERAL))) {
            ((StandaloneMemoryStore) scc).setEphemeral(true);
        }
        ((StandaloneMemoryStore) scc).load();
    } else if (plasmabackend) {
        sdc = new StandaloneDirectoryClient(null, keystore);
        scc = new PlasmaStoreClient();
    } else if (nullbackend) {
        sdc = new NullDirectoryClient(keystore);
        scc = new NullStoreClient();
    } else {
        sdc = new StandaloneDirectoryClient(db, keystore);
        scc = new StandaloneStoreClient(db, keystore, properties);
    }

    StandaloneGeoDirectory geodir = new StandaloneGeoDirectory(keystore.clone(), scc, sdc, properties);

    if (properties.containsKey(Configuration.RUNNER_ROOT)) {
        if (!properties.containsKey(Configuration.RUNNER_ENDPOINT)) {
            properties.setProperty(Configuration.RUNNER_ENDPOINT, "");
            StandaloneScriptRunner runner = new StandaloneScriptRunner(properties, keystore.clone(), scc, sdc,
                    geodir, properties);
        } else {
            //
            // Allocate a normal runner
            //
            ScriptRunner runner = new ScriptRunner(keystore.clone(), properties);
        }

    }

    //
    // Enable the ThrottlingManager
    //

    ThrottlingManager.enable();

    QuasarTokenFilter tf = new QuasarTokenFilter(properties, keystore);

    GzipHandler gzip = new GzipHandler();
    EgressExecHandler egressExecHandler = new EgressExecHandler(keystore, properties, sdc, geodir.getClient(),
            scc);
    gzip.setHandler(egressExecHandler);
    gzip.setBufferSize(65536);
    gzip.setMinGzipSize(0);
    handlers.addHandler(gzip);
    setEgress(true);

    gzip = new GzipHandler();
    gzip.setHandler(new StandaloneIngressHandler(keystore, sdc, scc));
    gzip.setBufferSize(65536);
    gzip.setMinGzipSize(0);
    handlers.addHandler(gzip);

    gzip = new GzipHandler();
    gzip.setHandler(new EgressFetchHandler(keystore, properties, sdc, scc));
    gzip.setBufferSize(65536);
    gzip.setMinGzipSize(0);
    handlers.addHandler(gzip);

    gzip = new GzipHandler();
    gzip.setHandler(new EgressFindHandler(keystore, sdc));
    gzip.setBufferSize(65536);
    gzip.setMinGzipSize(0);
    handlers.addHandler(gzip);

    gzip = new GzipHandler();
    gzip.setHandler(new StandaloneDeleteHandler(keystore, sdc, scc));
    gzip.setBufferSize(65536);
    gzip.setMinGzipSize(0);
    handlers.addHandler(gzip);

    handlers.addHandler(geodir);

    //ContextHandler context = new ContextHandler();
    StandalonePlasmaHandler plasmaHandler = new StandalonePlasmaHandler(keystore, properties, sdc);
    scc.addPlasmaHandler(plasmaHandler);
    scc.addPlasmaHandler(geodir);

    //context.setHandler(plasmaHandler);
    //handlers.addHandler(context);
    handlers.addHandler(plasmaHandler);

    StandaloneStreamUpdateHandler streamUpdateHandler = new StandaloneStreamUpdateHandler(keystore, properties,
            sdc, scc);
    handlers.addHandler(streamUpdateHandler);

    EgressMobiusHandler mobiusHandler = new EgressMobiusHandler(scc, sdc, properties);
    handlers.addHandler(mobiusHandler);

    server.setHandler(handlers);

    JettyUtil.setSendServerVersion(server, false);

    // Clear master key from memory
    keystore.forget();

    try {
        server.start();
    } catch (Exception e) {
        throw new RuntimeException(e);
    }

    // Retrieve actual local port
    port = connector.getLocalPort();

    // Indicate standalone mode is on
    standaloneMode = true;

    WarpDist.setInitialized(true);

    try {
        while (true) {
            try {
                Thread.sleep(60000L);
            } catch (InterruptedException ie) {
            }
        }
    } catch (Throwable t) {
        System.err.println(t.getMessage());
        server.stop();
    }
}

From source file:com.xiaomi.linden.service.LindenServer.java

public static void main(String[] args) throws IOException {
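    // The configuration directory is a mandatory first argument.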
    Preconditions.checkArgument(args.length != 0, "need conf dir");
    String conf = args[0];
    LindenServer server = null;
    try {
        server = new LindenServer(conf);
        server.startServer();
    } catch (Exception e) {
        if (server != null)
            try {
                server.close();
            } catch (Exception e1) {
                e1.printStackTrace();
            }
        e.printStackTrace();
        LOGGER.error("Server start failed : {}", Throwables.getStackTraceAsString(e));
    }
}

From source file:io.warp10.WarpDist.java

public static void main(String[] args) throws Exception {

    System.setProperty("java.awt.headless", "true");

    setProperties(args[0]);

    //
    // Extract components to spawn
    //

    String[] components = properties.getProperty(Configuration.WARP_COMPONENTS).split(",");

    Set<String> subprocesses = new HashSet<String>();

    for (String component : components) {
        component = component.trim();

        subprocesses.add(component);
    }

    if (properties.containsKey(Configuration.OSS_MASTER_KEY)) {
        keystore = new OSSKeyStore(properties.getProperty(Configuration.OSS_MASTER_KEY));
    } else {
        keystore = new UnsecureKeyStore();
    }

    //
    // Set SIPHASH keys for class/labels/index
    //

    for (String property : REQUIRED_PROPERTIES) {
        Preconditions.checkNotNull(properties.getProperty(property),
                "Property '" + property + "' MUST be set.");
    }

    keystore.setKey(KeyStore.SIPHASH_CLASS,
            keystore.decodeKey(properties.getProperty(Configuration.WARP_HASH_CLASS)));
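    // As in the standalone Warp, each SipHash key must be exactly 128 bits (16 bytes) long.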
    Preconditions.checkArgument(16 == keystore.getKey(KeyStore.SIPHASH_CLASS).length,
            Configuration.WARP_HASH_CLASS + " MUST be 128 bits long.");
    keystore.setKey(KeyStore.SIPHASH_LABELS,
            keystore.decodeKey(properties.getProperty(Configuration.WARP_HASH_LABELS)));
    Preconditions.checkArgument(16 == keystore.getKey(KeyStore.SIPHASH_LABELS).length,
            Configuration.WARP_HASH_LABELS + " MUST be 128 bits long.");

    //
    // Generate secondary keys. We use the ones' complement of the primary keys
    //

    keystore.setKey(KeyStore.SIPHASH_CLASS_SECONDARY,
            CryptoUtils.invert(keystore.getKey(KeyStore.SIPHASH_CLASS)));
    keystore.setKey(KeyStore.SIPHASH_LABELS_SECONDARY,
            CryptoUtils.invert(keystore.getKey(KeyStore.SIPHASH_LABELS)));

    keystore.setKey(KeyStore.SIPHASH_INDEX,
            keystore.decodeKey(properties.getProperty(Configuration.CONTINUUM_HASH_INDEX)));
    Preconditions.checkArgument(16 == keystore.getKey(KeyStore.SIPHASH_INDEX).length,
            Configuration.CONTINUUM_HASH_INDEX + " MUST be 128 bits long.");
    keystore.setKey(KeyStore.SIPHASH_TOKEN,
            keystore.decodeKey(properties.getProperty(Configuration.WARP_HASH_TOKEN)));
    Preconditions.checkArgument(16 == keystore.getKey(KeyStore.SIPHASH_TOKEN).length,
            Configuration.WARP_HASH_TOKEN + " MUST be 128 bits long.");
    keystore.setKey(KeyStore.SIPHASH_APPID,
            keystore.decodeKey(properties.getProperty(Configuration.WARP_HASH_APP)));
    Preconditions.checkArgument(16 == keystore.getKey(KeyStore.SIPHASH_APPID).length,
            Configuration.WARP_HASH_APP + " MUST be 128 bits long.");
    keystore.setKey(KeyStore.AES_TOKEN,
            keystore.decodeKey(properties.getProperty(Configuration.WARP_AES_TOKEN)));
    Preconditions.checkArgument(
            (16 == keystore.getKey(KeyStore.AES_TOKEN).length)
                    || (24 == keystore.getKey(KeyStore.AES_TOKEN).length)
                    || (32 == keystore.getKey(KeyStore.AES_TOKEN).length),
            Configuration.WARP_AES_TOKEN + " MUST be 128, 192 or 256 bits long.");
    keystore.setKey(KeyStore.AES_SECURESCRIPTS,
            keystore.decodeKey(properties.getProperty(Configuration.WARP_AES_SCRIPTS)));
    Preconditions.checkArgument(
            (16 == keystore.getKey(KeyStore.AES_SECURESCRIPTS).length)
                    || (24 == keystore.getKey(KeyStore.AES_SECURESCRIPTS).length)
                    || (32 == keystore.getKey(KeyStore.AES_SECURESCRIPTS).length),
            Configuration.WARP_AES_SCRIPTS + " MUST be 128, 192 or 256 bits long.");

    if (null != properties.getProperty(Configuration.WARP_AES_LOGGING,
            Configuration.WARP_DEFAULT_AES_LOGGING)) {
        keystore.setKey(KeyStore.AES_LOGGING, keystore.decodeKey(properties
                .getProperty(Configuration.WARP_AES_LOGGING, Configuration.WARP_DEFAULT_AES_LOGGING)));
        Preconditions.checkArgument(
                (16 == keystore.getKey(KeyStore.AES_LOGGING).length)
                        || (24 == keystore.getKey(KeyStore.AES_LOGGING).length)
                        || (32 == keystore.getKey(KeyStore.AES_LOGGING).length),
                Configuration.WARP_AES_LOGGING + " MUST be 128, 192 or 256 bits long.");
    }

    if (null != properties.getProperty(Configuration.CONFIG_FETCH_PSK)) {
        keystore.setKey(KeyStore.SIPHASH_FETCH_PSK,
                keystore.decodeKey(properties.getProperty(Configuration.CONFIG_FETCH_PSK)));
        Preconditions.checkArgument((16 == keystore.getKey(KeyStore.SIPHASH_FETCH_PSK).length),
                Configuration.CONFIG_FETCH_PSK + " MUST be 128 bits long.");
    }

    WarpScriptLib.registerExtensions();

    KafkaWebCallService.initKeys(keystore, properties);

    //
    // Initialize ThrottlingManager
    //

    ThrottlingManager.init();

    if (subprocesses.contains("egress") && subprocesses.contains("fetcher")) {
        throw new RuntimeException("'fetcher' and 'egress' cannot be specified together as components to run.");
    }

    for (String subprocess : subprocesses) {
        if ("ingress".equals(subprocess)) {
            Ingress ingress = new Ingress(getKeyStore(), getProperties());
            Map<String, String> labels = new HashMap<String, String>();
            labels.put(SensisionConstants.SENSISION_LABEL_COMPONENT, "ingress");
            Sensision.set(SensisionConstants.SENSISION_CLASS_WARP_REVISION, labels, Revision.REVISION);
        } else if ("egress".equals(subprocess)) {
            Egress egress = new Egress(getKeyStore(), getProperties(), false);
            Map<String, String> labels = new HashMap<String, String>();
            labels.put(SensisionConstants.SENSISION_LABEL_COMPONENT, "egress");
            Sensision.set(SensisionConstants.SENSISION_CLASS_WARP_REVISION, labels, Revision.REVISION);
            hasEgress = true;
        } else if ("fetcher".equals(subprocess)) {
            Egress egress = new Egress(getKeyStore(), getProperties(), true);
            Map<String, String> labels = new HashMap<String, String>();
            labels.put(SensisionConstants.SENSISION_LABEL_COMPONENT, "fetcher");
            Sensision.set(SensisionConstants.SENSISION_CLASS_WARP_REVISION, labels, Revision.REVISION);
        } else if ("store".equals(subprocess)) {
            int nthreads = Integer.valueOf(properties.getProperty(Configuration.STORE_NTHREADS));
            for (int i = 0; i < nthreads; i++) {
                //Store store = new Store(getKeyStore(), getProperties(), null);
                Store store = new Store(getKeyStore(), getProperties(), 1);
            }
            Map<String, String> labels = new HashMap<String, String>();
            labels.put(SensisionConstants.SENSISION_LABEL_COMPONENT, "store");
            Sensision.set(SensisionConstants.SENSISION_CLASS_WARP_REVISION, labels, Revision.REVISION);
        } else if ("directory".equals(subprocess)) {
            Directory directory = new Directory(getKeyStore(), getProperties());
            Map<String, String> labels = new HashMap<String, String>();
            labels.put(SensisionConstants.SENSISION_LABEL_COMPONENT, "directory");
            Sensision.set(SensisionConstants.SENSISION_CLASS_WARP_REVISION, labels, Revision.REVISION);
            //} else if ("index".equals(subprocess)) {
            //  Index index = new Index(getKeyStore(), getProperties());
        } else if ("plasmaFE".equalsIgnoreCase(subprocess)) {
            PlasmaFrontEnd plasmaFE = new PlasmaFrontEnd(getKeyStore(), getProperties());
            Map<String, String> labels = new HashMap<String, String>();
            labels.put(SensisionConstants.SENSISION_LABEL_COMPONENT, "plasmafe");
            Sensision.set(SensisionConstants.SENSISION_CLASS_WARP_REVISION, labels, Revision.REVISION);
        } else if ("plasmaBE".equalsIgnoreCase(subprocess)) {
            PlasmaBackEnd plasmaBE = new PlasmaBackEnd(getKeyStore(), getProperties());
            Map<String, String> labels = new HashMap<String, String>();
            labels.put(SensisionConstants.SENSISION_LABEL_COMPONENT, "plasmabe");
            Sensision.set(SensisionConstants.SENSISION_CLASS_WARP_REVISION, labels, Revision.REVISION);
        } else if ("webcall".equals(subprocess)) {
            KafkaWebCallBroker webcall = new KafkaWebCallBroker(getKeyStore(), getProperties());
            Map<String, String> labels = new HashMap<String, String>();
            labels.put(SensisionConstants.SENSISION_LABEL_COMPONENT, "webcall");
            Sensision.set(SensisionConstants.SENSISION_CLASS_WARP_REVISION, labels, Revision.REVISION);
        } else if ("geodir".equals(subprocess)) {
            GeoDirectory geodir = new GeoDirectory(getKeyStore(), getProperties());
            Map<String, String> labels = new HashMap<String, String>();
            labels.put(SensisionConstants.SENSISION_LABEL_COMPONENT, "geodir");
            Sensision.set(SensisionConstants.SENSISION_CLASS_WARP_REVISION, labels, Revision.REVISION);
        } else if ("runner".equals(subprocess)) {
            ScriptRunner runner = new ScriptRunner(getKeyStore(), getProperties());
            Map<String, String> labels = new HashMap<String, String>();
            labels.put(SensisionConstants.SENSISION_LABEL_COMPONENT, "runner");
            Sensision.set(SensisionConstants.SENSISION_CLASS_WARP_REVISION, labels, Revision.REVISION);
        } else {
            System.err.println("Unknown component '" + subprocess + "', skipping.");
            continue;
        }
    }

    // Clear master key from memory
    keystore.forget();

    setInitialized(true);

    //
    // We're done, let's sleep endlessly
    //

    try {
        while (true) {
            try {
                Thread.sleep(60000L);
            } catch (InterruptedException ie) {
            }
        }
    } catch (Throwable t) {
        System.err.println(t.getMessage());
    }
}

From source file:org.opendaylight.protocol.pcep.pcc.mock.Main.java

public static void main(final String[] args)
        throws InterruptedException, ExecutionException, UnknownHostException {
    InetSocketAddress localAddress = new InetSocketAddress(LOCALHOST, DEFAULT_LOCAL_PORT);
    List<InetSocketAddress> remoteAddress = Lists
            .newArrayList(new InetSocketAddress(LOCALHOST, DEFAULT_REMOTE_PORT));
    int pccCount = 1;
    int lsps = 1;
    boolean pcError = false;
    final LoggerContext lc = (LoggerContext) LoggerFactory.getILoggerFactory();
    short ka = DEFAULT_KEEP_ALIVE;
    short dt = DEFAULT_DEAD_TIMER;
    String password = null;
    long reconnectTime = -1;
    int redelegationTimeout = 0;
    int stateTimeout = -1;

    getRootLogger(lc).setLevel(ch.qos.logback.classic.Level.INFO);
    int argIdx = 0;
    while (argIdx < args.length) {
        if (args[argIdx].equals("--local-address")) {
            localAddress = InetSocketAddressUtil.getInetSocketAddress(args[++argIdx], DEFAULT_LOCAL_PORT);
        } else if (args[argIdx].equals("--remote-address")) {
            remoteAddress = InetSocketAddressUtil.parseAddresses(args[++argIdx], DEFAULT_REMOTE_PORT);
        } else if (args[argIdx].equals("--pcc")) {
            pccCount = Integer.valueOf(args[++argIdx]);
        } else if (args[argIdx].equals("--lsp")) {
            lsps = Integer.valueOf(args[++argIdx]);
        } else if (args[argIdx].equals("--pcerr")) {
            pcError = true;
        } else if (args[argIdx].equals("--log-level")) {
            getRootLogger(lc).setLevel(Level.toLevel(args[++argIdx], ch.qos.logback.classic.Level.INFO));
        } else if (args[argIdx].equals("--keepalive") || args[argIdx].equals("-ka")) {
            ka = Short.valueOf(args[++argIdx]);
        } else if (args[argIdx].equals("--deadtimer") || args[argIdx].equals("-d")) {
            dt = Short.valueOf(args[++argIdx]);
        } else if (args[argIdx].equals("--password")) {
            password = args[++argIdx];
        } else if (args[argIdx].equals("--reconnect")) {
            reconnectTime = Integer.valueOf(args[++argIdx]).intValue();
        } else if (args[argIdx].equals("--redelegation-timeout")) {
            redelegationTimeout = Integer.valueOf(args[++argIdx]);
        } else if (args[argIdx].equals("--state-timeout")) {
            stateTimeout = Integer.valueOf(args[++argIdx]);
        } else if (args[argIdx].equals("--state-sync-avoidance")) {
            //"--state-sync-avoidance 10, 5, 10
            includeDbv = Boolean.TRUE;
            final Long dbVersionAfterReconnect = Long.valueOf(args[++argIdx]);
            disonnectAfterXSeconds = Integer.valueOf(args[++argIdx]);
            reconnectAfterXSeconds = Integer.valueOf(args[++argIdx]);
            syncOptDBVersion = BigInteger.valueOf(dbVersionAfterReconnect);
        } else if (args[argIdx].equals("--incremental-sync-procedure")) {
            //TODO Check that DBv > Lsp always ??
            includeDbv = Boolean.TRUE;
            incrementalSync = Boolean.TRUE;
            //Version of database to be used after restart
            final Long initialDbVersionAfterReconnect = Long.valueOf(args[++argIdx]);
            disonnectAfterXSeconds = Integer.valueOf(args[++argIdx]);
            reconnectAfterXSeconds = Integer.valueOf(args[++argIdx]);
            syncOptDBVersion = BigInteger.valueOf(initialDbVersionAfterReconnect);
        } else if (args[argIdx].equals("--triggered-initial-sync")) {
            triggeredInitSync = Boolean.TRUE;
        } else if (args[argIdx].equals("--triggered-re-sync")) {
            triggeredResync = Boolean.TRUE;
        } else {
            LOG.warn("WARNING: Unrecognized argument: {}", args[argIdx]);
        }
        argIdx++;
    }

    if (incrementalSync) {
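        // The database version advertised after reconnect must exceed the LSP count for
        // incremental synchronization to make sense.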
        Preconditions.checkArgument(syncOptDBVersion.intValue() > lsps,
                "The synchronization database version used after reconnect must be "
                        + "greater than the number of LSPs");
    }

    final Optional<BigInteger> dBVersion = Optional.fromNullable(syncOptDBVersion);
    final PCCsBuilder pccs = new PCCsBuilder(lsps, pcError, pccCount, localAddress, remoteAddress, ka, dt,
            password, reconnectTime, redelegationTimeout, stateTimeout, getCapabilities());
    final TimerHandler timerHandler = new TimerHandler(pccs, dBVersion, disonnectAfterXSeconds,
            reconnectAfterXSeconds);
    pccs.createPCCs(BigInteger.valueOf(lsps), Optional.fromNullable(timerHandler));
    if (!triggeredInitSync) {
        timerHandler.createDisconnectTask();
    }
}

From source file:org.apache.tez.log.LogParser.java

public static void main(String[] args) throws IOException {
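    // Both checks guard the single required argument: it must be supplied and must name an existing file.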
    Preconditions.checkArgument(args.length == 1, "Please provide the file to be parsed");
    File inputFile = new File(args[0]);
    Preconditions.checkArgument(inputFile.exists(),
            "Please provide valid file. " + inputFile + " does not exist");

    Stopwatch sw = Stopwatch.createStarted();

    LogParser parser = new LogParser(inputFile);

    parser.process();
    System.out.println();

    IAnalyzer vertexMappingAnalyzer = parser.getAnalyzers().get(VertexMappingAnalyzer.class.getName());
    IAnalyzer vertexFinishedAnalyzer = parser.getAnalyzers().get(VertexFinishedAnalyzer.class.getName());
    if (vertexMappingAnalyzer != null && vertexFinishedAnalyzer != null) {
        System.out.println("Vertices that haven't finished");
        System.out.println("*******************************");
        Map<String, String> vertexMapping = (Map<String, String>) vertexMappingAnalyzer.getResult();
        Map<VertexFinishedAnalyzer.VertexFinished, String> vertexFinishedMap = (Map<VertexFinishedAnalyzer.VertexFinished, String>) vertexFinishedAnalyzer
                .getResult();

        for (Map.Entry<String, String> e : vertexMapping.entrySet()) {
            boolean found = false;
            for (Map.Entry<VertexFinishedAnalyzer.VertexFinished, String> fe : vertexFinishedMap.entrySet()) {
                if (fe.getKey().vertexId.equalsIgnoreCase(e.getKey())) {
                    found = true;
                    break;
                }
            }
            if (!found) {
                System.out.println(e.getKey() + " is not in finished map list. " + e.getValue());
            }
        }
    }

    /**
     * If shuffle-blamed-for details are present, correlate them with rack details.
     */
    IAnalyzer shuffleBlamedFor = parser.getAnalyzers().get(ShuffleBlamedForAnalyzer.class.getName());
    IAnalyzer rackResolver = parser.getAnalyzers().get(RackResolverExtractor.class.getName());
    if (shuffleBlamedFor != null && rackResolver != null) {
        // machine --> rack mapping
        Map<String, String> rackMap = (Map<String, String>) rackResolver.getResult();

        ShuffleBlamedForAnalyzer.ShuffleBlamedForResult result = (ShuffleBlamedForAnalyzer.ShuffleBlamedForResult) shuffleBlamedFor
                .getResult();

        parser.addAdditionalAnalysis("Source machine details..", true);
        for (String srcMachine : result.getSrcMachines()) {
            //machine:45454, containerPriority= 8, containerResources=<memory:3584, vCores:1>
            String machine = srcMachine.substring(0, srcMachine.indexOf(":"));
            parser.addAdditionalAnalysis(machine + " --> " + rackMap.get(machine));
        }

        parser.addAdditionalAnalysis("");
        parser.addAdditionalAnalysis("");
        parser.addAdditionalAnalysis("Fetcher machine details..", true);
        for (String fetcherMachine : result.getFetcherMachines()) {
            //machine:45454, containerPriority= 8, containerResources=<memory:3584, vCores:1>
            String machine = fetcherMachine.substring(0, fetcherMachine.indexOf(":"));
            parser.addAdditionalAnalysis(machine + " --> " + rackMap.get(machine));
        }
    }

    /**
     * For containers timeouts. Relate ContainerTimeoutAnalyzer and NodesAnalyzer
     *
     */
    IAnalyzer containerTimeoutAnalyzer = parser.getAnalyzers().get(ContainerTimeoutAnalyzer.class.getName());
    IAnalyzer nodesAnalyzer = parser.getAnalyzers().get(NodesAnalyzer.class.getName());
    if (nodesAnalyzer != null && containerTimeoutAnalyzer != null) {
        List<String> containersWithTimeout = (List<String>) containerTimeoutAnalyzer.getResult();

        // Node --> <attempt, container>
        Map<String, Map<String, String>> nodesResult = (Map<String, Map<String, String>>) nodesAnalyzer
                .getResult();

        parser.addAdditionalAnalysis("");
        parser.addAdditionalAnalysis("Container time outs and attempt/node details", true);
        for (String container : containersWithTimeout) {
            for (Map.Entry<String, Map<String, String>> nodeEntry : nodesResult.entrySet()) {
                Map<String, String> attemptToContainer = nodeEntry.getValue();
                for (Map.Entry<String, String> attemptEntry : attemptToContainer.entrySet()) {
                    if (attemptEntry.getValue().equalsIgnoreCase(container)) {
                        parser.addAdditionalAnalysis(
                                container + " --> " + nodeEntry.getKey() + " --> " + attemptEntry.getKey());
                    }
                }
            }
        }
        parser.addAdditionalAnalysis("");
    }

    /**
     * Task attempts not finished
     */
    IAnalyzer taskAttemptStarted = parser.getAnalyzers().get(TaskAttemptStartedAnalyzer.class.getName());
    IAnalyzer taskAttemptFinished = parser.getAnalyzers().get(TaskAttemptFinishedAnalyzer.class.getName());
    if (taskAttemptFinished != null && taskAttemptStarted != null) {
        Map<String, TaskAttemptStartedAnalyzer.TaskAttemptStarted> started = (Map<String, TaskAttemptStartedAnalyzer.TaskAttemptStarted>) taskAttemptStarted
                .getResult();
        Map<String, TaskAttemptFinishedAnalyzer.TaskAttemptFinished> finished = (Map<String, TaskAttemptFinishedAnalyzer.TaskAttemptFinished>) taskAttemptFinished
                .getResult();

        parser.addAdditionalAnalysis(
                "List of unfinished tasks!! started=" + started.size() + ", " + "finished=" + finished.size(),
                true);
        for (String task : started.keySet()) {
            //check if this task is in finished keys
            if (!finished.keySet().contains(task)) {
                parser.addAdditionalAnalysis(task + " is not in finished list");
            }
        }
    }

    /**
     * For swimlanes (not including killed tasks)
     */
    /*
    TODO: Need to work on this.
            
            
    IAnalyzer nodeAnalyzer = parser.getAnalyzers()
        .get(NodesAnalyzer.class.getName());
    IAnalyzer taFinishedAnalyzer = parser.getAnalyzers()
        .get(TaskAttemptFinishedAnalyzer.class.getName());
    if (nodeAnalyzer != null && taFinishedAnalyzer != null) {
      // machine --> task --> container
      Map<String, Map<String, String>> nodes =
          (Map<String, Map<String, String>>) nodeAnalyzer.getResult();
      // taskIDStr --> taskAttemptFinished
      Map<String, TaskAttemptFinishedAnalyzer.TaskAttemptFinished> taFinishedMap =
          (Map<String, TaskAttemptFinishedAnalyzer.TaskAttemptFinished>)
      taFinishedAnalyzer.getResult();
            
      //Dirty hack to get all DAG
      Set<String> allDags = Sets.newHashSet();
      for(Map.Entry<String, Map<String, String>> entry : nodes.entrySet()) {
        for (Map.Entry<String, String> taskEntry : entry.getValue().entrySet()) {
          String taskId = taskEntry.getKey();
          //attempt_1478350923850_0006_7
          allDags.add(taskId.substring(0, 28));
        }
      }
            
      // Construct a map of machine_container --> List<TaskAttemptId> from analyzer dataset.
      final Map<String, TreeSet<TaskAttemptFinishedAnalyzer.TaskAttemptFinished>> mapping = Maps.newHashMap();
      long minTime = Long.MAX_VALUE;
      long maxTime = Long.MIN_VALUE;
      for(String dag : allDags) {
        for (Map.Entry<String, Map<String, String>> entry : nodes.entrySet()) {
          for (Map.Entry<String, String> taskEntry : entry.getValue().entrySet()) {
    String machine = entry.getKey();
            
    String taskId = taskEntry.getKey();
    String containerId = taskEntry.getValue();
            
    if (!taskId.contains("1478350923850_0006_9")) {
      continue;
    }
            
    String machineContainer = machine + "_" + containerId;
    TreeSet<TaskAttemptFinishedAnalyzer.TaskAttemptFinished> attempts = mapping.get
        (machineContainer);
            
    if (attempts == null) {
      attempts = new TreeSet<>(
          new Comparator<TaskAttemptFinishedAnalyzer.TaskAttemptFinished>() {
            @Override public int compare(TaskAttemptFinishedAnalyzer.TaskAttemptFinished o1,
                TaskAttemptFinishedAnalyzer.TaskAttemptFinished o2) {
              if (Long.parseLong(o1.startTime) < Long.parseLong(o2.startTime)) {
                return -1;
              } else if (Long.parseLong(o1.startTime) > Long.parseLong(o2.startTime)) {
                return 1;
              } else {
                return 0;
              }
            }
          });
      mapping.put(machineContainer, attempts);
    }
            
    //Check if the attempt id is available in finished maps
    if (taFinishedMap.containsKey(taskId)) {
      TaskAttemptFinishedAnalyzer.TaskAttemptFinished attempt = taFinishedMap.get(taskId);
      attempts.add(attempt);
      if (Long.parseLong(attempt.finishTime) >= maxTime) {
        maxTime = Long.parseLong(attempt.finishTime);
      } else if (Long.parseLong(attempt.startTime) <= minTime) {
        minTime = Long.parseLong(attempt.startTime);
      }
    }
          }
        }
      }
            
      // draw SVG
      System.out.println("MinTime: " + minTime + ". maxTime: " + maxTime);
      SVGUtils svg = new SVGUtils(minTime, maxTime, new TreeSet(mapping.keySet()));
      int yOffset = 1;
      for(Map.Entry<String, TreeSet<TaskAttemptFinishedAnalyzer.TaskAttemptFinished>> entry :
          mapping.entrySet()) {
        for (TaskAttemptFinishedAnalyzer.TaskAttemptFinished task : entry.getValue()) {
          //draw lines
          svg.drawStep(task.vertexId, Long.parseLong(task.startTime), Long.parseLong(task
          .timeTaken), yOffset, "LightGreen");
        }
        yOffset++;
      }
            
      svg.saveFileStr("/tmp/test.svg");
      System.out.println("Wrote to /tmp/test.svg");
            
      // Now generate the swimlane.
            
            
    }
    */

    System.out.println();
    parser.writeAnalysis();

    System.out.println("Time taken " + (sw.elapsed(TimeUnit.SECONDS)) + " seconds");
    sw.stop();
}