Example usage for java.lang.System.setProperty

List of usage examples for java.lang.System.setProperty

Introduction

On this page you can find example usages of java.lang.System.setProperty.

Prototype

public static String setProperty(String key, String value) 

Document

Sets the system property indicated by the specified key and returns the previous value of the property, or null if it did not have one.
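
Before the project examples below, here is a minimal, self-contained sketch (the class name and the "demo.greeting" key are illustrative, not taken from any of the examples) showing that setProperty returns the previous value of the property and that the new value is immediately visible through System.getProperty:

public class SetPropertyDemo {
    public static void main(String[] args) {
        // setProperty returns the previous value of the property, or null if it was not set
        String previous = System.setProperty("demo.greeting", "hello");
        System.out.println("previous value: " + previous); // null on a fresh JVM

        // the new value is visible to all code running in this JVM
        System.out.println("current value: " + System.getProperty("demo.greeting"));

        // overwriting the property hands back the value it replaced
        previous = System.setProperty("demo.greeting", "bonjour");
        System.out.println("replaced value: " + previous); // prints "hello"
    }
}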

Usage

From source file:de.metas.ui.web.WebRestApiApplication.java

public static void main(String[] args) {
    if (Check.isEmpty(System.getProperty("PropertyFile"), true)) {
        System.setProperty("PropertyFile", "./metasfresh.properties");
    }

    // important because in Ini, there is an org.springframework.context.annotation.Condition that otherwise wouldn't e.g. let the jasper servlet start
    Ini.setRunMode(RunMode.WEBUI);

    new SpringApplicationBuilder(WebRestApiApplication.class).headless(false) // FIXME: we need it for initial connection setup popup (if any)
            .web(true).profiles(PROFILE_Webui).run(args);

}

From source file:hadoopInstaller.Main.java

public static void main(String[] args) {
    // Disable VFS logging to console by default
    System.setProperty("org.apache.commons.logging.Log", //$NON-NLS-1$
            "org.apache.commons.logging.impl.NoOpLog"); //$NON-NLS-1$
    // Configure SimpleLog to show date and omit log name
    System.setProperty("org.apache.commons.logging.simplelog.showdatetime", "true"); //$NON-NLS-1$//$NON-NLS-2$
    System.setProperty("org.apache.commons.logging.simplelog.showlogname", "false"); //$NON-NLS-1$//$NON-NLS-2$
    System.setProperty("org.apache.commons.logging.simplelog.showShortLogname", "false"); //$NON-NLS-1$//$NON-NLS-2$
    try (PrintStream filePrintStream = new PrintStream(VFS.getManager()
            .resolveFile(MessageFormat.format("file://{0}/{1}", //$NON-NLS-1$
                    System.getProperty("user.dir"), Main.FILE_LOG_NAME)) //$NON-NLS-1$
            .getContent().getOutputStream(true))) {

        CompositeLog log = new CompositeLog();
        Integer logLevel = detectLogLevel(args);
        PrintStreamLog consoleLog = new PrintStreamLog(Installer.INSTALLER_NAME, System.out);
        consoleLog.setLevel(logLevel);
        log.addLog(consoleLog);
        PrintStreamLog fileLog = new PrintStreamLog(Installer.INSTALLER_NAME, filePrintStream);
        fileLog.setLevel(logLevel);
        log.addLog(fileLog);
        boolean deploy = Arrays.asList(args).contains("-deploy"); //$NON-NLS-1$
        try {
            new Installer(log, deploy).run();
        } catch (InstallationFatalError e) {
            log.fatal(e.getLocalizedMessage());
            log.fatal(e.getCause().getLocalizedMessage());
            log.trace(e.getLocalizedMessage(), e);
        }
    } catch (FileSystemException e) {
        new PrintStreamLog(Installer.INSTALLER_NAME, System.err).fatal(e.getLocalizedMessage(), e);
        System.exit(1);
    }

    /*
     * TODO-- ssh-ask
     * 
     * Consider using a configuration that doesn't require password-less
     * authentication, but sets it up for the final cluster.
     */
}

From source file:com.openmeap.model.ModelTestUtils.java

/**
 * I used this mainly to generate the DDL.
 * @param argv
 */
static public void main(String[] argv) {
    //resetTestDb();
    //getPersistenceBean("modelManager");
    if (persistenceBeans == null) {

        System.setProperty("hibernate.show_sql", "true");
        System.setProperty("hibernate.hbm2ddl.auto", "update");
        System.setProperty("hibernate.dialect", "org.hibernate.dialect.MySQLDialect");
        System.setProperty("hibernate.connection.driver_class", "com.mysql.jdbc.Driver");
        System.setProperty("hibernate.connection.url", "jdbc:mysql://localhost:3306/openmeap");
        System.setProperty("hibernate.connection.username", "openmeap");
        System.setProperty("hibernate.connection.password", "password");

        persistenceBeans = new ClassPathXmlApplicationContext(
                new String[] { "/META-INF/persistenceContext.xml", "/META-INF/test/persistenceContext.xml" });
    }
    createModel(null);
}

From source file:com.ariatemplates.seleniumjavarobot.Main.java

public static void main(String[] args) throws Exception {
    SeleniumJavaRobot seleniumJavaRobot = new SeleniumJavaRobot();
    String browser;
    seleniumJavaRobot.autoRestart = false;
    if (OS.isFamilyMac()) {
        browser = "safari";
    } else {
        browser = "firefox";
    }
    seleniumJavaRobot.url = "http://localhost:7777/__attester__/slave.html";
    String usageString = String.format(
            "Usage: selenium-java-robot [options]\nOptions:\n  --auto-restart\n  --url <url> [default: %s]\n  --browser <browser> [default: %s, accepted values: %s]\n  -DpropertyName=value",
            seleniumJavaRobot.url, browser, BROWSERS_LIST.toString());
    for (int i = 0, l = args.length; i < l; i++) {
        String curParam = args[i];
        if ("--browser".equalsIgnoreCase(curParam) && i + 1 < l) {
            browser = args[i + 1];
            i++;
        } else if ("--url".equalsIgnoreCase(curParam) && i + 1 < l) {
            seleniumJavaRobot.url = args[i + 1];
            i++;
        } else if ("--auto-restart".equalsIgnoreCase(curParam)) {
            seleniumJavaRobot.autoRestart = true;
        } else if ("--version".equalsIgnoreCase(curParam)) {
            System.out.println(Main.class.getPackage().getImplementationVersion());
            return;
        } else if ("--help".equalsIgnoreCase(curParam)) {
            System.out.println(usageString);
            return;
        } else {
            Matcher matcher = SET_SYSTEM_PROPERTY_REGEXP.matcher(curParam);
            if (matcher.matches()) {
                System.setProperty(matcher.group(1), matcher.group(2));
            } else {
                System.err.println("Unknown command line option: " + curParam);
                System.err.println(usageString);
                return;
            }
        }
    }
    seleniumJavaRobot.robotizedBrowserFactory = LocalRobotizedBrowserFactory
            .createRobotizedWebDriverFactory(browser);
    seleniumJavaRobot.start();
    closeOnStreamEnd(seleniumJavaRobot, System.in);
    closeOnProcessEnd(seleniumJavaRobot);
}

From source file:com.thinkbiganalytics.server.KyloServerApplication.java

public static void main(String[] args) {

    KyloVersion dbVersion = getDatabaseVersion();

    boolean skipUpgrade = KyloVersionUtil.isUpToDate(dbVersion);

    if (!skipUpgrade) {
        boolean upgradeComplete = false;
        do {
            log.info("Upgrading...");
            System.setProperty(SpringApplication.BANNER_LOCATION_PROPERTY, "upgrade-banner.txt");
            ConfigurableApplicationContext cxt = SpringApplication.run(UpgradeKyloConfig.class);
            KyloUpgrader upgrader = cxt.getBean(KyloUpgrader.class);
            upgradeComplete = upgrader.upgrade();
            cxt.close();
        } while (!upgradeComplete);
        log.info("Upgrading complete");
    } else {
        log.info("Kylo v{} is up to date.  Starting the application.", dbVersion);
    }
    System.setProperty(SpringApplication.BANNER_LOCATION_PROPERTY, "banner.txt");
    SpringApplication.run("classpath:application-context.xml", args);
}

From source file:ee.ria.xroad.asyncsender.AsyncSenderIntegrationTest.java

/**
 * Entry point.
 * @param args arguments
 * @throws Exception if an error occurs
 */
@SuppressWarnings("unchecked")
public static void main(String[] args) throws Exception {
    System.setProperty(SystemProperties.ASYNC_DB_PATH, "build");

    DummyProxy proxy = new DummyProxy();
    proxy.start();

    // Create Queue 1 -----------------------------------------------------

    final NextAttempt attempts1 = new NextAttempt(getDate(0, 5), getDate(0, 12));

    QueueInfo info1 = mock(QueueInfo.class);
    when(info1.getName()).thenReturn(createServiceId("mockedQueue1"));

    doAnswer(new Answer<Object>() {
        @Override
        public Object answer(InvocationOnMock invocation) throws Throwable {
            return attempts1.getCurrent();
        }
    }).when(info1).getNextAttempt();

    MessageQueue queue1 = mock(MessageQueue.class);
    when(queue1.getQueueInfo()).thenReturn(info1);

    SendingCtx sendingCtx1 = createSendingCtx();
    doAnswer(new Answer<Object>() {
        @Override
        public Object answer(InvocationOnMock invocation) throws Throwable {
            attempts1.next();
            return null;
        }
    }).when(sendingCtx1).success(any(String.class));

    // Create Queue 2 -----------------------------------------------------

    final NextAttempt attempts2 = new NextAttempt(getDate(0, 10));

    QueueInfo info2 = mock(QueueInfo.class);
    when(info2.getName()).thenReturn(createServiceId("mockedQueue2"));

    doAnswer(new Answer<Object>() {
        @Override
        public Object answer(InvocationOnMock invocation) throws Throwable {
            return attempts2.getCurrent();
        }
    }).when(info2).getNextAttempt();

    MessageQueue queue2 = mock(MessageQueue.class);
    when(queue2.getQueueInfo()).thenReturn(info2);

    SendingCtx sendingCtx2 = createSendingCtx();
    doAnswer(new Answer<Object>() {
        @Override
        public Object answer(InvocationOnMock invocation) throws Throwable {
            attempts2.next();
            return null;
        }
    }).when(sendingCtx2).success(any(String.class));

    when(queue1.startSending()).thenReturn(sendingCtx1);
    when(queue2.startSending()).thenReturn(sendingCtx2);

    List<MessageQueue> queues = Arrays.asList(queue1, queue2);

    AsyncSender sender = Mockito.mock(AsyncSender.class);
    when(sender.getMessageQueues()).thenReturn(queues, new ArrayList<MessageQueue>());

    sender.startUp(true);

    proxy.stop();
    proxy.join();
}

From source file:com.thesmartweb.swebrank.Main.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    Path input_path = Paths.get("//mnt//var//DBs//inputsL10//nba//");//input directory
    String output_parent_directory = "//mnt//var//DBs//outputsConfL10//nba//";//output directory
    String config_path = "//mnt//var//DBs//config//";//config directory
    //---Disable apache log manually----
    //System.setProperty("org.apache.commons.logging.Log","org.apache.commons.logging.impl.NoOpLog");
    System.setProperty("org.apache.commons.logging.Log", "org.apache.commons.logging.impl.Log4JLogger");
    //--------------Domain that is searched----------
    String domain = "";
    //------------------search engine related options----------------------
    List<String> queries = null;
    int results_number = 0;//the number of results that are returned from each search engine
    List<Boolean> enginechoice = null;
    //list element #0. True/False Bing
    //list element #1. True/False Google
    //list element #2. True/False Yahoo!
    //list element #3. True/False Merged
    //-----------Moz options---------------------
    List<Boolean> mozMetrics = null;
    //The list is going to contain the moz related input in the following order
    //list element #1. True/False, True we use Moz API, false not
    //list element #2. True if we use Domain Authority
    //list element #3. True if we use External MozRank
    //list element #4. True if we use MozRank
    //list element #5. True if we use MozTrust
    //list element #6. True if we use Subdomain MozRank
    //list element #7. True if we use Page Authority
    //only one is used (the first to be set to true)
    boolean moz_threshold_option = false;//set to true we use the threshold
    Double moz_threshold = 0.0;//if we want to have a threshold in moz
    int top_count_moz = 0;//if we want to get the moz top-something results
    //---------------Semantic Analysis method----------------
    List<Boolean> ContentSemantics = null;
    int SensebotConcepts = 0;//define the amount of concepts that sensebot is going to recognize
    List<Double> SWebRankSettings = null;
    //------(string)directory is going to be used later-----
    String output_child_directory;
    //-------we get all the paths of the txt (input) files from the input directory-------
    DataManipulation getfiles = new DataManipulation();//class responsible for the extraction of paths
    Collection<File> inputs_files;//array to include the paths of the txt files
    inputs_files = getfiles.getinputfiles(input_path.toString(), "txt");//method to retrieve all the path of the input documents
    //------------read the txt files------------
    for (File input : inputs_files) {
        ReadInput ri = new ReadInput();//function to read the input
        boolean check_reading_input = ri.perform(input);
        if (check_reading_input) {
            domain = ri.domain;
            //----------
            queries = ri.queries;
            results_number = ri.results_number;
            enginechoice = ri.enginechoice;
            //------------
            mozMetrics = ri.mozMetrics;
            moz_threshold_option = ri.moz_threshold_option;
            moz_threshold = ri.moz_threshold.doubleValue();
            //---------------
            ContentSemantics = ri.ContentSemantics;
            SWebRankSettings = ri.SWebRankSettings;
        }
        int top_visible = 0;//option to set the amount of results you can get in the merged search engine
        //------if we choose to use a Moz metric or Visibility score for our ranking, we need to set the results_number for the search engines to its max which is 50 
        //-----we set the top results number for moz or Visibility rank----
        if (mozMetrics.get(0) || enginechoice.get(3)) {
            if (mozMetrics.get(0)) {
                top_count_moz = results_number;
            } //if moz is true, top_count_moz gets the value of result number
            if (enginechoice.get(3)) {
                top_visible = results_number;
            } //if merged engine is true, top_visible gets the value of result number
            results_number = 50;//this is the max amount of results that you can get from the search engine APIs
        }
        //-----if we want to use Moz we should check first if it works
        if (mozMetrics.get(0)) {
            Moz Moz = new Moz();
            //---if it works, moz remains true, otherwise it is set to false
            mozMetrics.add(0, Moz.check(config_path));
            //if it is false and we have chosen to use Visibility score with Moz, we reset back to the standard settings (ranking and not merged)
            //therefore, we reset the number of results from 50 to the top_count_moz which contained the original number of results
            if (!mozMetrics.get(0)) {
                if (!enginechoice.get(3)) {
                    results_number = top_count_moz;
                }
            }
        }
        //----------we set the wordLists that we are going to use---------------------
        List<String> finalList = new ArrayList<String>();//finalList is going to contain all the content in the end
        Total_analysis ta = new Total_analysis();//we call total analysis
        int iteration_counter = 0;//the iteration_counter is used in order to count the number of iterations of the algorithm and to be checked with perf_limit
        //this list of arraylists  is going to contain all the wordLists that are produced for every term of the String[] query,
        //in order to calculate the NGD scores between every term of the wordList and the term that was used as query in order to produce the specific wordList
        List<ArrayList<String>> array_wordLists = new ArrayList<>();
        List<String> wordList_previous = new ArrayList<>();
        List<String> wordList_new = new ArrayList<>();
        double convergence = 0;//we create the convergence percentage and initialize it
        String conv_percentages = "";//string that contains all the convergence percentages
        DataManipulation wordsmanipulation = new DataManipulation();//method to manipulate various word data (String, list<String>, etc)
        do { //if we run the algorithm for the 1st time we already have the query so we skip the loop below that produces the new array of query
            if (iteration_counter != 0) {
                wordList_previous = wordList_new;
                //we add the previous wordList to the finalList
                finalList = wordsmanipulation.AddAList(wordList_previous, finalList);
                List<String> query_new_list_total = new ArrayList<>();
                int iteration_previous = iteration_counter - 1;
                Combinations_Engine cn = new Combinations_Engine();//call the class to combine the terms produced
                for (String query : queries) {
                    List<String> ids = new ArrayList<>();
                    if (enginechoice.get(0)) {
                        String id = domain + "/" + query + "/bing" + "/" + iteration_previous;
                        ids.add(id);
                    }
                    if (enginechoice.get(1)) {
                        String id = domain + "/" + query + "/google" + "/" + iteration_previous;
                        ids.add(id);
                    }
                    if (enginechoice.get(2)) {
                        String id = domain + "/" + query + "/yahoo" + "/" + iteration_previous;
                        ids.add(id);
                    }
                    ElasticGetWordList ESget = new ElasticGetWordList();//we call this class to get the wordlist from the Elastic Search
                    List<String> maxWords = ESget.getMaxWords(ids, SWebRankSettings.get(9).intValue(),
                            config_path);//we are going to get a max amount of words
                    int query_index = queries.indexOf(query);
                    int size_query_new = SWebRankSettings.get(10).intValue();//the amount of new queries we are willing to create
                    //we create the new queries for every query of the previous round by combining the words produced from this query
                    List<String> query_new_list = cn.perform(maxWords, SWebRankSettings.get(7), queries,
                            SWebRankSettings.get(6), query_index, size_query_new, config_path);
                    //we add the list of new queries to the total list that contains all the new queries
                    query_new_list_total.addAll(query_new_list);
                    System.out.println("query pointer=" + query_index + "");
                }
                //---------------------the following cleans a list from null and duplicates
                query_new_list_total = wordsmanipulation.clearListString(query_new_list_total);
                //--------------we create the new directory that our files are going to be saved 
                String txt_directory = FilenameUtils.getBaseName(input.getName());
                output_child_directory = output_parent_directory + txt_directory + "_level_" + iteration_counter
                        + "//";
                //----------------append the wordlist to a file------------------
                wordsmanipulation.AppendWordList(query_new_list_total,
                        output_child_directory + "queries_" + iteration_counter + ".txt");
                if (query_new_list_total.size() < 1) {
                    break;
                } //if we don't create new queries we end the while loop
                //total analysis' function is going to do all the work and return back what we need
                ta = new Total_analysis();
                ta.perform(wordList_previous, iteration_counter, output_child_directory, domain, enginechoice,
                        query_new_list_total, results_number, top_visible, mozMetrics, moz_threshold_option,
                        moz_threshold.doubleValue(), top_count_moz, ContentSemantics, SensebotConcepts,
                        SWebRankSettings, config_path);
                //we get the array of wordlists
                array_wordLists = ta.getarray_wordLists();
                //get the wordlist that includes all the new queries
                wordList_new = ta.getwordList_total();
                //---------------------the following cleans a list from null and duplicates-------------
                wordList_new = wordsmanipulation.clearListString(wordList_new);
                //----------------append the wordlist to a file--------------------
                wordsmanipulation.AppendWordList(wordList_new, output_child_directory + "wordList.txt");
                //the convergence percentage of this iteration
                convergence = ta.getConvergence();//we are going to use convergence score to check the convergence
                //a string that contains all the convergence percentage for each round separated by \n character
                conv_percentages = conv_percentages + "\n" + convergence;
                //a file that is going to include the convergence percentages
                wordsmanipulation.AppendString(conv_percentages,
                        output_child_directory + "convergence_percentage.txt");
                //we add the new wordList to the finalList
                finalList = wordsmanipulation.AddAList(wordList_new, finalList);
                //we set the query array to be equal to the query new total that we have created
                queries = query_new_list_total;
                //we increment the iteration_counter in order to count the iterations of the algorithm and to use the perf_limit
                iteration_counter++;
            } else {//the following source code is performed on the 1st run of the loop
                //------------we extract the parent path of the file
                String txt_directory = FilenameUtils.getBaseName(input.getName());
                //----------we create a string that is going to be used for the corresponding directory of outputs
                output_child_directory = output_parent_directory + txt_directory + "_level_" + iteration_counter
                        + "//";
                //we call total analysis function performOld
                ta.perform(wordList_new, iteration_counter, output_child_directory, domain, enginechoice,
                        queries, results_number, top_visible, mozMetrics, moz_threshold_option,
                        moz_threshold.doubleValue(), top_count_moz, ContentSemantics, SensebotConcepts,
                        SWebRankSettings, config_path);
                //we get the array of wordlists
                array_wordLists = ta.getarray_wordLists();
                //get the wordlist that includes all the new queries
                wordList_new = ta.getwordList_total();
                //---------------------the following cleans a list from null and duplicates
                wordList_new = wordsmanipulation.clearListString(wordList_new);
                //----------------append the wordlist to a file
                wordsmanipulation.AppendWordList(wordList_new, output_child_directory + "wordList.txt");
                //-----------------------------------------
                iteration_counter++;//increase the iteration_counter that counts the iterations of the algorithm
            }
        } while (convergence < SWebRankSettings.get(5).doubleValue()
                && iteration_counter < SWebRankSettings.get(8).intValue());//while the convergence percentage is below the limit and the iteration_counter below the performance limit
        if (iteration_counter == 1) {
            finalList = wordsmanipulation.AddAList(wordList_new, finalList);
        }
        //--------------------content List----------------
        if (!finalList.isEmpty()) {
            //---------------------the following cleans the final list from null and duplicates
            finalList = wordsmanipulation.clearListString(finalList);
            //write the keywords to a file
            boolean flag_file = false;//boolean flag to declare successful write to file
            flag_file = wordsmanipulation.AppendWordList(finalList,
                    output_parent_directory + "total_content.txt");
            if (!flag_file) {
                System.out.print("can not create the content file for: " + output_parent_directory
                        + "total_content.txt");
            }
        }
        //we are going to save the total content with its convergence on the ElasticSearch cluster in a separated index
        //Node node = nodeBuilder().client(true).clusterName("lshrankldacluster").node();
        //Client client = node.client();
        //get the elastic search indexes in a list
        List<String> elasticIndexes = ri.GetKeyFile(config_path, "elasticSearchIndexes");
        Settings settings = ImmutableSettings.settingsBuilder().put("cluster.name", "lshrankldacluster")
                .build();
        Client client = new TransportClient(settings)
                .addTransportAddress(new InetSocketTransportAddress("localhost", 9300));
        JSONObject objEngineLevel = new JSONObject();
        objEngineLevel.put("TotalContent", finalList);//we save the total content
        objEngineLevel.put("Convergences", conv_percentages);//we save the convergence percentages
        IndexRequest indexReq = new IndexRequest(elasticIndexes.get(0), "content", domain);//we save also the domain 
        indexReq.source(objEngineLevel);
        IndexResponse indexRes = client.index(indexReq).actionGet();
        //node.close();
        client.close();
        //----------------------convergence percentages writing to file---------------
        //use the conv_percentages string
        if (conv_percentages.length() != 0) {
            boolean flag_file = false;//boolean flag to declare successful write to file
            flag_file = wordsmanipulation.AppendString(conv_percentages,
                    output_parent_directory + "convergence_percentages.txt");
            if (!flag_file) {
                System.out.print("can not create the convergence file for: " + output_parent_directory
                        + "convergence_percentages.txt");
            }
        }
    }
}

From source file:com.hortonworks.atlas.trash.DemoClass.java

public static void main(String[] args) throws Exception {
    // TODO Auto-generated method stub

    if (args.length < 1) {
        throw new Exception("Please provide the DGI host url");
    }

    System.setProperty("atlas.conf", "/Users/sdutta/Applications/conf");

    String baseUrl = getServerUrl(args);

    DemoClass dc = new DemoClass(baseUrl);

    // Shows how to create types in Atlas for your meta model
    dc.createTypes();

    // Shows how to create entities (instances) for the added types in Atlas
    dc.createEntities();

    // Shows some search queries using DSL based on types
    //dc.search();

}

From source file:it.tizianofagni.sparkboost.BoostClassifierExe.java

public static void main(String[] args) {

    Options options = new Options();
    options.addOption("b", "binaryProblem", false,
            "Indicate if the input dataset contains a binary problem and not a multilabel one");
    options.addOption("z", "labels0based", false,
            "Indicate if the labels IDs in the dataset to classifyLibSvmWithResults are already assigned in the range [0, numLabels-1] included");
    options.addOption("l", "enableSparkLogging", false, "Enable logging messages of Spark");
    options.addOption("w", "windowsLocalModeFix", true,
            "Set the directory containing the winutils.exe command");
    options.addOption("p", "parallelismDegree", true,
            "Set the parallelism degree (default: number of available cores in the Spark runtime");

    CommandLineParser parser = new BasicParser();
    CommandLine cmd = null;
    String[] remainingArgs = null;
    try {
        cmd = parser.parse(options, args);
        remainingArgs = cmd.getArgs();
        if (remainingArgs.length != 3)
            throw new ParseException("You need to specify all mandatory parameters");
    } catch (ParseException e) {
        System.out.println("Parsing failed.  Reason: " + e.getMessage());
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(
                BoostClassifierExe.class.getSimpleName() + " [OPTIONS] <inputFile> <inputModel> <outputFile>",
                options);
        System.exit(-1);
    }

    boolean binaryProblem = false;
    if (cmd.hasOption("b"))
        binaryProblem = true;
    boolean labels0Based = false;
    if (cmd.hasOption("z"))
        labels0Based = true;
    boolean enablingSparkLogging = false;
    if (cmd.hasOption("l"))
        enablingSparkLogging = true;

    if (cmd.hasOption("w")) {
        System.setProperty("hadoop.home.dir", cmd.getOptionValue("w"));
    }

    String inputFile = remainingArgs[0];
    String inputModel = remainingArgs[1];
    String outputFile = remainingArgs[2];

    long startTime = System.currentTimeMillis();

    // Disable Spark logging.
    if (!enablingSparkLogging) {
        Logger.getLogger("org").setLevel(Level.OFF);
        Logger.getLogger("akka").setLevel(Level.OFF);
    }

    // Create and configure Spark context.
    SparkConf conf = new SparkConf().setAppName("Spark MPBoost classifier");
    JavaSparkContext sc = new JavaSparkContext(conf);

    // Load boosting classifier from disk.
    BoostClassifier classifier = DataUtils.loadModel(sc, inputModel);

    // Get the parallelism degree.
    int parallelismDegree = sc.defaultParallelism();
    if (cmd.hasOption("p")) {
        parallelismDegree = Integer.parseInt(cmd.getOptionValue("p"));
    }

    // Classify documents available on specified input file.
    classifier.classifyLibSvm(sc, inputFile, parallelismDegree, labels0Based, binaryProblem, outputFile);
    long endTime = System.currentTimeMillis();
    System.out.println("Execution time: " + (endTime - startTime) + " milliseconds.");
}

From source file:com.kurtraschke.wmata.gtfsrealtime.WMATARealtimeMain.java

public static void main(String[] args) throws Exception {
    System.setProperty("net.sf.ehcache.enableShutdownHook", "true");
    WMATARealtimeMain m = new WMATARealtimeMain();
    try {
        m.run(args);
    } catch (CreationException | ConfigurationException | ProvisionException e) {
        _log.error("Error in startup:", e);
        System.exit(-1);
    }
}