List of usage examples for org.apache.commons.cli CommandLineParser parse
CommandLine parse(Options options, String[] arguments) throws ParseException;
From source file:de.bamamoto.mactools.png2icns.Scaler.java
/**
 * Command-line entry point: reads a square PNG and writes a macOS .iconset
 * folder containing all standard icon sizes, then invokes /usr/bin/iconutil
 * to pack the folder into an .icns file.
 *
 * Options:
 *   -i / --input-filename      source PNG (should be at least 1024x1024)
 *   -o / --iconset-foldername  target folder; ".iconset" suffix is appended
 *                              if missing (default: /tmp/noname.iconset)
 */
public static void main(String[] args) {
    Options options = new Options();
    // NOTE(review): fixed help-text typos ("ofthe" -> "of the", "1024x124" -> "1024x1024")
    options.addOption("i", "input-filename", true,
            "Filename of the image containing the icon. The image should be a square with at least 1024x1024 pixel in PNG format.");
    options.addOption("o", "iconset-foldername", true,
            "Name of the folder where the iconset will be stored. The extension .iconset will be added automatically.");
    String folderName;
    CommandLineParser parser = new DefaultParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(options, args);
        if (cmd.hasOption("i")) {
            if (new File(cmd.getOptionValue("i")).isFile()) {
                folderName = cmd.hasOption("o") ? cmd.getOptionValue("o") : "/tmp/noname.iconset";
                if (!folderName.endsWith(".iconset")) {
                    folderName = folderName + ".iconset";
                }
                new File(folderName).mkdirs();
                BufferedImage source = ImageIO.read(new File(cmd.getOptionValue("i")));
                // Apple iconset convention: icon_NxN@2x.png holds a 2N x 2N image,
                // so each rendered size serves both the @2x slot of the smaller
                // size and the 1x slot of its own size.
                BufferedImage resized = resize(source, 1024, 1024);
                save(resized, folderName + "/icon_512x512@2x.png");
                resized = resize(source, 512, 512);
                save(resized, folderName + "/icon_512x512.png");
                save(resized, folderName + "/icon_256x256@2x.png");
                resized = resize(source, 256, 256);
                save(resized, folderName + "/icon_256x256.png");
                save(resized, folderName + "/icon_128x128@2x.png");
                resized = resize(source, 128, 128);
                save(resized, folderName + "/icon_128x128.png");
                resized = resize(source, 64, 64);
                save(resized, folderName + "/icon_32x32@2x.png");
                resized = resize(source, 32, 32);
                save(resized, folderName + "/icon_32x32.png");
                save(resized, folderName + "/icon_16x16@2x.png");
                resized = resize(source, 16, 16);
                save(resized, folderName + "/icon_16x16.png");
                // iconutil (macOS only) converts the .iconset folder to .icns
                Scaler.runProcess(new String[] { "/usr/bin/iconutil", "-c", "icns", folderName });
            }
        }
    } catch (IOException e) {
        System.out.println("Error reading image: " + cmd.getOptionValue("i"));
        e.printStackTrace();
    } catch (ParseException ex) {
        Logger.getLogger(Scaler.class.getName()).log(Level.SEVERE, null, ex);
    }
}
From source file:eu.scape_project.arc2warc.Arc2WarcHadoopJob.java
/**
 * Main entry point for the ARC-to-WARC Hadoop job.
 *
 * Hadoop's own arguments (e.g. -D, -conf) are consumed first by
 * GenericOptionsParser; the remaining arguments are parsed against the
 * job-specific options. With no arguments or the help flag, usage is
 * printed and the process exits; otherwise the CliConfig is populated
 * and the Hadoop job is started.
 *
 * @param args command-line arguments
 * @throws Exception on argument-parsing or job failure
 */
public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Command line interface
    config = new CliConfig();
    // NOTE(review): PosixParser is deprecated in commons-cli 1.3+; DefaultParser replaces it
    CommandLineParser cmdParser = new PosixParser();
    // Strip Hadoop generic options before parsing application options
    GenericOptionsParser gop = new GenericOptionsParser(conf, args);
    CommandLine cmd = cmdParser.parse(Options.OPTIONS, gop.getRemainingArgs());
    if ((args.length == 0) || (cmd.hasOption(Options.HELP_OPT))) {
        // Options.exit prints usage and terminates with the given status
        Options.exit("Usage", 0);
    } else {
        Options.initOptions(cmd, config);
    }
    startHadoopJob(conf);
}
From source file:net.sourceforge.dita4publishers.tools.ditadxpmappackager.DitaDxpMapPackager.java
/** * @param args//from w w w . j a va 2s . co m */ public static void main(String[] args) { Options cmdlineOptions = configureOptions(); CommandLineParser parser = new PosixParser(); CommandLine cmdline = null; try { // parse the command line arguments cmdline = parser.parse(cmdlineOptions, args); } catch (ParseException exp) { HelpFormatter formatter = new HelpFormatter(); formatter.printHelp(DitaDxpMapPackager.class.getSimpleName(), cmdlineOptions); System.exit(-1); } if (!cmdline.hasOption(INPUT_OPTION_ONE_CHAR)) { HelpFormatter formatter = new HelpFormatter(); formatter.printHelp(DitaDxpMapPackager.class.getSimpleName(), cmdlineOptions); System.exit(-1); } DitaDxpMapPackager app = new DitaDxpMapPackager(cmdline); try { app.run(); } catch (Exception e) { e.printStackTrace(); System.exit(1); } }
From source file:junkProducer.java
/**
 * Command-line entry point: connects to Kafka via a GeoMesa KafkaDataStore,
 * creates the "junk" feature schema (and its Kafka topic) if needed, then
 * continuously writes SimpleFeatures for consumption by a live consumer.
 *
 * @param args command-line arguments describing the Kafka connection
 * @throws Exception if the producer data store cannot be obtained
 */
public static void main(String[] args) throws Exception {
    // read command line args for a connection to Kafka
    // NOTE(review): BasicParser is deprecated in commons-cli 1.3+; kept as-is
    CommandLineParser parser = new BasicParser();
    Options options = getCommonRequiredOptions();
    CommandLine cmd = parser.parse(options, args);
    // create the producer and consumer KafkaDataStore objects
    Map<String, String> dsConf = getKafkaDataStoreConf(cmd);
    // dsConf.put("isProducer", "true");
    DataStore producerDS = DataStoreFinder.getDataStore(dsConf);
    // verify that we got back our KafkaDataStore objects properly
    if (producerDS == null) {
        throw new Exception("Null producer KafkaDataStore");
    }
    // create the schema which creates a topic in Kafka
    // (only needs to be done once)
    final String sftName = "junk";
    final String sftSchema = "trainStatus:String,trainCode:String,publicMessage:String,direction:String,dtg:Date,*geom:Point:srid=4326";
    SimpleFeatureType sft = SimpleFeatureTypes.createType(sftName, sftSchema);
    // set zkPath to default if not specified
    String zkPath = (dsConf.get(ZK_PATH) == null) ? "/geomesa/ds/kafka" : dsConf.get(ZK_PATH);
    // attach the Zookeeper path as streaming metadata on the feature type
    SimpleFeatureType preppedOutputSft = KafkaDataStoreHelper.createStreamingSFT(sft, zkPath);
    // only create the schema if it hasn't been created already
    if (!Arrays.asList(producerDS.getTypeNames()).contains(sftName))
        producerDS.createSchema(preppedOutputSft);
    // the live consumer must be created before the producer writes features
    // in order to read streaming data.
    // i.e. the live consumer will only read data written after its instantiation
    SimpleFeatureStore producerFS = (SimpleFeatureStore) producerDS.getFeatureSource(sftName);
    // creates and adds SimpleFeatures to the producer on an interval
    System.out.println("Writing features to Kafka... refresh GeoServer layer preview to see changes");
    addSimpleFeatures(sft, producerFS);
    System.exit(0);
}
From source file:com.ardoq.mavenImport.ArdoqMavenImport.java
/**
 * Command-line entry point: imports one or more Maven artifacts (given as
 * positional arguments, e.g. 'io.dropwizard:dropwizard-core:0.8.1') into an
 * Ardoq workspace.
 *
 * Options: -h host (default https://app.ardoq.com), -t API token,
 * -o organization (default "ardoq"), -w workspace, -r extra Maven
 * repository with optional -u/-p credentials.
 *
 * @param args command-line arguments
 * @throws Exception on import failure
 */
@SuppressWarnings("unchecked")
public static void main(String[] args) throws Exception {
    Options options = initOptions();
    CommandLine cmd;
    try {
        // NOTE(review): BasicParser is deprecated in commons-cli 1.3+; kept as-is
        CommandLineParser parser = new BasicParser();
        cmd = parser.parse(options, args);
        if (cmd.hasOption("help")) {
            printHelp(options);
            return;
        }
        // At least one artifact coordinate is required as a positional argument
        if (cmd.getArgList().isEmpty()) {
            System.out.println(
                    "One or more Maven artifact IDs required. For instance: 'io.dropwizard:dropwizard-core:0.8.1'");
            return;
        }
        String host = cmd.getOptionValue("h", "https://app.ardoq.com");
        String token = cmd.getOptionValue("t");
        String org = cmd.getOptionValue("o", "ardoq");
        String workspace = cmd.getOptionValue("w");
        List<String> importList = cmd.getArgList();
        ArdoqMavenImport ardoqMavenImport = new ArdoqMavenImport(host, workspace, org, token);
        MavenUtil mavenUtil = new MavenUtil(System.out, "test", "provided");
        // Optional extra repository (-r); credentials are only used when BOTH
        // -u and -p are present, otherwise the repository is added anonymously.
        if (cmd.hasOption("r")) {
            String extrarepo = cmd.getOptionValue("r");
            if (cmd.hasOption("u") && cmd.hasOption("p")) {
                String username = cmd.getOptionValue("u");
                String password = cmd.getOptionValue("p");
                mavenUtil.addRepository(extrarepo, username, password);
            } else {
                mavenUtil.addRepository(extrarepo);
            }
        }
        ardoqMavenImport.startImport(importList, mavenUtil);
    } catch (MissingOptionException moe) {
        // A required option was absent: show usage instead of a stack trace
        printHelp(options);
    }
}
From source file:com.github.trohovsky.jira.analyzer.Main.java
public static void main(String[] args) throws Exception { final Options options = new Options(); options.addOption("u", true, "username"); options.addOption("p", true, "password (optional, if not provided, the password is prompted)"); options.addOption("h", false, "show this help"); options.addOption("s", true, "use the strategy for querying and output, the strategy can be either 'issues_toatal' (default) or" + " 'per_month'"); options.addOption("d", true, "CSV delimiter"); // parsing of the command line arguments final CommandLineParser parser = new DefaultParser(); CommandLine cmdLine = null;/*from w w w . ja va 2 s. co m*/ try { cmdLine = parser.parse(options, args); if (cmdLine.hasOption('h') || cmdLine.getArgs().length == 0) { final HelpFormatter formatter = new HelpFormatter(); formatter.setOptionComparator(null); formatter.printHelp(HELP_CMDLINE, HELP_HEADER, options, null); return; } if (cmdLine.getArgs().length != 3) { throw new ParseException("You should specify exactly three arguments JIRA_SERVER JQL_QUERY_TEMPLATE" + " PATH_TO_PARAMETER_FILE"); } } catch (ParseException e) { System.err.println("Error parsing command line: " + e.getMessage()); final HelpFormatter formatter = new HelpFormatter(); formatter.printHelp(HELP_CMDLINE, HELP_HEADER, options, null); return; } final String csvDelimiter = (String) (cmdLine.getOptionValue('d') != null ? 
cmdLine.getOptionObject('d') : CSV_DELIMITER); final URI jiraServerUri = URI.create(cmdLine.getArgs()[0]); final String jqlQueryTemplate = cmdLine.getArgs()[1]; final List<List<String>> queryParametersData = readCSVFile(cmdLine.getArgs()[2], csvDelimiter); final String username = cmdLine.getOptionValue("u"); String password = cmdLine.getOptionValue("p"); final String strategy = cmdLine.getOptionValue("s"); try { // initialization of the REST client final AsynchronousJiraRestClientFactory factory = new AsynchronousJiraRestClientFactory(); if (username != null) { if (password == null) { final Console console = System.console(); final char[] passwordCharacters = console.readPassword("Password: "); password = new String(passwordCharacters); } restClient = factory.createWithBasicHttpAuthentication(jiraServerUri, username, password); } else { restClient = factory.create(jiraServerUri, new AnonymousAuthenticationHandler()); } final SearchRestClient searchRestClient = restClient.getSearchClient(); // choosing of an analyzer strategy AnalyzerStrategy analyzer = null; if (strategy != null) { switch (strategy) { case "issues_total": analyzer = new IssuesTotalStrategy(searchRestClient); break; case "issues_per_month": analyzer = new IssuesPerMonthStrategy(searchRestClient); break; default: System.err.println("The strategy does not exist"); return; } } else { analyzer = new IssuesTotalStrategy(searchRestClient); } // analyzing for (List<String> queryParameters : queryParametersData) { analyzer.analyze(jqlQueryTemplate, queryParameters); } } finally { // destroy the REST client, otherwise it stucks restClient.close(); } }
From source file:com.basistech.ninja.Train.java
/** * Command line interface to train a model. * * <pre>/* w ww. ja va2 s.c o m*/ * usage: Train [options] * --batch-size <arg> batch size (default = 10) * --epochs <arg> epochs (default = 5) * --examples <arg> input examples file (required) * --layer-sizes <arg> layer sizes, including input/output, e.g. 3 4 2 (required) * --learning-rate <arg> learning-rate (default = 0.7) * --model <arg> output model file (required) * </pre> * * @param args command line arguments * @throws IOException */ public static void main(String[] args) throws IOException { String defaultBatchSize = "10"; String deafaultEpochs = "5"; String defaultLearningRate = "0.7"; Options options = new Options(); Option option; option = new Option(null, "examples", true, "input examples file (required)"); option.setRequired(true); options.addOption(option); option = new Option(null, "model", true, "output model file (required)"); option.setRequired(true); options.addOption(option); option = new Option(null, "layer-sizes", true, "layer sizes, including input/output, e.g. 
3 4 2 (required)"); option.setRequired(true); option.setArgs(Option.UNLIMITED_VALUES); options.addOption(option); option = new Option(null, "batch-size", true, String.format("batch size (default = %s)", defaultBatchSize)); options.addOption(option); option = new Option(null, "epochs", true, String.format("epochs (default = %s)", deafaultEpochs)); options.addOption(option); option = new Option(null, "learning-rate", true, String.format("learning-rate (default = %s)", defaultLearningRate)); options.addOption(option); CommandLineParser parser = new GnuParser(); CommandLine cmdline = null; try { cmdline = parser.parse(options, args); } catch (org.apache.commons.cli.ParseException e) { System.err.println(e.getMessage()); usage(options); System.exit(1); } String[] remaining = cmdline.getArgs(); if (remaining == null) { usage(options); System.exit(1); } List<Integer> layerSizes = Lists.newArrayList(); for (String s : cmdline.getOptionValues("layer-sizes")) { layerSizes.add(Integer.parseInt(s)); } File examplesFile = new File(cmdline.getOptionValue("examples")); Train that = new Train(layerSizes, examplesFile); int batchSize = Integer.parseInt(cmdline.getOptionValue("batch-size", defaultBatchSize)); int epochs = Integer.parseInt(cmdline.getOptionValue("epochs", deafaultEpochs)); double learningRate = Double.parseDouble(cmdline.getOptionValue("learning-rate", defaultLearningRate)); File modelFile = new File(cmdline.getOptionValue("model")); that.train(batchSize, epochs, learningRate, modelFile); }
From source file:com.act.reachables.ConditionalReachabilityInterpreter.java
public static void main(String[] args) throws Exception { // Parse the command line options Options opts = new Options(); for (Option.Builder b : OPTION_BUILDERS) { opts.addOption(b.build());//from ww w .jav a2 s. co m } CommandLine cl = null; try { CommandLineParser parser = new DefaultParser(); cl = parser.parse(opts, args); } catch (ParseException e) { System.err.format("Argument parsing failed: %s\n", e.getMessage()); HELP_FORMATTER.printHelp(BingSearchRanker.class.getCanonicalName(), HELP_MESSAGE, opts, null, true); System.exit(1); } if (cl.hasOption("help")) { HELP_FORMATTER.printHelp(BingSearchRanker.class.getCanonicalName(), HELP_MESSAGE, opts, null, true); return; } String inputPath = cl.getOptionValue(OPTION_INPUT_ACT_FILEPATH); String outputPath = cl.getOptionValue(OPTION_OUTPUT_FILEPATH); String dbName = cl.getOptionValue(OPTION_DB_NAME); LOGGER.info("Starting to deserialize reachables forest."); ActData.instance().deserialize(inputPath); ActData actData = ActData.instance(); LOGGER.info("Finished deserializing reachables forest."); NoSQLAPI db = new NoSQLAPI(dbName, dbName); ConditionalReachabilityInterpreter conditionalReachabilityInterpreter = new ConditionalReachabilityInterpreter( actData, db); conditionalReachabilityInterpreter.run(outputPath); }
From source file:com.example.geomesa.authorizations.AuthorizationsTutorial.java
/**
 * Main entry point. Executes queries against an existing GDELT dataset,
 * once through a data store carrying the Accumulo connector's full
 * authorizations and once through a data store whose provider returns
 * empty authorizations, to demonstrate visibility filtering.
 *
 * @param args command-line options for the Accumulo connection and table
 * @throws Exception on connection or query failure
 */
public static void main(String[] args) throws Exception {
    // read command line options - this contains the connection to accumulo and the table to query
    // NOTE(review): BasicParser is deprecated in commons-cli 1.3+; kept as-is
    CommandLineParser parser = new BasicParser();
    Options options = SetupUtil.getGeomesaDataStoreOptions();
    CommandLine cmd = parser.parse(options, args);
    // verify that we can see this Accumulo destination in a GeoTools manner
    Map<String, String> dsConf = SetupUtil.getAccumuloDataStoreConf(cmd);
    // get an instance of the data store that uses the default authorizations provider, which will use whatever auths the connector has available
    // (the provider is selected via a JVM-wide system property, so each
    // setProperty call must precede the corresponding getDataStore call)
    System.setProperty(AuthorizationsProvider.AUTH_PROVIDER_SYS_PROPERTY,
            DefaultAuthorizationsProvider.class.getName());
    DataStore authDataStore = DataStoreFinder.getDataStore(dsConf);
    assert authDataStore != null;
    // get another instance of the data store that uses our authorizations provider that always returns empty auths
    System.setProperty(AuthorizationsProvider.AUTH_PROVIDER_SYS_PROPERTY,
            EmptyAuthorizationsProvider.class.getName());
    DataStore noAuthDataStore = DataStoreFinder.getDataStore(dsConf);
    // create the simple feature type for our test
    String simpleFeatureTypeName = cmd.getOptionValue(SetupUtil.FEATURE_NAME);
    SimpleFeatureType simpleFeatureType = GdeltFeature.buildGdeltFeatureType(simpleFeatureTypeName);
    // execute the query, with and without visibilities
    System.out.println("\nExecuting query with AUTHORIZED data store: auths are '"
            + ((AccumuloDataStore) authDataStore).config().authProvider().getAuthorizations() + "'");
    executeQuery(simpleFeatureTypeName, authDataStore);
    System.out.println("Executing query with UNAUTHORIZED data store: auths are '"
            + ((AccumuloDataStore) noAuthDataStore).config().authProvider().getAuthorizations() + "'");
    executeQuery(simpleFeatureTypeName, noAuthDataStore);
}
From source file:geomesa.tutorial.AuthorizationsTutorial.java
/**
 * Main entry point. Executes queries against an existing GDELT dataset,
 * once through a data store carrying the Accumulo connector's full
 * authorizations and once through a data store whose provider returns
 * empty authorizations, to demonstrate visibility filtering.
 *
 * @param args command-line options for the Accumulo connection and table
 * @throws Exception on connection or query failure
 */
public static void main(String[] args) throws Exception {
    // read command line options - this contains the connection to accumulo and the table to query
    // NOTE(review): BasicParser is deprecated in commons-cli 1.3+; kept as-is
    CommandLineParser parser = new BasicParser();
    Options options = SetupUtil.getGeomesaDataStoreOptions();
    CommandLine cmd = parser.parse(options, args);
    // verify that we can see this Accumulo destination in a GeoTools manner
    Map<String, String> dsConf = SetupUtil.getAccumuloDataStoreConf(cmd);
    // get an instance of the data store that uses the default authorizations provider, which will use whatever auths the connector has available
    // (the provider is selected via a JVM-wide system property, so each
    // setProperty call must precede the corresponding getDataStore call)
    System.setProperty(AuthorizationsProvider.AUTH_PROVIDER_SYS_PROPERTY,
            DefaultAuthorizationsProvider.class.getName());
    DataStore authDataStore = DataStoreFinder.getDataStore(dsConf);
    assert authDataStore != null;
    // get another instance of the data store that uses our authorizations provider that always returns empty auths
    System.setProperty(AuthorizationsProvider.AUTH_PROVIDER_SYS_PROPERTY,
            EmptyAuthorizationsProvider.class.getName());
    DataStore noAuthDataStore = DataStoreFinder.getDataStore(dsConf);
    // create the simple feature type for our test
    String simpleFeatureTypeName = cmd.getOptionValue(SetupUtil.FEATURE_NAME);
    SimpleFeatureType simpleFeatureType = GdeltFeature.buildGdeltFeatureType(simpleFeatureTypeName);
    // execute the query, with and without visibilities
    System.out.println("\nExecuting query with AUTHORIZED data store: auths are '"
            + ((AccumuloDataStore) authDataStore).authorizationsProvider().getAuthorizations() + "'");
    executeQuery(simpleFeatureTypeName, authDataStore);
    System.out.println("Executing query with UNAUTHORIZED data store: auths are '"
            + ((AccumuloDataStore) noAuthDataStore).authorizationsProvider().getAuthorizations() + "'");
    executeQuery(simpleFeatureTypeName, noAuthDataStore);
}