List of usage examples for org.apache.commons.cli.PosixParser (constructor usage)
From source file:fr.inria.atlanmod.kyanos.benchmarks.KyanosMapCreator.java
/**
 * Command-line entry point: migrates an XMI model file into a Kyanos/NeoEMF
 * map-based persistence backend.
 *
 * Required options: input file (IN), output directory (OUT), and the fully
 * qualified name of the EPackage implementation class (EPACKAGE_CLASS).
 */
public static void main(String[] args) {
    Options options = new Options();
    // Build the three required options. Note: OptionBuilder is used only as a
    // factory here; argName/description/args are set on the Option instances.
    Option inputOpt = OptionBuilder.create(IN);
    inputOpt.setArgName("INPUT");
    inputOpt.setDescription("Input file");
    inputOpt.setArgs(1);
    inputOpt.setRequired(true);
    Option outputOpt = OptionBuilder.create(OUT);
    outputOpt.setArgName("OUTPUT");
    outputOpt.setDescription("Output directory");
    outputOpt.setArgs(1);
    outputOpt.setRequired(true);
    Option inClassOpt = OptionBuilder.create(EPACKAGE_CLASS);
    inClassOpt.setArgName("CLASS");
    inClassOpt.setDescription("FQN of EPackage implementation class");
    inClassOpt.setArgs(1);
    inClassOpt.setRequired(true);
    options.addOption(inputOpt);
    options.addOption(outputOpt);
    options.addOption(inClassOpt);
    CommandLineParser parser = new PosixParser();
    try {
        // Register the map backend factory before any resource is created.
        PersistenceBackendFactoryRegistry.getFactories().put(NeoMapURI.NEO_MAP_SCHEME,
                new MapPersistenceBackendFactory());
        CommandLine commandLine = parser.parse(options, args);
        URI sourceUri = URI.createFileURI(commandLine.getOptionValue(IN));
        URI targetUri = NeoMapURI.createNeoMapURI(new File(commandLine.getOptionValue(OUT)));
        // Reflectively call <EPackageImpl>.init() so the metamodel is
        // registered before the source model is loaded.
        Class<?> inClazz = KyanosMapCreator.class.getClassLoader()
                .loadClass(commandLine.getOptionValue(EPACKAGE_CLASS));
        inClazz.getMethod("init").invoke(null);
        ResourceSet resourceSet = new ResourceSetImpl();
        // Plain and zipped XMI sources; the neo-map protocol maps to the
        // persistent resource factory.
        resourceSet.getResourceFactoryRegistry().getExtensionToFactoryMap().put("xmi",
                new XMIResourceFactoryImpl());
        resourceSet.getResourceFactoryRegistry().getExtensionToFactoryMap().put("zxmi",
                new XMIResourceFactoryImpl());
        resourceSet.getResourceFactoryRegistry().getProtocolToFactoryMap().put(NeoMapURI.NEO_MAP_SCHEME,
                PersistentResourceFactory.eINSTANCE);
        Resource sourceResource = resourceSet.createResource(sourceUri);
        Map<String, Object> loadOpts = new HashMap<String, Object>();
        if ("zxmi".equals(sourceUri.fileExtension())) {
            // .zxmi files are zip-compressed XMI.
            loadOpts.put(XMIResource.OPTION_ZIP, Boolean.TRUE);
        }
        // Rough memory accounting around the load. gc() is only a hint to the
        // JVM, so these figures are approximate.
        Runtime.getRuntime().gc();
        long initialUsedMemory = Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory();
        LOG.log(Level.INFO, MessageFormat.format("Used memory before loading: {0}",
                MessageUtil.byteCountToDisplaySize(initialUsedMemory)));
        LOG.log(Level.INFO, "Loading source resource");
        sourceResource.load(loadOpts);
        LOG.log(Level.INFO, "Source resource loaded");
        Runtime.getRuntime().gc();
        long finalUsedMemory = Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory();
        LOG.log(Level.INFO, MessageFormat.format("Used memory after loading: {0}",
                MessageUtil.byteCountToDisplaySize(finalUsedMemory)));
        LOG.log(Level.INFO, MessageFormat.format("Memory use increase: {0}",
                MessageUtil.byteCountToDisplaySize(finalUsedMemory - initialUsedMemory)));
        Resource targetResource = resourceSet.createResource(targetUri);
        Map<String, Object> saveOpts = new HashMap<String, Object>();
        List<StoreOption> storeOptions = new ArrayList<StoreOption>();
        // AUTOCOMMIT keeps the backing map store flushed during the bulk move.
        storeOptions.add(MapResourceOptions.EStoreMapOption.AUTOCOMMIT);
        saveOpts.put(MapResourceOptions.STORE_OPTIONS, storeOptions);
        // First save initializes the target backend before contents are moved.
        targetResource.save(saveOpts);
        LOG.log(Level.INFO, "Start moving elements");
        targetResource.getContents().clear();
        // addAll on EMF contents MOVES the elements out of sourceResource.
        targetResource.getContents().addAll(sourceResource.getContents());
        LOG.log(Level.INFO, "End moving elements");
        LOG.log(Level.INFO, "Start saving");
        targetResource.save(saveOpts);
        LOG.log(Level.INFO, "Saved");
        // Shut down the backend without unloading (unload would purge the
        // just-written content from the persistent store's view).
        if (targetResource instanceof PersistentResourceImpl) {
            PersistentResourceImpl.shutdownWithoutUnload((PersistentResourceImpl) targetResource);
        } else {
            targetResource.unload();
        }
    } catch (ParseException e) {
        // Bad command line: show the error and full usage.
        MessageUtil.showError(e.toString());
        MessageUtil.showError("Current arguments: " + Arrays.toString(args));
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("java -jar <this-file.jar>", options, true);
    } catch (Throwable e) {
        // Catch-all for a CLI tool: report and dump the stack trace.
        MessageUtil.showError(e.toString());
        e.printStackTrace();
    }
}
From source file:com.pinterest.pinlater.client.PinLaterClientTool.java
public static void main(String[] args) { try {/*from w ww . ja v a 2 s . co m*/ CommandLineParser parser = new PosixParser(); CommandLine cmdLine = parser.parse(getOptions(), args); new PinLaterQueryIssuer(cmdLine).run(); } catch (Exception e) { LOG.error("Exception in client tool", e); System.exit(1); } }
From source file:com.aerospike.examples.frequencycap.FrequencyCap.java
public static void main(String[] args) throws AerospikeException { try {/*from w ww . jav a 2 s .c om*/ Options options = new Options(); options.addOption("h", "host", true, "Server hostname (default: 172.28.128.6)"); options.addOption("p", "port", true, "Server port (default: 3000)"); options.addOption("n", "namespace", true, "Namespace (default: test)"); options.addOption("u", "usage", false, "Print usage."); options.addOption("l", "load", false, "Load data."); CommandLineParser parser = new PosixParser(); CommandLine cl = parser.parse(options, args, false); String host = cl.getOptionValue("h", "172.28.128.6"); String portString = cl.getOptionValue("p", "3000"); int port = Integer.parseInt(portString); String namespace = cl.getOptionValue("n", "test"); String set = cl.getOptionValue("s", "demo"); log.debug("Host: " + host); log.debug("Port: " + port); log.debug("Namespace: " + namespace); log.debug("Set: " + set); @SuppressWarnings("unchecked") List<String> cmds = cl.getArgList(); if (cmds.size() == 0 && cl.hasOption("u")) { logUsage(options); return; } FrequencyCap as = new FrequencyCap(host, port, namespace, set); if (cl.hasOption("l")) { as.generateDataDateInKey(); as.generateDataDateInBin(); } else { as.simulateWorkDateInKey(); as.simulateWorkDateInBin(); } } catch (Exception e) { log.error("Critical error", e); } }
From source file:net.iridiant.hdfs.webdav.Main.java
/**
 * Entry point: starts an embedded Jetty server hosting the HDFS WebDAV
 * servlet, with optional debug logging, bind address/port overrides, and
 * Ganglia metrics reporting.
 */
public static void main(String[] args) {
    HDFSWebDAVServlet servlet = HDFSWebDAVServlet.getServlet();
    Configuration conf = servlet.getConfiguration();
    // Process command line
    Options options = new Options();
    options.addOption("d", "debug", false, "Enable debug logging");
    options.addOption("p", "port", true, "Port to bind to [default: 8080]");
    options.addOption("b", "bind-address", true, "Address or hostname to bind to [default: 0.0.0.0]");
    options.addOption("g", "ganglia", true, "Send Ganglia metrics to host:port [default: none]");
    CommandLine cmd = null;
    try {
        cmd = new PosixParser().parse(options, args);
    } catch (ParseException e) {
        // NOTE(review): assumes printUsageAndExit terminates the JVM;
        // otherwise cmd stays null and the hasOption calls below would NPE.
        printUsageAndExit(options, -1);
    }
    if (cmd.hasOption('d')) {
        // Raise this application's package logger to DEBUG.
        Logger rootLogger = Logger.getLogger("net.iridiant");
        rootLogger.setLevel(Level.DEBUG);
    }
    if (cmd.hasOption('b')) {
        conf.set("hadoop.webdav.bind.address", cmd.getOptionValue('b'));
    }
    if (cmd.hasOption('p')) {
        // NOTE(review): a non-numeric port throws NumberFormatException here.
        conf.setInt("hadoop.webdav.port", Integer.valueOf(cmd.getOptionValue('p')));
    }
    String gangliaHost = null;
    int gangliaPort = 8649;
    if (cmd.hasOption('g')) {
        // Accept either "host" or "host:port"; port defaults to 8649.
        String val = cmd.getOptionValue('g');
        if (val.indexOf(':') != -1) {
            String[] split = val.split(":");
            gangliaHost = split[0];
            gangliaPort = Integer.valueOf(split[1]);
        } else {
            gangliaHost = val;
        }
    }
    InetSocketAddress addr = getAddress(conf);
    // Log in the server principal from keytab
    UserGroupInformation.setConfiguration(conf);
    if (UserGroupInformation.isSecurityEnabled())
        try {
            SecurityUtil.login(conf, "hadoop.webdav.server.kerberos.keytab",
                    "hadoop.webdav.server.kerberos.principal", addr.getHostName());
        } catch (IOException e) {
            LOG.fatal("Could not log in", e);
            System.err.println("Could not log in");
            System.exit(-1);
        }
    // Set up embedded Jetty
    Server server = new Server();
    server.setSendServerVersion(false);
    server.setSendDateHeader(false);
    server.setStopAtShutdown(true);
    // Set up connector
    Connector connector = new SelectChannelConnector();
    connector.setPort(addr.getPort());
    connector.setHost(addr.getHostName());
    server.addConnector(connector);
    LOG.info("Listening on " + addr);
    // Set up context: one root context with session support.
    Context context = new Context(server, "/", Context.SESSIONS);
    // WebDAV servlet — Basic auth realm advertised to clients.
    ServletHolder servletHolder = new ServletHolder(servlet);
    servletHolder.setInitParameter("authenticate-header", "Basic realm=\"Hadoop WebDAV Server\"");
    context.addServlet(servletHolder, "/*");
    // metrics instrumentation filter
    context.addFilter(new FilterHolder(new DefaultWebappMetricsFilter()), "/*", 0);
    // auth filter
    context.addFilter(new FilterHolder(new AuthFilter(conf)), "/*", 0);
    server.setHandler(context);
    // Set up Ganglia metrics reporting (only when -g was supplied).
    if (gangliaHost != null) {
        GangliaReporter.enable(1, TimeUnit.MINUTES, gangliaHost, gangliaPort);
    }
    // Start and join the server thread; join() blocks until shutdown.
    try {
        server.start();
        server.join();
    } catch (Exception e) {
        LOG.fatal("Failed to start Jetty", e);
        System.err.println("Failed to start Jetty");
        System.exit(-1);
    }
}
From source file:CountandraServer.java
public static void main(String args[]) { try {/*from ww w.j a v a 2s . c o m*/ System.out.println(args[0]); CommandLineParser parser = new PosixParser(); CommandLine line = parser.parse(options, args); if (line.hasOption("cassandrahostip")) { CountandraUtils.setCassandraHostIp(line.getOptionValue("cassandrahostip")); if (line.hasOption("consistencylevel")) { if (line.hasOption("replicationfactor")) { CassandraStorage.setGlobalParams(line.getOptionValue("cassandrahostip"), line.getOptionValue("consistencylevel")); CassandraDB.setGlobalParams(line.getOptionValue("cassandrahostip"), line.getOptionValue("replicationfactor")); } else { CassandraStorage.setGlobalParams(line.getOptionValue("cassandrahostip"), line.getOptionValue("consistencylevel")); CassandraDB.setGlobalParams(line.getOptionValue("cassandrahostip")); } } else { // no consistency level -- assumed to be ONE if (line.hasOption("replicationfactor")) { CassandraStorage.setGlobalParams(line.getOptionValue("cassandrahostip")); CassandraDB.setGlobalParams(line.getOptionValue("cassandrahostip"), line.getOptionValue("replicationfactor")); } else { CassandraStorage.setGlobalParams(line.getOptionValue("cassandrahostip")); CassandraDB.setGlobalParams(line.getOptionValue("cassandrahostip")); } } } else { CassandraStorage.setGlobalParams(cassandraServerForClient); CassandraDB.setGlobalParams(cassandraServerForClient); } if (line.hasOption("s")) { System.out.println("Starting Cassandra"); // cassandra server CassandraUtils.startupCassandraServer(); } if (line.hasOption("i")) { System.out.print("Checking if Cassandra is initialized"); CassandraDB csdb = new CassandraDB(); while (!csdb.isCassandraUp()) { System.out.print("."); } System.out.println("."); System.out.println("Initializing Basic structures"); CountandraUtils.initBasicDataStructures(); System.out.println("Initialized Basic structures"); } if (line.hasOption("h")) { if (line.hasOption("httpserverport")) { httpPort = 
Integer.parseInt(line.getOptionValue("httpserverport")); } NettyUtils.startupNettyServer(httpPort); System.out.println("Started Http Server"); } if (line.hasOption("k")) { KafkaUtils.startupKafkaConsumer(); System.out.println("Started Kafka Consumer"); } // Unit Tests if (line.hasOption("t")) { try { Thread.sleep(30000); } catch (InterruptedException e) { // TODO Auto-generated catch block e.printStackTrace(); } org.junit.runner.JUnitCore.main(CountandraTestCases.class.getName()); } } catch (IOException ioe) { System.out.println(ioe); } catch (Exception e) { System.out.println(e); } }
From source file:com.trendmicro.hdfs.webdav.Main.java
public static void main(String[] args) { HDFSWebDAVServlet servlet = HDFSWebDAVServlet.getServlet(); Configuration conf = servlet.getConfiguration(); // Process command line Options options = new Options(); options.addOption("d", "debug", false, "Enable debug logging"); options.addOption("p", "port", true, "Port to bind to [default: 8080]"); options.addOption("b", "bind-address", true, "Address or hostname to bind to [default: 0.0.0.0]"); options.addOption("g", "ganglia", true, "Send Ganglia metrics to host:port [default: none]"); CommandLine cmd = null;//from w ww.j a va 2s.co m try { cmd = new PosixParser().parse(options, args); } catch (ParseException e) { printUsageAndExit(options, -1); } if (cmd.hasOption('d')) { Logger rootLogger = Logger.getLogger("com.trendmicro"); rootLogger.setLevel(Level.DEBUG); } if (cmd.hasOption('b')) { conf.set("hadoop.webdav.bind.address", cmd.getOptionValue('b')); } if (cmd.hasOption('p')) { conf.setInt("hadoop.webdav.port", Integer.valueOf(cmd.getOptionValue('p'))); } String gangliaHost = null; int gangliaPort = 8649; if (cmd.hasOption('g')) { String val = cmd.getOptionValue('g'); if (val.indexOf(':') != -1) { String[] split = val.split(":"); gangliaHost = split[0]; gangliaPort = Integer.valueOf(split[1]); } else { gangliaHost = val; } } InetSocketAddress addr = getAddress(conf); // Log in the server principal from keytab UserGroupInformation.setConfiguration(conf); if (UserGroupInformation.isSecurityEnabled()) try { SecurityUtil.login(conf, "hadoop.webdav.server.kerberos.keytab", "hadoop.webdav.server.kerberos.principal", addr.getHostName()); } catch (IOException e) { LOG.fatal("Could not log in", e); System.err.println("Could not log in"); System.exit(-1); } // Set up embedded Jetty Server server = new Server(); server.setSendServerVersion(false); server.setSendDateHeader(false); server.setStopAtShutdown(true); // Set up connector Connector connector = new SelectChannelConnector(); connector.setPort(addr.getPort()); 
connector.setHost(addr.getHostName()); server.addConnector(connector); LOG.info("Listening on " + addr); // Set up context Context context = new Context(server, "/", Context.SESSIONS); // WebDAV servlet ServletHolder servletHolder = new ServletHolder(servlet); servletHolder.setInitParameter("authenticate-header", "Basic realm=\"Hadoop WebDAV Server\""); context.addServlet(servletHolder, "/*"); // metrics instrumentation filter context.addFilter(new FilterHolder(new DefaultWebappMetricsFilter()), "/*", 0); // auth filter context.addFilter(new FilterHolder(new AuthFilter(conf)), "/*", 0); server.setHandler(context); // Set up Ganglia metrics reporting if (gangliaHost != null) { GangliaReporter.enable(1, TimeUnit.MINUTES, gangliaHost, gangliaPort); } // Start and join the server thread try { server.start(); server.join(); } catch (Exception e) { LOG.fatal("Failed to start Jetty", e); System.err.println("Failed to start Jetty"); System.exit(-1); } }
From source file:eu.scape_project.tb.lsdr.seqfileutility.SequenceFileUtility.java
/** * Main method//from ww w . ja v a 2 s.c o m * @param args Command line arguments * @throws Exception */ public static void main(String args[]) throws Exception { int res = 1; Configuration conf = new Configuration(); //conf.set("mapred.max.split.size", "16777216"); //conf.setBoolean("mapreduce.client.genericoptionsparser.used", false); GenericOptionsParser gop = new GenericOptionsParser(conf, args); ProcessParameters pc = new ProcessParameters(); CommandLineParser cmdParser = new PosixParser(); CommandLine cmd = cmdParser.parse(Options.OPTIONS, gop.getRemainingArgs()); if ((args.length == 0) || (cmd.hasOption(Options.HELP_OPT))) { Options.exit("Usage", 0); } else { Options.initOptions(cmd, pc); eu.scape_project.tb.lsdr.seqfileutility.Job j; if (!pc.isHadoopmapmode()) { j = new BatchJob(pc); j.run(); res = 0; } else { HadoopJob hj = new HadoopJob(); hj.setPc(pc); res = ToolRunner.run(conf, hj, args); if (res == 0) System.out.print(pc.getOutputDirectory()); } } System.exit(res); }
From source file:com.github.enr.markdownj.extras.MarkdownApp.java
public static void main(String[] args) { MarkdownApp app = new MarkdownApp(); app.log().debug("Markdown app starting with args: {}", Arrays.toString(args)); CommandLineParser parser = new PosixParser(); Options options = new Options(); options.addOption("s", "source", true, "The source directory for markdown files"); options.addOption("d", "destination", true, "The destination directory for html files"); options.addOption("h", "header", true, "The path to the html header file"); options.addOption("f", "footer", true, "The path to the html footer file"); options.addOption("t", "code-template", true, "The template for code blocks"); options.addOption("e", "extensions", true, "A comma separated list of file extensions to process. If setted, files with extension not in list won't be processed"); options.addOption("c", "char-encoding", true, "The encoding to read and write files"); HelpFormatter formatter = new HelpFormatter(); String helpHeader = String.format("%s", MarkdownApp.class.getName()); try {//from w ww. j av a2s . co m CommandLine line = parser.parse(options, args); app.process(line); } catch (ParseException e) { app.log().warn(e.getMessage(), e); formatter.printHelp(helpHeader, options); } }
From source file:com.aerospike.examples.travel.FlighAggregation.java
public static void main(String[] args) throws AerospikeException { try {/*from w w w .jav a 2 s . co m*/ Options options = new Options(); options.addOption("h", "host", true, "Server hostname (default: 127.0.0.1)"); options.addOption("p", "port", true, "Server port (default: 3000)"); options.addOption("n", "namespace", true, "Namespace (default: test)"); options.addOption("s", "set", true, "Set (default: demo)"); options.addOption("u", "usage", false, "Print usage."); CommandLineParser parser = new PosixParser(); CommandLine cl = parser.parse(options, args, false); String host = cl.getOptionValue("h", "127.0.0.1"); String portString = cl.getOptionValue("p", "3000"); int port = Integer.parseInt(portString); String namespace = cl.getOptionValue("n", "test"); String set = cl.getOptionValue("s", "demo"); log.debug("Host: " + host); log.debug("Port: " + port); log.debug("Namespace: " + namespace); log.debug("Set: " + set); @SuppressWarnings("unchecked") List<String> cmds = cl.getArgList(); if (cmds.size() == 0 && cl.hasOption("u")) { logUsage(options); return; } FlighAggregation as = new FlighAggregation(host, port, namespace, set); as.init(); as.work(); } catch (Exception e) { log.error("Critical error", e); } }
From source file:StompMessagePublisher.java
/**
 * Entry point: publishes a templated JSON message to a STOMP broker.
 * Connection parameters and payload can be overridden on the command line;
 * otherwise hard-coded defaults are used.
 *
 * NOTE(review): default credentials ("guest" / "P@ssword1") are hard-coded
 * in source — acceptable only for a throwaway test client.
 */
public static void main(String[] args) throws ParseException {
    Options options = new Options();
    options.addOption("h", true, "Host to connect to");
    options.addOption("p", true, "Port to connect to");
    options.addOption("u", true, "User name");
    options.addOption("P", true, "Password");
    options.addOption("d", true, "JMS Destination");
    options.addOption("j", true, "JSON to send");
    CommandLineParser parser = new PosixParser();
    CommandLine cmd = parser.parse(options, args);
    // Defaults, overridden by the options handled below.
    // String host = "loyjbsms11-public.lolacloud.com";
    // String host = "pink.cloudtroopers.ro";
    String host = "localhost";
    // String host = "pink.cloudtroopers.ro";
    String port = "61613";
    String user = "guest";
    String pass = "P@ssword1";
    // String destination = REWARD_POINTS_JMS_DESTINATION;
    // String json = REWARD_POINTS_JSON_MERGE;
    String destination = MEMBER_REGISTRATION_JMS_DESTINATION;
    String json = MEMBER_REGISTRATION_JSON;
    if (cmd.hasOption("h")) {
        host = cmd.getOptionValue("h");
    }
    if (cmd.hasOption("p")) {
        port = cmd.getOptionValue("p");
    }
    if (cmd.hasOption("u")) {
        user = cmd.getOptionValue("u");
    }
    if (cmd.hasOption("P")) {
        pass = cmd.getOptionValue("P");
    }
    if (cmd.hasOption("d")) {
        destination = cmd.getOptionValue("d");
    }
    if (cmd.hasOption("j")) {
        json = cmd.getOptionValue("j");
    }
    try {
        StompConnection connection = new StompConnection();
        connection.open(host, Integer.parseInt(port));
        connection.connect(user, pass);
        // Loop currently sends a single message (bound is 1); the index i
        // offsets each numeric ID so repeated runs produce distinct records.
        for (int i = 0; i < 1; i++) {
            System.out.println(" msg " + i);
            // Substitute the template's numeric ID constants with per-message
            // values. NOTE(review): replaceAll treats the stringified constant
            // as a regex and replaces every occurrence of that digit sequence.
            String newJson = json.replaceAll("" + EXTERNAL_PROVIDER_ID, "" + (EXTERNAL_PROVIDER_ID + i));
            newJson = newJson.replaceAll("" + CUSTOMER_ACCT_ID, "" + (CUSTOMER_ACCT_ID + i));
            newJson = newJson.replaceAll("" + LOYALTY_ACCT_ID, "" + (LOYALTY_ACCT_ID + i));
            send(host, port, user, pass, destination, newJson, connection);
        }
        connection.disconnect();
        connection.close();
    } catch (UnknownHostException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    } catch (Exception e) {
        e.printStackTrace();
    }
}