List of usage examples for the Apache Commons CLI `org.apache.commons.cli.Options` class.
From source file:gov.lanl.adore.djatoka.DjatokaCompress.java
/** * Uses apache commons cli to parse input args. Passes parsed * parameters to ICompress implementation. * @param args command line parameters to defined input,output,etc. */// w ww . j av a 2 s . com public static void main(String[] args) { // create the command line parser CommandLineParser parser = new PosixParser(); // create the Options Options options = new Options(); options.addOption("i", "input", true, "Filepath of the input file or dir."); options.addOption("o", "output", true, "Filepath of the output file or dir."); options.addOption("r", "rate", true, "Absolute Compression Ratio"); options.addOption("s", "slope", true, "Used to generate relative compression ratio based on content characteristics."); options.addOption("y", "Clayers", true, "Number of quality levels."); options.addOption("l", "Clevels", true, "Number of DWT levels (reolution levels)."); options.addOption("v", "Creversible", true, "Use Reversible Wavelet"); options.addOption("c", "Cprecincts", true, "Precinct dimensions"); options.addOption("p", "props", true, "Compression Properties File"); options.addOption("d", "Corder", true, "Progression order"); options.addOption("g", "ORGgen_plt", true, "Enables insertion of packet length information in the header"); options.addOption("t", "ORGtparts", true, "Division of each tile's packets into tile-parts"); options.addOption("b", "Cblk", true, "Codeblock Size"); options.addOption("a", "AltImpl", true, "Alternate ICompress Implemenation"); try { if (args.length == 0) { HelpFormatter formatter = new HelpFormatter(); formatter.printHelp("gov.lanl.adore.djatoka.DjatokaCompress", options); System.exit(0); } // parse the command line arguments CommandLine line = parser.parse(options, args); String input = line.getOptionValue("i"); String output = line.getOptionValue("o"); String propsFile = line.getOptionValue("p"); DjatokaEncodeParam p; if (propsFile != null) { Properties props = IOUtils.loadConfigByPath(propsFile); p = new DjatokaEncodeParam(props); 
} else p = new DjatokaEncodeParam(); String rate = line.getOptionValue("r"); if (rate != null) p.setRate(rate); String slope = line.getOptionValue("s"); if (slope != null) p.setSlope(slope); String Clayers = line.getOptionValue("y"); if (Clayers != null) p.setLayers(Integer.parseInt(Clayers)); String Clevels = line.getOptionValue("l"); if (Clevels != null) p.setLevels(Integer.parseInt(Clevels)); String Creversible = line.getOptionValue("v"); if (Creversible != null) p.setUseReversible(Boolean.parseBoolean(Creversible)); String Cprecincts = line.getOptionValue("c"); if (Cprecincts != null) p.setPrecincts(Cprecincts); String Corder = line.getOptionValue("d"); if (Corder != null) p.setProgressionOrder(Corder); String ORGgen_plt = line.getOptionValue("g"); if (ORGgen_plt != null) p.setInsertPLT(Boolean.parseBoolean(ORGgen_plt)); String Cblk = line.getOptionValue("b"); if (Cblk != null) p.setCodeBlockSize(Cblk); String alt = line.getOptionValue("a"); ICompress jp2 = new KduCompressExe(); if (alt != null) jp2 = (ICompress) Class.forName(alt).newInstance(); if (new File(input).isDirectory() && new File(output).isDirectory()) { ArrayList<File> files = IOUtils.getFileList(input, new SourceImageFileFilter(), false); for (File f : files) { long x = System.currentTimeMillis(); File outFile = new File(output, f.getName().substring(0, f.getName().indexOf(".")) + ".jp2"); compress(jp2, f.getAbsolutePath(), outFile.getAbsolutePath(), p); report(f.getAbsolutePath(), x); } } else { long x = System.currentTimeMillis(); File f = new File(input); if (output == null) output = f.getName().substring(0, f.getName().indexOf(".")) + ".jp2"; if (new File(output).isDirectory()) output = output + f.getName().substring(0, f.getName().indexOf(".")) + ".jp2"; compress(jp2, input, output, p); report(input, x); } } catch (ParseException e) { logger.error("Parse exception:" + e.getMessage(), e); } catch (DjatokaException e) { logger.error("djatoka Compression exception:" + e.getMessage(), e); } catch 
(InstantiationException e) { logger.error("Unable to initialize alternate implemenation:" + e.getMessage(), e); } catch (Exception e) { logger.error("An exception occured:" + e.getMessage(), e); } }
From source file:edu.nyu.vida.data_polygamy.pre_processing.PreProcessing.java
/** * @param args/* ww w .j a v a2 s. co m*/ * @throws IOException * @throws ClassNotFoundException * @throws InterruptedException */ @SuppressWarnings("deprecation") public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException { Options options = new Options(); Option nameOption = new Option("dn", "name", true, "the name of the dataset"); nameOption.setRequired(true); nameOption.setArgName("DATASET NAME"); options.addOption(nameOption); Option headerOption = new Option("dh", "header", true, "the file that contains the header of the dataset"); headerOption.setRequired(true); headerOption.setArgName("DATASET HEADER FILE"); options.addOption(headerOption); Option deafultsOption = new Option("dd", "defaults", true, "the file that contains the default values of the dataset"); deafultsOption.setRequired(true); deafultsOption.setArgName("DATASET DEFAULTS FILE"); options.addOption(deafultsOption); Option tempResOption = new Option("t", "temporal", true, "desired temporal resolution (hour, day, week, or month)"); tempResOption.setRequired(true); tempResOption.setArgName("TEMPORAL RESOLUTION"); options.addOption(tempResOption); Option spatialResOption = new Option("s", "spatial", true, "desired spatial resolution (points, nbhd, zip, grid, or city)"); spatialResOption.setRequired(true); spatialResOption.setArgName("SPATIAL RESOLUTION"); options.addOption(spatialResOption); Option currentSpatialResOption = new Option("cs", "current-spatial", true, "current spatial resolution (points, nbhd, zip, grid, or city)"); currentSpatialResOption.setRequired(true); currentSpatialResOption.setArgName("CURRENT SPATIAL RESOLUTION"); options.addOption(currentSpatialResOption); Option indexResOption = new Option("i", "index", true, "indexes of the temporal and spatial attributes"); indexResOption.setRequired(true); indexResOption.setArgName("INDEX OF SPATIO-TEMPORAL RESOLUTIONS"); indexResOption.setArgs(Option.UNLIMITED_VALUES); 
options.addOption(indexResOption); Option machineOption = new Option("m", "machine", true, "machine identifier"); machineOption.setRequired(true); machineOption.setArgName("MACHINE"); machineOption.setArgs(1); options.addOption(machineOption); Option nodesOption = new Option("n", "nodes", true, "number of nodes"); nodesOption.setRequired(true); nodesOption.setArgName("NODES"); nodesOption.setArgs(1); options.addOption(nodesOption); Option s3Option = new Option("s3", "s3", false, "data on Amazon S3"); s3Option.setRequired(false); options.addOption(s3Option); Option awsAccessKeyIdOption = new Option("aws_id", "aws-id", true, "aws access key id; " + "this is required if the execution is on aws"); awsAccessKeyIdOption.setRequired(false); awsAccessKeyIdOption.setArgName("AWS-ACCESS-KEY-ID"); awsAccessKeyIdOption.setArgs(1); options.addOption(awsAccessKeyIdOption); Option awsSecretAccessKeyOption = new Option("aws_key", "aws-id", true, "aws secrect access key; " + "this is required if the execution is on aws"); awsSecretAccessKeyOption.setRequired(false); awsSecretAccessKeyOption.setArgName("AWS-SECRET-ACCESS-KEY"); awsSecretAccessKeyOption.setArgs(1); options.addOption(awsSecretAccessKeyOption); Option bucketOption = new Option("b", "s3-bucket", true, "bucket on s3; " + "this is required if the execution is on aws"); bucketOption.setRequired(false); bucketOption.setArgName("S3-BUCKET"); bucketOption.setArgs(1); options.addOption(bucketOption); Option helpOption = new Option("h", "help", false, "display this message"); helpOption.setRequired(false); options.addOption(helpOption); HelpFormatter formatter = new HelpFormatter(); CommandLineParser parser = new PosixParser(); CommandLine cmd = null; try { cmd = parser.parse(options, args); } catch (ParseException e) { formatter.printHelp( "hadoop jar data-polygamy.jar " + "edu.nyu.vida.data_polygamy.pre_processing.PreProcessing", options, true); System.exit(0); } if (cmd.hasOption("h")) { formatter.printHelp( "hadoop jar 
data-polygamy.jar " + "edu.nyu.vida.data_polygamy.pre_processing.PreProcessing", options, true); System.exit(0); } boolean s3 = cmd.hasOption("s3"); String s3bucket = ""; String awsAccessKeyId = ""; String awsSecretAccessKey = ""; if (s3) { if ((!cmd.hasOption("aws_id")) || (!cmd.hasOption("aws_key")) || (!cmd.hasOption("b"))) { System.out.println( "Arguments 'aws_id', 'aws_key', and 'b'" + " are mandatory if execution is on AWS."); formatter.printHelp( "hadoop jar data-polygamy.jar " + "edu.nyu.vida.data_polygamy.pre_processing.PreProcessing", options, true); System.exit(0); } s3bucket = cmd.getOptionValue("b"); awsAccessKeyId = cmd.getOptionValue("aws_id"); awsSecretAccessKey = cmd.getOptionValue("aws_key"); } boolean snappyCompression = false; boolean bzip2Compression = false; String machine = cmd.getOptionValue("m"); int nbNodes = Integer.parseInt(cmd.getOptionValue("n")); Configuration s3conf = new Configuration(); if (s3) { s3conf.set("fs.s3.awsAccessKeyId", awsAccessKeyId); s3conf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey); s3conf.set("bucket", s3bucket); } Configuration conf = new Configuration(); Machine machineConf = new Machine(machine, nbNodes); String dataset = cmd.getOptionValue("dn"); String header = cmd.getOptionValue("dh"); String defaults = cmd.getOptionValue("dd"); String temporalResolution = cmd.getOptionValue("t"); String spatialResolution = cmd.getOptionValue("s"); String gridResolution = ""; String currentSpatialResolution = cmd.getOptionValue("cs"); if (spatialResolution.contains("grid")) { String[] res = spatialResolution.split("-"); spatialResolution = res[0]; gridResolution = res[1]; } conf.set("header", s3bucket + FrameworkUtils.dataDir + "/" + header); conf.set("defaults", s3bucket + FrameworkUtils.dataDir + "/" + defaults); conf.set("temporal-resolution", temporalResolution); conf.set("spatial-resolution", spatialResolution); conf.set("grid-resolution", gridResolution); conf.set("current-spatial-resolution", 
currentSpatialResolution); String[] indexes = cmd.getOptionValues("i"); String temporalPos = ""; Integer sizeSpatioTemp = 0; if (!(currentSpatialResolution.equals("points"))) { String spatialPos = ""; for (int i = 0; i < indexes.length; i++) { temporalPos += indexes[i] + ","; spatialPos += indexes[++i] + ","; sizeSpatioTemp++; } conf.set("spatial-pos", spatialPos); } else { String xPositions = "", yPositions = ""; for (int i = 0; i < indexes.length; i++) { temporalPos += indexes[i] + ","; xPositions += indexes[++i] + ","; yPositions += indexes[++i] + ","; sizeSpatioTemp++; } conf.set("xPositions", xPositions); conf.set("yPositions", yPositions); } conf.set("temporal-pos", temporalPos); conf.set("size-spatio-temporal", sizeSpatioTemp.toString()); // checking resolutions if (utils.spatialResolution(spatialResolution) < 0) { System.out.println("Invalid spatial resolution: " + spatialResolution); System.exit(-1); } if (utils.spatialResolution(spatialResolution) == FrameworkUtils.POINTS) { System.out.println("The data needs to be reduced at least to neighborhoods or grid."); System.exit(-1); } if (utils.spatialResolution(currentSpatialResolution) < 0) { System.out.println("Invalid spatial resolution: " + currentSpatialResolution); System.exit(-1); } if (utils.spatialResolution(currentSpatialResolution) > utils.spatialResolution(spatialResolution)) { System.out.println("The current spatial resolution is coarser than " + "the desired one. 
You can only navigate from a fine resolution" + " to a coarser one."); System.exit(-1); } if (utils.temporalResolution(temporalResolution) < 0) { System.out.println("Invalid temporal resolution: " + temporalResolution); System.exit(-1); } String fileName = s3bucket + FrameworkUtils.preProcessingDir + "/" + dataset + "-" + temporalResolution + "-" + spatialResolution + gridResolution; conf.set("aggregates", fileName + ".aggregates"); // making sure both files are removed, if they exist FrameworkUtils.removeFile(fileName, s3conf, s3); FrameworkUtils.removeFile(fileName + ".aggregates", s3conf, s3); /** * Hadoop Parameters * sources: http://www.slideshare.net/ImpetusInfo/ppt-on-advanced-hadoop-tuning-n-optimisation * https://cloudcelebrity.wordpress.com/2013/08/14/12-key-steps-to-keep-your-hadoop-cluster-running-strong-and-performing-optimum/ */ conf.set("mapreduce.tasktracker.map.tasks.maximum", String.valueOf(machineConf.getMaximumTasks())); conf.set("mapreduce.tasktracker.reduce.tasks.maximum", String.valueOf(machineConf.getMaximumTasks())); conf.set("mapreduce.jobtracker.maxtasks.perjob", "-1"); conf.set("mapreduce.reduce.shuffle.parallelcopies", "20"); conf.set("mapreduce.input.fileinputformat.split.minsize", "0"); conf.set("mapreduce.task.io.sort.mb", "200"); conf.set("mapreduce.task.io.sort.factor", "100"); // using SnappyCodec for intermediate and output data ? // TODO: for now, using SnappyCodec -- what about LZO + Protocol Buffer serialization? 
// LZO - http://www.oberhumer.com/opensource/lzo/#download // Hadoop-LZO - https://github.com/twitter/hadoop-lzo // Protocol Buffer - https://github.com/twitter/elephant-bird // General Info - http://www.devx.com/Java/Article/47913 // Compression - http://comphadoop.weebly.com/index.html if (snappyCompression) { conf.set("mapreduce.map.output.compress", "true"); conf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec"); conf.set("mapreduce.output.fileoutputformat.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec"); } if (bzip2Compression) { conf.set("mapreduce.map.output.compress", "true"); conf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.BZip2Codec"); conf.set("mapreduce.output.fileoutputformat.compress.codec", "org.apache.hadoop.io.compress.BZip2Codec"); } // TODO: this is dangerous! if (s3) { conf.set("fs.s3.awsAccessKeyId", awsAccessKeyId); conf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey); } Job job = new Job(conf); job.setJobName(dataset + "-" + temporalResolution + "-" + spatialResolution); job.setMapOutputKeyClass(MultipleSpatioTemporalWritable.class); job.setMapOutputValueClass(AggregationArrayWritable.class); job.setOutputKeyClass(MultipleSpatioTemporalWritable.class); job.setOutputValueClass(AggregationArrayWritable.class); job.setMapperClass(PreProcessingMapper.class); job.setCombinerClass(PreProcessingCombiner.class); job.setReducerClass(PreProcessingReducer.class); job.setNumReduceTasks(machineConf.getNumberReduces()); //job.setNumReduceTasks(1); job.setInputFormatClass(TextInputFormat.class); job.setOutputFormatClass(SequenceFileOutputFormat.class); SequenceFileOutputFormat.setCompressOutput(job, true); SequenceFileOutputFormat.setOutputCompressionType(job, CompressionType.BLOCK); FileInputFormat.setInputPaths(job, new Path(s3bucket + FrameworkUtils.dataDir + "/" + dataset)); FileOutputFormat.setOutputPath(job, new Path(fileName)); 
job.setJarByClass(PreProcessing.class); long start = System.currentTimeMillis(); job.submit(); job.waitForCompletion(true); System.out.println(fileName + "\t" + (System.currentTimeMillis() - start)); }
From source file:io.yucca.lucene.IndexUtility.java
/**
 * Command-line entry point: parses the options, validates the source and
 * destination index directories, and runs the field-removal operation.
 * Stores the parsed values in the class-level fields sourceIndexDirectory,
 * destIndexDirectory and version.
 *
 * @param args command-line arguments
 */
public static void main(String[] args) {
    CommandLineParser parser = new PosixParser();
    Options options = new Options();
    initOptions(options);
    try {
        final CommandLine line = parser.parse(options, args);
        if (line.hasOption('h')) {
            usage(options);
            // FIX: previously fell through after printing help and then
            // complained about the (irrelevant) missing -s/-d options
            System.exit(0);
        }
        if (line.hasOption('s')) {
            String value = line.getOptionValue('s');
            sourceIndexDirectory = new File(value);
            if (!sourceIndexDirectory.exists()) {
                log.error("Index source directory: {} does not exist!", sourceIndexDirectory);
                System.exit(1);
            }
        } else {
            usage(options);
            System.exit(1);
        }
        if (line.hasOption('d')) {
            String value = line.getOptionValue('d');
            destIndexDirectory = new File(value);
            if (destIndexDirectory.exists()) {
                log.error("Index destination directory: {} already exist", destIndexDirectory);
                System.exit(1);
            }
        } else {
            usage(options);
            System.exit(1);
        }
        if (line.hasOption('v')) {
            try {
                String value = line.getOptionValue('v');
                version = Version.parseLeniently(value);
            } catch (Exception e) {
                log.error("Unrecognized index version, exiting");
                usage(options);
                System.exit(1);
            }
        }
        if (line.hasOption('r')) {
            String value = line.getOptionValue('r');
            // split on commas, tolerating surrounding spaces
            String[] fields = value.trim().split(" *, *");
            // String.split never returns null, so only emptiness is checked
            if (fields.length == 0) {
                log.error("No fields were given, exiting");
                usage(options);
                System.exit(1);
            }
            (new FieldRemover()).removeFields(sourceIndexDirectory, destIndexDirectory, fields, version);
            System.exit(0);
        }
    } catch (IndexUtilityException e) {
        log.error("Failed to work on index:", e);
        System.exit(1);
    } catch (MissingOptionException e) {
        log.error("Mandatory options is missing!");
        usage(options);
        System.exit(1);
    } catch (ParseException e) {
        log.error("Failed to parse commandline options!");
        usage(options);
        System.exit(1);
    }
}
From source file:com.eternitywall.ots.OtsCli.java
/**
 * Command-line entry point for the OpenTimestamps client. Parses the options
 * into the class-level fields (calendarsUrl, m, signatureFile, shrink,
 * verbose, shasum, verifyFile, algorithm, cmd, files), then dispatches on the
 * first positional argument: info/i, stamp/s, verify/v or upgrade/u.
 */
public static void main(String[] args) {
    // Create the Options
    Options options = new Options();
    options.addOption("c", "calendar", true,
            "Create timestamp with the aid of a remote calendar. May be specified multiple times.");
    options.addOption("k", "key", true, "Signature key file of private remote calendars.");
    options.addOption("d", "digest", true, "Verify a (hex-encoded) digest rather than a file.");
    options.addOption("a", "algorithm", true,
            "Pass the hashing algorithm of the document to timestamp: SHA256(default), SHA1, RIPEMD160.");
    options.addOption("m", "", true,
            "Commitments are sent to remote calendars in the event of timeout the timestamp is considered done if at least M calendars replied.");
    options.addOption("s", "shrink", false, "Shrink upgraded timestamp.");
    options.addOption("V", "version", false, "Print " + title + " version.");
    options.addOption("v", "verbose", false, "Be more verbose..");
    options.addOption("f", "file", true,
            "Specify target file explicitly (default: original file present in the same directory without .ots)");
    options.addOption("h", "help", false, "print this help.");

    // Parse the args to retrieve options & command
    CommandLineParser parser = new BasicParser();
    try {
        // args are the arguments passed to the application via the main method
        CommandLine line = parser.parse(options, args);

        if (line.hasOption("c")) {
            // -c may be given multiple times; collect every calendar URL
            String[] cals = line.getOptionValues("c");
            calendarsUrl.addAll(Arrays.asList(cals));
        }

        if (line.hasOption("m")) {
            m = Integer.valueOf(line.getOptionValue("m"));
        }

        if (line.hasOption("k")) {
            // a private-calendar key replaces the public calendar list entirely
            signatureFile = line.getOptionValue("k");
            calendarsUrl.clear();
        }

        if (line.hasOption("s")) {
            shrink = true;
        }

        if (line.hasOption("v")) {
            verbose = true;
        }

        if (line.hasOption("V")) {
            // -V short-circuits: print the version and exit
            System.out.println("Version: " + title + " v." + version + '\n');
            return;
        }

        if (line.hasOption("h")) {
            showHelp();
            return;
        }

        if (line.hasOption("d")) {
            // verify a raw digest instead of hashing a file
            shasum = Utils.hexToBytes(line.getOptionValue("d"));
        }

        if (line.hasOption("f")) {
            verifyFile = line.getOptionValue("f");
        }

        if (line.hasOption("a")) {
            algorithm = line.getOptionValue("a");
            if (!Arrays.asList(algorithms).contains(algorithm.toUpperCase())) {
                System.out.println("Algorithm: " + algorithm + " not supported\n");
                return;
            }
        }

        if (line.getArgList().isEmpty()) {
            showHelp();
            return;
        }

        // first positional argument is the command; the rest are file operands
        cmd = line.getArgList().get(0);
        files = line.getArgList().subList(1, line.getArgList().size());
    } catch (Exception e) {
        System.out.println(title + ": invalid parameters ");
        return;
    }

    // Parse the command
    switch (cmd) {
    case "info":
    case "i":
        if (files.isEmpty()) {
            System.out.println("Show information on a timestamp given as argument.\n");
            System.out.println(title + " info: bad options ");
            break;
        }
        info(files.get(0), verbose);
        break;
    case "stamp":
    case "s":
        if (!files.isEmpty()) {
            multistamp(files, calendarsUrl, m, signatureFile, algorithm);
        } else if (shasum != null) {
            // stamp a bare digest supplied via -d
            Hash hash = new Hash(shasum, algorithm);
            stamp(hash, calendarsUrl, m, signatureFile);
        } else {
            System.out.println("Create timestamp with the aid of a remote calendar.\n");
            System.out.println(title + ": bad options number ");
        }
        break;
    case "verify":
    case "v":
        if (!files.isEmpty()) {
            Hash hash = null;
            if (shasum != null) {
                hash = new Hash(shasum, algorithm);
            }
            if (verifyFile == null) {
                // default target: the .ots file's name with the suffix removed
                verifyFile = files.get(0).replace(".ots", "");
            }
            verify(files.get(0), hash, verifyFile);
        } else {
            System.out.println("Verify the timestamp attestations given as argument.\n");
            System.out.println(title + ": bad options number ");
        }
        break;
    case "upgrade":
    case "u":
        if (files.isEmpty()) {
            System.out.println("Upgrade remote calendar timestamps to be locally verifiable.\n");
            System.out.println(title + ": bad options number ");
            break;
        }
        upgrade(files.get(0), shrink);
        break;
    default:
        System.out.println(title + ": bad option: " + cmd);
    }
}
From source file:com.damon.rocketmq.example.benchmark.Producer.java
/**
 * RocketMQ benchmark producer: floods the given topic from threadCount sender
 * threads while two timer tasks snapshot the stats every second and print a
 * throughput summary every ten seconds.
 * NOTE(review): the sender loops never terminate; the process is expected to
 * be stopped externally.
 */
public static void main(String[] args) throws MQClientException, UnsupportedEncodingException {
    Options options = ServerUtil.buildCommandlineOptions(new Options());
    CommandLine commandLine = ServerUtil.parseCmdLine("benchmarkProducer", args, buildCommandlineOptions(options),
            new PosixParser());
    if (null == commandLine) {
        System.exit(-1);
    }

    // benchmark knobs, each with a default when the flag is absent
    final String topic = commandLine.hasOption('t') ? commandLine.getOptionValue('t').trim() : "BenchmarkTest";
    final int threadCount = commandLine.hasOption('w') ? Integer.parseInt(commandLine.getOptionValue('w')) : 64;
    final int messageSize = commandLine.hasOption('s') ? Integer.parseInt(commandLine.getOptionValue('s')) : 128;
    final boolean keyEnable = commandLine.hasOption('k') && Boolean.parseBoolean(commandLine.getOptionValue('k'));

    System.out.printf("topic %s threadCount %d messageSize %d keyEnable %s%n", topic, threadCount, messageSize,
            keyEnable);

    final Logger log = ClientLogger.getLog();

    // a single message instance is shared by all sender threads
    final Message msg = buildMessage(messageSize, topic);

    final ExecutorService sendThreadPool = Executors.newFixedThreadPool(threadCount);

    final StatsBenchmarkProducer statsBenchmark = new StatsBenchmarkProducer();

    final Timer timer = new Timer("BenchmarkTimerThread", true);

    // sliding window of the 10 most recent one-second snapshots
    final LinkedList<Long[]> snapshotList = new LinkedList<Long[]>();

    timer.scheduleAtFixedRate(new TimerTask() {
        @Override
        public void run() {
            snapshotList.addLast(statsBenchmark.createSnapshot());
            if (snapshotList.size() > 10) {
                snapshotList.removeFirst();
            }
        }
    }, 1000, 1000);

    timer.scheduleAtFixedRate(new TimerTask() {
        // Computes TPS and average RT over the window spanned by the oldest
        // and newest snapshots. Snapshot layout is defined by
        // createSnapshot(); presumably [0]=timestamp, [2]=send failed,
        // [3]=send success, [4]=response failed, [5]=total RT -- TODO confirm.
        private void printStats() {
            if (snapshotList.size() >= 10) {
                Long[] begin = snapshotList.getFirst();
                Long[] end = snapshotList.getLast();

                final long sendTps = (long) (((end[3] - begin[3]) / (double) (end[0] - begin[0])) * 1000L);
                final double averageRT = (end[5] - begin[5]) / (double) (end[3] - begin[3]);

                System.out.printf(
                        "Send TPS: %d Max RT: %d Average RT: %7.3f Send Failed: %d Response Failed: %d%n",
                        sendTps, statsBenchmark.getSendMessageMaxRT().get(), averageRT, end[2], end[4]);
            }
        }

        @Override
        public void run() {
            try {
                this.printStats();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }, 10000, 10000);

    final DefaultMQProducer producer = new DefaultMQProducer("benchmark_producer");
    producer.setInstanceName(Long.toString(System.currentTimeMillis()));

    if (commandLine.hasOption('n')) {
        String ns = commandLine.getOptionValue('n');
        producer.setNamesrvAddr(ns);
    }

    // threshold set to MAX_VALUE effectively disables body compression
    producer.setCompressMsgBodyOverHowmuch(Integer.MAX_VALUE);

    producer.start();

    for (int i = 0; i < threadCount; i++) {
        sendThreadPool.execute(new Runnable() {
            @Override
            public void run() {
                while (true) {
                    try {
                        final long beginTimestamp = System.currentTimeMillis();
                        if (keyEnable) {
                            msg.setKeys(String.valueOf(beginTimestamp / 1000));
                        }
                        producer.send(msg);
                        statsBenchmark.getSendRequestSuccessCount().incrementAndGet();
                        statsBenchmark.getReceiveResponseSuccessCount().incrementAndGet();
                        final long currentRT = System.currentTimeMillis() - beginTimestamp;
                        statsBenchmark.getSendMessageSuccessTimeTotal().addAndGet(currentRT);
                        // CAS retry loop: lock-free update of the max RT,
                        // safe against concurrent sender threads
                        long prevMaxRT = statsBenchmark.getSendMessageMaxRT().get();
                        while (currentRT > prevMaxRT) {
                            boolean updated = statsBenchmark.getSendMessageMaxRT().compareAndSet(prevMaxRT,
                                    currentRT);
                            if (updated)
                                break;

                            prevMaxRT = statsBenchmark.getSendMessageMaxRT().get();
                        }
                    } catch (RemotingException e) {
                        statsBenchmark.getSendRequestFailedCount().incrementAndGet();
                        log.error("[BENCHMARK_PRODUCER] Send Exception", e);
                        // back off before retrying after a transport failure
                        try {
                            Thread.sleep(3000);
                        } catch (InterruptedException ignored) {
                        }
                    } catch (InterruptedException e) {
                        statsBenchmark.getSendRequestFailedCount().incrementAndGet();
                        try {
                            Thread.sleep(3000);
                        } catch (InterruptedException e1) {
                        }
                    } catch (MQClientException e) {
                        statsBenchmark.getSendRequestFailedCount().incrementAndGet();
                        log.error("[BENCHMARK_PRODUCER] Send Exception", e);
                    } catch (MQBrokerException e) {
                        statsBenchmark.getReceiveResponseFailedCount().incrementAndGet();
                        log.error("[BENCHMARK_PRODUCER] Send Exception", e);
                        try {
                            Thread.sleep(3000);
                        } catch (InterruptedException ignored) {
                        }
                    }
                }
            }
        });
    }
}
From source file:eu.fbk.utils.twm.FormPageSearcher.java
/**
 * Command-line entry point: configures log4j, declares the CLI options via
 * the (stateful, static) OptionBuilder, then performs cache loading, a
 * one-shot search and/or interactive mode according to the parsed options.
 *
 * @param args command-line arguments
 * @throws Exception if the index cannot be opened
 */
public static void main(final String args[]) throws Exception {
    // log4j configuration file, overridable via -Dlog-config=...
    String logConfig = System.getProperty("log-config");
    if (logConfig == null) {
        logConfig = "configuration/log-config.txt";
    }
    PropertyConfigurator.configure(logConfig);

    final Options options = new Options();
    try {
        // NOTE(review): OptionBuilder accumulates static state; each
        // create(...) call consumes the settings made since the previous
        // create, so the order of these calls matters.
        OptionBuilder.withArgName("index");
        OptionBuilder.hasArg();
        OptionBuilder.withDescription("open an index with the specified name");
        OptionBuilder.isRequired();
        OptionBuilder.withLongOpt("index");
        final Option indexNameOpt = OptionBuilder.create("i");

        OptionBuilder.withArgName("interactive-mode");
        OptionBuilder.withDescription("enter in the interactive mode");
        OptionBuilder.withLongOpt("interactive-mode");
        final Option interactiveModeOpt = OptionBuilder.create("t");

        OptionBuilder.withArgName("search");
        OptionBuilder.hasArg();
        OptionBuilder.withDescription("search for the specified key");
        OptionBuilder.withLongOpt("search");
        final Option searchOpt = OptionBuilder.create("s");

        OptionBuilder.withArgName("key-freq");
        OptionBuilder.hasArg();
        OptionBuilder.withDescription("read the keys' frequencies from the specified file");
        OptionBuilder.withLongOpt("key-freq");
        final Option freqFileOpt = OptionBuilder.create("f");

        OptionBuilder.withArgName("minimum-freq");
        // Option keyFieldNameOpt =
        // OptionBuilder.withArgName("key-field-name").hasArg().withDescription("use the specified name for the field key").withLongOpt("key-field-name").create("k");
        // Option valueFieldNameOpt =
        // OptionBuilder.withArgName("value-field-name").hasArg().withDescription("use the specified name for the field value").withLongOpt("value-field-name").create("v");
        final Option minimumKeyFreqOpt = OptionBuilder.hasArg()
                .withDescription("minimum key frequency of cached values (default is " + DEFAULT_MIN_FREQ + ")")
                .withLongOpt("minimum-freq").create("m");

        OptionBuilder.withArgName("int");
        final Option notificationPointOpt = OptionBuilder.hasArg()
                .withDescription(
                        "receive notification every n pages (default is " + DEFAULT_NOTIFICATION_POINT + ")")
                .withLongOpt("notification-point").create("b");

        options.addOption("h", "help", false, "print this message");
        options.addOption("v", "version", false, "output version information and exit");
        options.addOption(indexNameOpt);
        options.addOption(interactiveModeOpt);
        options.addOption(searchOpt);
        options.addOption(freqFileOpt);
        // options.addOption(keyFieldNameOpt);
        // options.addOption(valueFieldNameOpt);
        options.addOption(minimumKeyFreqOpt);
        options.addOption(notificationPointOpt);

        final CommandLineParser parser = new PosixParser();
        final CommandLine line = parser.parse(options, args);
        if (line.hasOption("help") || line.hasOption("version")) {
            // an empty-message ParseException routes to the help screen below
            throw new ParseException("");
        }

        int minFreq = DEFAULT_MIN_FREQ;
        if (line.hasOption("minimum-freq")) {
            minFreq = Integer.parseInt(line.getOptionValue("minimum-freq"));
        }

        int notificationPoint = DEFAULT_NOTIFICATION_POINT;
        if (line.hasOption("notification-point")) {
            notificationPoint = Integer.parseInt(line.getOptionValue("notification-point"));
        }

        final FormPageSearcher pageFormSearcher = new FormPageSearcher(line.getOptionValue("index"));
        pageFormSearcher.setNotificationPoint(notificationPoint);
        /*
         * logger.debug(line.getOptionValue("key-field-name") + "\t" +
         * line.getOptionValue("value-field-name")); if (line.hasOption("key-field-name")) {
         * pageFormSearcher.setKeyFieldName(line.getOptionValue("key-field-name")); } if
         * (line.hasOption("value-field-name")) {
         * pageFormSearcher.setValueFieldName(line.getOptionValue("value-field-name")); }
         */
        if (line.hasOption("key-freq")) {
            // pre-populate the cache from a key-frequency file
            pageFormSearcher.loadCache(line.getOptionValue("key-freq"), minFreq);
        }
        if (line.hasOption("search")) {
            logger.debug("searching " + line.getOptionValue("search") + "...");
            final FreqSetSearcher.Entry[] result = pageFormSearcher.search(line.getOptionValue("search"));
            logger.info(Arrays.toString(result));
        }
        if (line.hasOption("interactive-mode")) {
            pageFormSearcher.interactive();
        }
    } catch (final ParseException e) {
        // oops, something went wrong
        if (e.getMessage().length() > 0) {
            System.out.println("Parsing failed: " + e.getMessage() + "\n");
        }
        final HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(400,
                "java -cp dist/thewikimachine.jar org.fbk.cit.hlt.thewikimachine.index.FormPageSearcher", "\n",
                options, "\n", true);
    }
}
From source file:fr.liglab.jlcm.RunPLCM.java
/**
 * Command-line entry point: declares the supported flags, parses the
 * arguments and either prints the manual or launches a standalone mining run.
 *
 * @param args command-line arguments
 * @throws Exception propagated from the mining run
 */
public static void main(String[] args) throws Exception {
    final Options opts = new Options();
    final CommandLineParser cliParser = new PosixParser();

    opts.addOption("a", false, "Output all frequent itemsets, not only closed ones");
    opts.addOption("b", false,
            "Benchmark mode : patterns are not outputted at all (in which case OUTPUT_PATH is ignored)");
    opts.addOption("h", false, "Show help");
    opts.addOption("m", false,
            "Give peak memory usage after mining (instanciates a watcher thread that periodically triggers garbage collection)");
    opts.addOption("s", false, "Sort items in outputted patterns, in ascending order");
    opts.addOption("S", false, "Use arbitrary strings as item IDs (space-separated)");
    opts.addOption("t", true,
            "How many threads will be launched (defaults to your machine's processors count)");
    opts.addOption("v", false, "Enable verbose mode, which logs every extension of the empty pattern");
    opts.addOption("V", false,
            "Enable ultra-verbose mode, which logs every pattern extension (use with care: it may produce a LOT of output)");

    try {
        final CommandLine parsed = cliParser.parse(opts, args);
        final int positionalCount = parsed.getArgs().length;
        final boolean badArity = positionalCount < 2 || positionalCount > 3;

        // show the manual on -h or on a wrong number of positional arguments
        if (badArity || parsed.hasOption('h')) {
            printMan(opts);
        } else {
            standalone(parsed);
        }
    } catch (ParseException e) {
        printMan(opts);
    }
}
From source file:dhtaccess.benchmark.ThroughputMeasure.java
public static void main(String[] args) { boolean details = false; int repeats = DEFAULT_REPEATS; int queryFreq = DEFAULT_QUERIES_PER_SEC; boolean doPut = true; // parse options Options options = new Options(); options.addOption("h", "help", false, "print help"); options.addOption("d", "details", false, "requests secret hash and TTL"); options.addOption("r", "repeats", true, "number of requests"); options.addOption("f", "freq", true, "number of queries per second"); options.addOption("n", "no-put", false, "does not put"); CommandLineParser parser = new PosixParser(); CommandLine cmd = null;//w ww . java 2s . c o m try { cmd = parser.parse(options, args); } catch (ParseException e) { System.out.println("There is an invalid option."); e.printStackTrace(); System.exit(1); } String optVal; if (cmd.hasOption('h')) { usage(COMMAND); System.exit(1); } if (cmd.hasOption('d')) { details = true; } optVal = cmd.getOptionValue('r'); if (optVal != null) { repeats = Integer.parseInt(optVal); } optVal = cmd.getOptionValue('f'); if (optVal != null) { queryFreq = Integer.parseInt(optVal); } if (cmd.hasOption('n')) { doPut = false; } args = cmd.getArgs(); // parse arguments if (args.length < 1) { usage(COMMAND); System.exit(1); } (new ThroughputMeasure()).start(details, repeats, queryFreq, doPut, args); }
From source file:com.alibaba.rocketmq.example.benchmark.Producer.java
/**
 * RocketMQ producer benchmark driver.
 *
 * Parses the benchmark options, starts a fixed pool of sender threads that
 * send the same message in an endless loop, and schedules two daemon timer
 * tasks: one samples stats snapshots every second into a rolling window, the
 * other prints TPS/RT figures every 10 seconds. This method never returns
 * normally; the process runs until killed.
 */
public static void main(String[] args) throws MQClientException, UnsupportedEncodingException {
    // Shared server options plus the producer-specific ones from buildCommandlineOptions.
    Options options = ServerUtil.buildCommandlineOptions(new Options());
    CommandLine commandLine = ServerUtil.parseCmdLine("producer", args, buildCommandlineOptions(options),
            new PosixParser());
    if (null == commandLine) {
        // NOTE(review): presumably parseCmdLine already reported the problem — confirm in ServerUtil.
        System.exit(-1);
    }

    // -t: sender thread count (default 64).
    final int threadCount = commandLine.hasOption('t') ? Integer.parseInt(commandLine.getOptionValue('t')) : 64;
    // -s: message body size (default 128; unit presumably bytes — see buildMessage).
    final int messageSize = commandLine.hasOption('s') ? Integer.parseInt(commandLine.getOptionValue('s')) : 128;
    // -k: stamp each message with a second-resolution key (default false).
    final boolean keyEnable = commandLine.hasOption('k') ? Boolean.parseBoolean(commandLine.getOptionValue('k'))
            : false;

    System.out.printf("threadCount %d messageSize %d keyEnable %s%n", threadCount, messageSize, keyEnable);

    final Logger log = ClientLogger.getLog();

    final Message msg = buildMessage(messageSize);

    final ExecutorService sendThreadPool = Executors.newFixedThreadPool(threadCount);

    final StatsBenchmarkProducer statsBenchmark = new StatsBenchmarkProducer();

    // Daemon timer: does not keep the JVM alive by itself.
    final Timer timer = new Timer("BenchmarkTimerThread", true);

    // Rolling window of the 10 most recent snapshots, sampled once per second.
    final LinkedList<Long[]> snapshotList = new LinkedList<Long[]>();

    timer.scheduleAtFixedRate(new TimerTask() {
        @Override
        public void run() {
            snapshotList.addLast(statsBenchmark.createSnapshot());
            if (snapshotList.size() > 10) {
                snapshotList.removeFirst();
            }
        }
    }, 1000, 1000);

    timer.scheduleAtFixedRate(new TimerTask() {
        // Derives TPS and average RT by diffing the oldest and newest snapshot in the
        // window. Snapshot index meanings come from StatsBenchmarkProducer.createSnapshot();
        // presumably [0]=timestamp, [3]=send success count, [5]=success RT total,
        // [2]=send failed, [4]=response failed — TODO confirm against that class.
        private void printStats() {
            if (snapshotList.size() >= 10) {
                Long[] begin = snapshotList.getFirst();
                Long[] end = snapshotList.getLast();

                final long sendTps = (long) (((end[3] - begin[3]) / (double) (end[0] - begin[0])) * 1000L);
                final double averageRT = ((end[5] - begin[5]) / (double) (end[3] - begin[3]));

                System.out.printf("Send TPS: %d Max RT: %d Average RT: %7.3f Send Failed: %d Response Failed: %d%n",
                        sendTps, statsBenchmark.getSendMessageMaxRT().get(), averageRT, end[2], end[4]);
            }
        }

        @Override
        public void run() {
            try {
                this.printStats();
            } catch (Exception e) {
                // Never let a stats glitch kill the timer thread.
                e.printStackTrace();
            }
        }
    }, 10000, 10000);

    final DefaultMQProducer producer = new DefaultMQProducer("benchmark_producer");
    producer.setInstanceName(Long.toString(System.currentTimeMillis()));

    // -n: optional name-server address override.
    if (commandLine.hasOption('n')) {
        String ns = commandLine.getOptionValue('n');
        producer.setNamesrvAddr(ns);
    }

    // MAX_VALUE threshold effectively disables client-side body compression.
    producer.setCompressMsgBodyOverHowmuch(Integer.MAX_VALUE);

    producer.start();

    for (int i = 0; i < threadCount; i++) {
        sendThreadPool.execute(new Runnable() {
            @Override
            public void run() {
                // Each worker sends the shared message in a tight, endless loop.
                while (true) {
                    try {
                        final long beginTimestamp = System.currentTimeMillis();
                        if (keyEnable) {
                            msg.setKeys(String.valueOf(beginTimestamp / 1000));
                        }
                        producer.send(msg);
                        statsBenchmark.getSendRequestSuccessCount().incrementAndGet();
                        statsBenchmark.getReceiveResponseSuccessCount().incrementAndGet();
                        final long currentRT = System.currentTimeMillis() - beginTimestamp;
                        statsBenchmark.getSendMessageSuccessTimeTotal().addAndGet(currentRT);

                        // CAS loop: publish currentRT as the new max RT if it beats the current one.
                        long prevMaxRT = statsBenchmark.getSendMessageMaxRT().get();
                        while (currentRT > prevMaxRT) {
                            boolean updated = statsBenchmark.getSendMessageMaxRT().compareAndSet(prevMaxRT,
                                    currentRT);
                            if (updated)
                                break;

                            prevMaxRT = statsBenchmark.getSendMessageMaxRT().get();
                        }
                    } catch (RemotingException e) {
                        statsBenchmark.getSendRequestFailedCount().incrementAndGet();
                        log.error("[BENCHMARK_PRODUCER] Send Exception", e);

                        try {
                            Thread.sleep(3000);
                        } catch (InterruptedException e1) {
                            // NOTE(review): interrupt is swallowed and the loop continues;
                            // the thread's interrupt status is not restored.
                        }
                    } catch (InterruptedException e) {
                        statsBenchmark.getSendRequestFailedCount().incrementAndGet();
                        try {
                            Thread.sleep(3000);
                        } catch (InterruptedException e1) {
                            // Ignored: benchmark loop keeps running regardless.
                        }
                    } catch (MQClientException e) {
                        statsBenchmark.getSendRequestFailedCount().incrementAndGet();
                        log.error("[BENCHMARK_PRODUCER] Send Exception", e);
                    } catch (MQBrokerException e) {
                        statsBenchmark.getReceiveResponseFailedCount().incrementAndGet();
                        log.error("[BENCHMARK_PRODUCER] Send Exception", e);
                        try {
                            Thread.sleep(3000);
                        } catch (InterruptedException e1) {
                            // Ignored: back-off sleep was interrupted.
                        }
                    }
                }
            }
        });
    }
}
From source file:com.microsoft.kafkaavailability.App.java
public static void main(String[] args) throws IOException, MetaDataManagerException, InterruptedException { System.out.println("Starting KafkaAvailability Tool"); IPropertiesManager appPropertiesManager = new PropertiesManager<AppProperties>("appProperties.json", AppProperties.class); appProperties = (AppProperties) appPropertiesManager.getProperties(); Options options = new Options(); options.addOption("r", "run", true, "Number of runs. Don't use this argument if you want to run infintely."); options.addOption("s", "sleep", true, "Time (in milliseconds) to sleep between each run. Default is 300000"); Option clusterOption = Option.builder("c").hasArg().required(true).longOpt("cluster") .desc("(REQUIRED) Cluster name").build(); options.addOption(clusterOption);/*w w w .ja va2s .c om*/ CommandLineParser parser = new DefaultParser(); HelpFormatter formatter = new HelpFormatter(); try { // parse the command line arguments CommandLine line = parser.parse(options, args); int howManyRuns; m_cluster = line.getOptionValue("cluster"); MDC.put("cluster", m_cluster); if (line.hasOption("sleep")) { m_sleepTime = Integer.parseInt(line.getOptionValue("sleep")); } if (line.hasOption("run")) { howManyRuns = Integer.parseInt(line.getOptionValue("run")); for (int i = 0; i < howManyRuns; i++) { InitMetrics(); RunOnce(); Thread.sleep(m_sleepTime); } } else { while (true) { InitMetrics(); RunOnce(); Thread.sleep(m_sleepTime); } } } catch (ParseException exp) { // oops, something went wrong System.err.println("Parsing failed. Reason: " + exp.getMessage()); formatter.printHelp("KafkaAvailability", options); } }