Usage examples for org.apache.commons.cli.Option.getValue()
public String getValue()

Returns the specified value of this Option, or null if there is no value.
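Before the collected examples, here is a minimal, self-contained sketch of the pattern they all share: parse the command line, then iterate cmdline.getOptions() and read each option's argument with getValue(). The option names are hypothetical, and DefaultParser (Commons CLI 1.3+) stands in for the older PosixParser used in the examples below.

import org.apache.commons.cli.*;

public class GetValueExample {
    public static void main(String[] args) throws ParseException {
        Options options = new Options();
        options.addOption("o", "output", true, "output file (takes a value)");
        options.addOption("v", "verbose", false, "verbose flag (no value)");

        CommandLine cmdline = new DefaultParser().parse(options, args);
        for (Option option : cmdline.getOptions()) {
            // getValue() returns the parsed argument, or null for flags like -v
            System.out.println(option.getOpt() + " = " + option.getValue());
        }
    }
}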
From source file:org.oclc.firefly.hadoop.backup.Backup.java
/**
 * Entry point
 * @param args Command line arguments
 * @throws Exception exception
 */
public static void main(String[] args) throws Exception {
    int initialReplication = 1;
    int finalReplication = 0;
    int numMaps = 2;
    int tries = 0;
    String tbl = null;
    String dest = null;
    String user = System.getProperty("user.name");
    Path destPath = null;

    CommandLineParser parser = new PosixParser();
    CommandLine cmdline = null;

    // Parse command line options
    try {
        cmdline = parser.parse(getOptions(), args);
    } catch (org.apache.commons.cli.ParseException e) {
        System.out.println(e.getMessage());
        printOptions();
        System.exit(-1);
    }

    // Get command line options. Option.getId() returns the character of the
    // option's short name, so single-character options can drive a switch.
    for (Option option : cmdline.getOptions()) {
        switch (option.getId()) {
        case 'd':
            dest = option.getValue();
            destPath = new Path(dest);
            if (!destPath.isAbsolute()) {
                throw new IllegalArgumentException("Destination path must be an absolute path");
            }
            break;
        case 'm':
            numMaps = Integer.parseInt(option.getValue());
            if (numMaps <= 0) {
                throw new IllegalArgumentException("Number of map tasks must be greater than zero.");
            }
            break;
        case 'n':
            tries = Integer.parseInt(option.getValue());
            if (tries < 0) {
                throw new IllegalArgumentException("Maximum number of tries must be greater than or equal to zero.");
            }
            break;
        case 'f':
            finalReplication = Integer.parseInt(option.getValue());
            if (finalReplication <= 0) {
                throw new IllegalArgumentException("Final replication must be greater than zero.");
            }
            break;
        case 'r':
            initialReplication = Integer.parseInt(option.getValue());
            if (initialReplication <= 0) {
                throw new IllegalArgumentException("Initial replication must be greater than zero.");
            }
            break;
        case 't':
            tbl = option.getValue();
            break;
        case 'u':
            user = option.getValue();
            break;
        default:
            throw new IllegalArgumentException("unexpected option " + option);
        }
    }

    String[] tables = null;
    if (tbl != null) {
        tables = tbl.split(",");
    }

    Configuration srcConf = HBaseConfiguration.create();
    Configuration dstConf = HBaseConfiguration.create();

    // This allows us to copy to a separate HDFS instance
    String destDir = null;
    if (dest != null) {
        destDir = destPath.toUri().getPath();
        String fsName = null;

        if (destDir != null && destDir.length() > 0) {
            LOG.debug("destination dfs: " + dest.substring(0, dest.length() - destDir.length()));
            fsName = dest.substring(0, dest.length() - destDir.length());
        } else {
            fsName = dest;
            destDir = null;
        }

        if (fsName != null && fsName.length() > 0) {
            dstConf.set("fs.default.name", fsName);
        }
    }

    Backup backup = new Backup(srcConf, dstConf);
    backup.setInitialReplication(initialReplication);
    backup.setFinalReplication(finalReplication);
    backup.setUsername(user);
    backup.setNumMapTasks(numMaps);
    if (destDir != null) {
        backup.setBackupStoreDirectory(destDir);
    }

    LOG.info("HBase backup tool");
    LOG.info("--------------------------------------------------");
    //LOG.info("Destination fs     : " + dstConf.get("fs.default.name"));
    LOG.info("Initial replication: " + backup.getInitialReplication());
    LOG.info("Final replication  : " + backup.getFinalReplication());
    LOG.info("Number of attempts : " + ((tries == 0) ? "Until nothing left to copy" : tries));
    LOG.info("Username           : " + backup.getUsername());
    LOG.info("Number map tasks   : " + backup.getNumMapTasks());
    LOG.info("Backup store path  : " + backup.getBackupStoreDirectory());
    LOG.info("--------------------------------------------------");

    boolean success = backup.doMajorCopy(tables, tries);

    LOG.info("--------------------------------------------------");
    if (success) {
        LOG.info("Backup located at: " + backup.getBackupDirectoryPath());
        LOG.info("Backup complete");
    } else {
        LOG.info("Files located at: " + backup.getBackupDirectoryPath());
        LOG.info("Backup failed");
    }

    System.exit(success ? 0 : -1);
}
From source file:org.oclc.firefly.hadoop.backup.Import.java
/**
 * Import table entry point
 * @param args Command line arguments
 * @throws Exception If failed to read from file system
 */
public static void main(String[] args) throws Exception {
    boolean copy = false;
    boolean ignoreBadName = false;
    String inputDir = null;
    String tbl = null;

    CommandLineParser parser = new PosixParser();
    CommandLine cmdline = null;

    // Parse command line options
    try {
        cmdline = parser.parse(getOptions(), args);
    } catch (org.apache.commons.cli.ParseException e) {
        System.out.println(e.getMessage());
        printOptions();
        System.exit(-1);
    }

    // Get command line options
    for (Option option : cmdline.getOptions()) {
        switch (option.getId()) {
        case 'b':
            ignoreBadName = true;
            break;
        case 'i':
            inputDir = option.getValue();
            break;
        case 'c':
            copy = true;
            break;
        case 't':
            tbl = option.getValue();
            break;
        default:
            throw new IllegalArgumentException("unexpected option " + option);
        }
    }

    String[] tables = null;
    Configuration conf = HBaseConfiguration.create();
    Path backupDirPath = new Path(inputDir);

    Import importer = new Import(conf, backupDirPath, ignoreBadName);
    importer.setRetainOriginal(copy);

    if (tbl == null) {
        tables = importer.getTableNames();
    } else {
        tables = tbl.split(",");
    }

    LOG.info("HBase import tool");
    LOG.info("--------------------------------------------------");
    LOG.info("Backup start time   : " + importer.getStartDate());
    LOG.info("Backup end time     : " + importer.getEndDate());
    LOG.info("Retain original copy: " + importer.getRetainOriginal());
    LOG.info("HBase location      : " + conf.get(HConstants.HBASE_DIR));
    LOG.info("Backup location     : " + backupDirPath);
    LOG.info("--------------------------------------------------");

    importer.importAll(tables);

    int totalSuccess = importer.getNumTablesImported();
    int totalFailed = importer.getNumFailedImports();

    LOG.info("Import results");
    LOG.info("--------------------------------------------------");
    LOG.info("Number of tables: " + tables.length);
    LOG.info("Imported tables : " + totalSuccess);
    LOG.info("Failed          : " + totalFailed);
    LOG.info("--------------------------------------------------");

    if (totalFailed == 0) {
        LOG.info("Import completed successfully.");
    } else if (totalSuccess > 0) {
        LOG.warn("Import completed but with errors. Please inspect manually.");
    } else {
        LOG.error("Import failed. Please inspect manually.");
        System.exit(1);
    }

    System.exit(0);
}
From source file:org.oclc.firefly.hadoop.backup.LogCopier.java
/**
 * Main
 * @param args Command line arguments
 * @throws Exception If failed to read from file system
 */
public static void main(String[] args) throws Exception {
    String destDirectory = null;
    long frequency = DEFAULT_FREQUENCY;
    long threads = DEFAULT_NUM_THREADS;
    long dtl = 0L;

    CommandLineParser parser = new PosixParser();
    CommandLine cmdline = null;

    // Parse command line options
    try {
        cmdline = parser.parse(getOptions(), args);
    } catch (org.apache.commons.cli.ParseException e) {
        System.out.println(e.getMessage());
        printOptions();
        System.exit(-1);
    }

    // Get command line options
    for (Option option : cmdline.getOptions()) {
        switch (option.getId()) {
        case 'd':
            destDirectory = option.getValue();
            break;
        case 'm':
            frequency = Long.parseLong(option.getValue());
            if (frequency <= 0) {
                throw new IllegalArgumentException("Minutes must be greater than 0");
            }
            break;
        case 'l':
            dtl = Long.parseLong(option.getValue());
            if (dtl < 0) {
                throw new IllegalArgumentException("Log days-to-live must be non-negative");
            }
            break;
        case 't':
            threads = Long.parseLong(option.getValue());
            if (threads <= 0) {
                throw new IllegalArgumentException("Number of threads must be greater than 0");
            }
            break;
        default:
            throw new IllegalArgumentException("unexpected option " + option);
        }
    }

    LogCopier copier = new LogCopier(destDirectory, frequency);
    copier.setLogDaysToLive(dtl);
    copier.setNumberThreads(threads);

    LOG.info("--------------------------------------------------");
    LOG.info("Copy frequency    : " + copier.getCopyFrequency() + " minutes");
    LOG.info("Archive directory : " + copier.getArchiveDirectory());
    LOG.info("Log days to live  : " + copier.getLogDaysToLive() + " days");
    LOG.info("Copy threads      : " + copier.getNumberThreads());
    LOG.info("--------------------------------------------------");

    copier.run();
}
From source file:org.onebusaway.gtfs_merge.GtfsMergerMain.java
private void processOptions(CommandLine cli, GtfsMerger merger) {
    OptionHandler currentOptionHandler = null;
    AbstractEntityMergeStrategy mergeStrategy = null;

    for (Option option : cli.getOptions()) {
        if (option.getOpt().equals(ARG_FILE)) {
            String filename = option.getValue();
            Class<?> entityClass = _entityClassesByFilename.get(filename);
            if (entityClass == null) {
                throw new IllegalStateException("unknown GTFS filename: " + filename);
            }
            mergeStrategy = getMergeStrategyForEntityClass(entityClass, merger);
            currentOptionHandler = getOptionHandlerForEntityClass(entityClass);
        } else {
            if (currentOptionHandler == null) {
                throw new IllegalArgumentException(
                        "you must specify a --file argument first before specifying file-specific arguments");
            }
            currentOptionHandler.handleOption(option, mergeStrategy);
        }
    }
}
From source file:org.onebusaway.gtfs_merge.OptionHandler.java
public void handleOption(Option option, AbstractEntityMergeStrategy strategy) {
    if (option.getOpt().equals(GtfsMergerMain.ARG_DUPLICATE_DETECTION)) {
        String strategyName = option.getValue().toUpperCase();
        strategy.setDuplicateDetectionStrategy(EDuplicateDetectionStrategy.valueOf(strategyName));
    } else if (option.getOpt().equals(GtfsMergerMain.ARG_LOG_DROPPED_DUPLICATES)) {
        strategy.setLogDuplicatesStrategy(ELogDuplicatesStrategy.WARNING);
    } else if (option.getOpt().equals(GtfsMergerMain.ARG_ERROR_ON_DROPPED_DUPLICATES)) {
        strategy.setLogDuplicatesStrategy(ELogDuplicatesStrategy.ERROR);
    }
}
From source file:org.onebusaway.gtfs_transformer.GtfsTransformerMain.java
protected void runApplication(CommandLine cli, String[] originalArgs) throws Exception {
    String[] args = cli.getArgs();

    if (args.length < 2) {
        printHelp();
        System.exit(-1);
    }

    List<File> paths = new ArrayList<File>();
    for (int i = 0; i < args.length - 1; ++i) {
        paths.add(new File(args[i]));
        _log.info("input path: " + args[i]);
    }

    GtfsTransformer transformer = new GtfsTransformer();
    transformer.setGtfsInputDirectories(paths);
    transformer.setOutputDirectory(new File(args[args.length - 1]));
    _log.info("output path: " + args[args.length - 1]);

    Option[] options = getOptionsInCommandLineOrder(cli, originalArgs);

    for (Option option : options) {
        String name = option.getOpt();

        if (name.equals(ARG_REMOVE_REPEATED_STOP_TIMES))
            configureRemoveRepeatedStopTimes(transformer);

        if (name.equals(ARG_REMOVE_DUPLICATE_TRIPS))
            configureRemoveDuplicateTrips(transformer);

        if (name.equals(ARG_CHECK_STOP_TIMES))
            configureEnsureStopTimesInOrder(transformer);

        if (name.equals(ARG_AGENCY_ID))
            configureAgencyId(transformer, cli.getOptionValue(ARG_AGENCY_ID));

        if (name.equals(ARG_MODIFICATIONS) || name.equals(ARG_TRANSFORM))
            GtfsTransformerLibrary.configureTransformation(transformer, option.getValue());

        if (name.equals(ARG_LOCAL_VS_EXPRESS))
            configureLocalVsExpressUpdates(transformer);

        if (name.equals(ARG_OVERWRITE_DUPLICATES)) {
            transformer.getReader().setOverwriteDuplicates(true);
        }
    }

    transformer.run();
}
From source file:org.openbel.framework.tools.CacheManager.java
/**
 * Handle the cli option that was provided.
 *
 * @param option {@link Option} the current option to handle
 */
private void handleOption(Option option) {
    if (LIST_CACHE_OPTION.equals(option.getOpt())) {
        handleListCache();
    } else if (CACHE_RESOURCE_OPTION.equals(option.getOpt())) {
        handleLoadResource(option);
    } else if (CACHE_SYSCONFIG_FILE_OPTION.equals(option.getOpt())) {
        handleLoadDefaultIndex();
    } else if (CACHE_INDEX_FILE.equals(option.getOpt())) {
        handleLoadIndex(option.getValue());
    } else if (PURGE_CACHE_OPTION.equals(option.getOpt())) {
        handlePurgeCache();
    } else if (GENERATE_CHECKSUM_OPTION.equals(option.getOpt())) {
        handleGenerateHash(option);
    }
}
From source file:org.openbel.framework.tools.CacheManager.java
/**
 * Handle the load resource option.
 *
 * @param option {@link Option} the option to fetch the cli argument from
 */
protected void handleLoadResource(Option option) {
    String resourceLocation = option.getValue();
    reportable.output("Loading resource into the cache:");
    reportable.output("  " + resourceLocation);

    ResourceType type = ResourceType.fromLocation(resourceLocation);
    if (type == null) {
        reportable.error("Resource type cannot be determined, consult help with -h.");
        bail(GENERAL_FAILURE);
    }

    cacheMgrService.updateResourceInCache(type, resourceLocation);
}
From source file:org.openbel.framework.tools.CacheManager.java
/**
 * Handle the generate hash option.
 *
 * @param option {@link Option} the option to fetch the cli argument from
 */
protected void handleGenerateHash(Option option) {
    String hashFileLocation = option.getValue();
    reportable.output("Generating checksum for file: " + hashFileLocation);

    File hashFile = new File(hashFileLocation);
    if (!hashFile.exists() || !hashFile.canRead()) {
        reportable.error("File cannot be read");
        bail(GENERAL_FAILURE);
    }

    String hashContents = null;
    try {
        hashContents = FileUtils.readFileToString(hashFile);
    } catch (IOException e) {
        reportable.error("Unable to read file");
        reportable.error("Reason: " + e.getMessage());
        bail(GENERAL_FAILURE);
    }

    Hasher hasher = Hasher.INSTANCE;
    try {
        String generatedHash = hasher.hashValue(hashContents);
        reportable.output("Checksum: " + generatedHash);
    } catch (Exception e) {
        reportable.error("Unable to create checksum");
        reportable.error("Reason: " + e.getMessage());
        bail(GENERAL_FAILURE);
    }
}
From source file:org.prot.appserver.config.ArgumentParser.java
public static void dump() {
    Logger logger = Logger.getLogger(ArgumentParser.class);
    if (cmd == null) {
        logger.error("Missing startup arguments");
        return;
    }

    logger.info("Startup arguments:");
    for (Option option : cmd.getOptions()) {
        logger.info(option.getOpt() + " = " + option.getValue());
    }
}
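Besides the no-argument form used throughout these examples, Option also provides a getValue(String defaultValue) overload that substitutes a default instead of returning null. A minimal sketch, assuming the standard Commons CLI API; the option name and the /tmp/backup default are hypothetical:

import org.apache.commons.cli.*;

public class GetValueDefaultExample {
    public static void main(String[] args) throws ParseException {
        Options options = new Options();
        Option dest = new Option("d", "dest", true, "destination directory");
        dest.setOptionalArg(true); // -d may appear with or without a value
        options.addOption(dest);

        CommandLine cmdline = new DefaultParser().parse(options, args);
        for (Option option : cmdline.getOptions()) {
            if ("d".equals(option.getOpt())) {
                // Falls back to the default when -d was given without an argument
                System.out.println("dest = " + option.getValue("/tmp/backup"));
            }
        }
    }
}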