List of usage examples for org.apache.commons.cli CommandLineParser#parse(Options, String[])
CommandLine parse(Options options, String[] arguments) throws ParseException;
From source file:com.alibaba.jstorm.flux.Flux.java
public static void main(String[] args) throws Exception { Options options = new Options(); options.addOption(option(0, "l", OPTION_LOCAL, "Run the topology in local mode.")); options.addOption(option(0, "r", OPTION_REMOTE, "Deploy the topology to a remote cluster.")); options.addOption(option(0, "R", OPTION_RESOURCE, "Treat the supplied path as a classpath resource instead of a file.")); options.addOption(/*from ww w . j a v a 2 s . c om*/ option(1, "s", OPTION_SLEEP, "ms", "When running locally, the amount of time to sleep (in ms.) " + "before killing the topology and shutting down the local cluster.")); options.addOption(option(0, "d", OPTION_DRY_RUN, "Do not run or deploy the topology. Just build, validate, " + "and print information about the topology.")); options.addOption(option(0, "q", OPTION_NO_DETAIL, "Suppress the printing of topology details.")); options.addOption(option(0, "n", OPTION_NO_SPLASH, "Suppress the printing of the splash screen.")); options.addOption(option(0, "i", OPTION_INACTIVE, "Deploy the topology, but do not activate it.")); options.addOption(option(1, "z", OPTION_ZOOKEEPER, "host:port", "When running in local mode, use the ZooKeeper at the " + "specified <host>:<port> instead of the in-process ZooKeeper. (requires Storm 0.9.3 or later)")); options.addOption(option(1, "f", OPTION_FILTER, "file", "Perform property substitution. Use the specified file " + "as a source of properties, and replace keys identified with {$[property name]} with the value defined " + "in the properties file.")); options.addOption( option(0, "e", OPTION_ENV_FILTER, "Perform environment variable substitution. Replace keys" + "identified with `${ENV-[NAME]}` will be replaced with the corresponding `NAME` environment value")); CommandLineParser parser = new BasicParser(); CommandLine cmd = parser.parse(options, args); if (cmd.getArgs().length != 1) { usage(options); System.exit(1); } runCli(cmd); }
From source file:com.versusoft.packages.jodl.gui.CommandLineGUI.java
public static void main(String args[]) throws SAXException, IOException { Handler fh = new FileHandler(LOG_FILENAME_PATTERN); fh.setFormatter(new SimpleFormatter()); //removeAllLoggersHandlers(Logger.getLogger("")); Logger.getLogger("").addHandler(fh); Logger.getLogger("").setLevel(Level.FINEST); Options options = new Options(); Option option1 = new Option("in", "ODT file (required)"); option1.setRequired(true);//from w ww .j av a2 s .c om option1.setArgs(1); Option option2 = new Option("out", "Output file (required)"); option2.setRequired(false); option2.setArgs(1); Option option3 = new Option("pic", "extract pics"); option3.setRequired(false); option3.setArgs(1); Option option4 = new Option("page", "enable pagination processing"); option4.setRequired(false); option4.setArgs(0); options.addOption(option1); options.addOption(option2); options.addOption(option3); options.addOption(option4); CommandLineParser parser = new BasicParser(); CommandLine cmd = null; try { cmd = parser.parse(options, args); } catch (ParseException e) { printHelp(); return; } if (cmd.hasOption("help")) { printHelp(); return; } File outFile = new File(cmd.getOptionValue("out")); OdtUtils utils = new OdtUtils(); utils.open(cmd.getOptionValue("in")); //utils.correctionStep(); utils.saveXML(outFile.getAbsolutePath()); try { if (cmd.hasOption("page")) { OdtUtils.paginationProcessing(outFile.getAbsolutePath()); } OdtUtils.correctionProcessing(outFile.getAbsolutePath()); } catch (ParserConfigurationException ex) { logger.log(Level.SEVERE, null, ex); } catch (SAXException ex) { logger.log(Level.SEVERE, null, ex); } catch (IOException ex) { logger.log(Level.SEVERE, null, ex); } catch (TransformerConfigurationException ex) { logger.log(Level.SEVERE, null, ex); } catch (TransformerException ex) { logger.log(Level.SEVERE, null, ex); } if (cmd.hasOption("pic")) { String imageDir = cmd.getOptionValue("pic"); if (!imageDir.endsWith("/")) { imageDir += "/"; } try { String basedir = new 
File(cmd.getOptionValue("out")).getParent().toString() + System.getProperty("file.separator"); OdtUtils.extractAndNormalizeEmbedPictures(cmd.getOptionValue("out"), cmd.getOptionValue("in"), basedir, imageDir); } catch (SAXException ex) { logger.log(Level.SEVERE, null, ex); } catch (ParserConfigurationException ex) { logger.log(Level.SEVERE, null, ex); } catch (TransformerConfigurationException ex) { logger.log(Level.SEVERE, null, ex); } catch (TransformerException ex) { logger.log(Level.SEVERE, null, ex); } } }
From source file:com.vmware.photon.controller.common.auth.AuthOIDCRegistrar.java
// NOTE(review): this `main` returns int, so it is NOT a JVM entry point (the JVM
// requires `public static void main`). Presumably it is invoked by a wrapper that
// turns the return value into a process exit code — TODO confirm at the call site.
public static int main(String[] args) {
    Options options = new Options();
    options.addOption(USERNAME_ARG, true, "Lightwave user name");
    options.addOption(PASSWORD_ARG, true, "Password");
    options.addOption(TARGET_ARG, true, "Registration Hostname or IPAddress"); // possible load-balancer address
    options.addOption(MANAGEMENT_UI_REG_FILE_ARG, true, "Management UI Registration Path");
    options.addOption(SWAGGER_UI_REG_FILE_ARG, true, "Swagger UI Registration Path");
    options.addOption(HELP_ARG, false, "Help");
    try {
        String username = null;
        String password = null;
        String registrationAddress = null;
        String mgmtUiRegPath = null;
        String swaggerUiRegPath = null;

        CommandLineParser parser = new DefaultParser();
        CommandLine cmd = null;
        cmd = parser.parse(options, args);

        // Help short-circuits everything else.
        if (cmd.hasOption(HELP_ARG)) {
            showUsage(options);
            return 0;
        }
        if (cmd.hasOption(USERNAME_ARG)) {
            username = cmd.getOptionValue(USERNAME_ARG);
        }
        if (cmd.hasOption(PASSWORD_ARG)) {
            password = cmd.getOptionValue(PASSWORD_ARG);
        }
        if (cmd.hasOption(TARGET_ARG)) {
            registrationAddress = cmd.getOptionValue(TARGET_ARG);
        }
        if (cmd.hasOption(MANAGEMENT_UI_REG_FILE_ARG)) {
            mgmtUiRegPath = cmd.getOptionValue(MANAGEMENT_UI_REG_FILE_ARG);
        }
        if (cmd.hasOption(SWAGGER_UI_REG_FILE_ARG)) {
            swaggerUiRegPath = cmd.getOptionValue(SWAGGER_UI_REG_FILE_ARG);
        }

        // Username is mandatory; password may be prompted for interactively.
        if (username == null || username.trim().isEmpty()) {
            throw new UsageException("Error: username is not specified");
        }
        if (password == null) {
            // NOTE(review): System.console() is null when no interactive console is
            // attached (e.g. piped stdin) — this would then throw an NPE.
            char[] passwd = System.console().readPassword("Password:");
            password = new String(passwd);
        }

        // Perform the actual OIDC registration against the resolved domain.
        DomainInfo domainInfo = DomainInfo.build();
        AuthOIDCRegistrar registrar = new AuthOIDCRegistrar(domainInfo);
        registrar.register(registrationAddress, username, password, mgmtUiRegPath, swaggerUiRegPath);
        return 0;
    } catch (ParseException e) {
        System.err.println(e.getMessage());
        return ERROR_PARSE_EXCEPTION;
    } catch (UsageException e) {
        System.err.println(e.getMessage());
        showUsage(options);
        return ERROR_USAGE_EXCEPTION;
    } catch (AuthException e) {
        System.err.println(e.getMessage());
        return ERROR_AUTH_EXCEPTION;
    }
}
From source file:eu.scape_project.spacip.Spacip.java
/** * Main entry point.//from w w w. j a va 2s. c o m * * @param args * @throws Exception */ public static void main(String[] args) throws Exception { // configuration properties pu = new PropertyUtil("/eu/scape_project/spacip/config.properties"); // hadoop configuration Configuration hadoopConf = new Configuration(); // Command line interface config = new CliConfig(); CommandLineParser cmdParser = new PosixParser(); GenericOptionsParser gop = new GenericOptionsParser(hadoopConf, args); CommandLine cmd = cmdParser.parse(Options.OPTIONS, gop.getRemainingArgs()); if ((args.length == 0) || (cmd.hasOption(Options.HELP_OPT))) { Options.exit("Usage", 0); } else { Options.initOptions(cmd, config); } // cli parameter has priority over default configuration int cliParamNumPerInv = config.getNumItemsPerInvokation(); int defaultNumPerInv = Integer.parseInt(pu.getProp("default.itemsperinvokation")); int numPerInv = (cliParamNumPerInv != 0) ? cliParamNumPerInv : defaultNumPerInv; // setting hadoop configuration parameters so that they can be used // during MapReduce hadoopConf.setInt("num_items_per_task", numPerInv); hadoopConf.set("output_file_suffix", pu.getProp("default.outputfilesuffix")); hadoopConf.set("scape_platform_invoke", pu.getProp("tomar.invoke.command")); hadoopConf.set("unpack_hdfs_path", pu.getProp("default.hdfsdir.unpacked")); hadoopConf.set("joboutput_hdfs_path", pu.getProp("default.hdfsdir.joboutput")); hadoopConf.set("tooloutput_hdfs_path", pu.getProp("default.hdfsdir.toolout")); hadoopConf.set("container_file_suffix", pu.getProp("containerfilesuffix")); hadoopConf.set("tomar_param_pattern", pu.getProp("tomar.param.pattern")); startHadoopJob(hadoopConf); }
From source file:com.browseengine.bobo.index.MakeBobo.java
/**
 * Builds a Bobo/Lucene index from a data source using a configurable digester.
 *
 * <p>NOTE(review): uses the deprecated {@code OptionBuilder} static-chain API —
 * each withArgName/hasArg/withDescription sequence configures the next
 * {@code create(...)} call, so statement order matters and must not be changed.
 *
 * @param args command-line arguments
 * @throws IOException on index-directory I/O failure
 */
public static void main(String[] args) throws IOException {
    Option help = new Option("help", false, "print this message");
    OptionBuilder.withArgName("path");
    OptionBuilder.hasArg();
    OptionBuilder.withDescription("data source - required");
    Option src = OptionBuilder.create("source");
    src.setRequired(true);
    OptionBuilder.withArgName("path");
    OptionBuilder.hasArg();
    OptionBuilder.withDescription("index to create - required");
    Option index = OptionBuilder.create("index");
    index.setRequired(true);
    OptionBuilder.withArgName("file");
    OptionBuilder.hasArg();
    OptionBuilder.withDescription("field configuration - optional");
    Option conf = OptionBuilder.create("conf");
    OptionBuilder.withArgName("class");
    OptionBuilder.hasArg();
    OptionBuilder.withDescription("class name of the data digester - default: xml digester");
    Option digesterOpt = OptionBuilder.create("digester");
    OptionBuilder.withArgName("name");
    OptionBuilder.hasArg();
    OptionBuilder.withDescription("character set name - default: UTF-8");
    Option charset = OptionBuilder.create("charset");
    OptionBuilder.withArgName("maxdocs");
    OptionBuilder.hasArg();
    OptionBuilder.withDescription("maximum number of documents - default: 100");
    Option maxdocs = OptionBuilder.create("maxdocs");

    Options options = new Options();
    options.addOption(help);
    options.addOption(conf);
    options.addOption(index);
    options.addOption(src);
    options.addOption(charset);
    options.addOption(digesterOpt);
    options.addOption(maxdocs);

    // create the parser
    CommandLineParser parser = new BasicParser();
    try {
        // parse the command line arguments
        CommandLine line = parser.parse(options, args);
        String output = line.getOptionValue("index");
        File data = new File(line.getOptionValue("source"));

        // The digester class is mandatory despite the "default: xml digester" help text;
        // NOTE(review): no default is actually applied here.
        Class digesterClass;
        if (line.hasOption("digester"))
            digesterClass = Class.forName(line.getOptionValue("digester"));
        else
            throw new RuntimeException("digester not specified");

        // Charset defaults to UTF-8 when not given.
        Charset chset;
        if (line.hasOption("charset")) {
            chset = Charset.forName(line.getOptionValue("charset"));
        } else {
            chset = Charset.forName("UTF-8");
        }

        // Any parse failure (including a missing -maxdocs) falls back to 100.
        int maxDocs;
        try {
            maxDocs = Integer.parseInt(line.getOptionValue("maxdocs"));
        } catch (Exception e) {
            maxDocs = 100;
        }

        // Instantiate the digester reflectively via its (File) constructor.
        FileDigester digester;
        try {
            Constructor constructor = digesterClass.getConstructor(new Class[] { File.class });
            digester = (FileDigester) constructor.newInstance(new Object[] { data });
            digester.setCharset(chset);
            digester.setMaxDocs(maxDocs);
        } catch (Exception e) {
            throw new RuntimeException("Invalid digester class.", e);
        }

        BoboIndexer indexer = new BoboIndexer(digester, FSDirectory.open(new File(output)));
        indexer.index();
    } catch (ParseException exp) {
        exp.printStackTrace();
        usage(options);
    } catch (ClassNotFoundException e) {
        System.out.println("Invalid digester class.");
        usage(options);
    }
}
From source file:com.khubla.antlr4formatter.Antlr4Formatter.java
public static void main(String[] args) { try {/*from www . j a v a 2 s . c o m*/ System.out.println("khubla.com Antlr4 Formatter"); /* * options */ final Options options = new Options(); final Option o1 = Option.builder().argName(INPUT_OPTION).longOpt(INPUT_OPTION).type(String.class) .hasArg().required(false).desc("input file").build(); options.addOption(o1); final Option o2 = Option.builder().argName(OUTPUT_OPTION).longOpt(OUTPUT_OPTION).type(String.class) .hasArg().required(false).desc("output file").build(); options.addOption(o2); final Option o3 = Option.builder().argName(DIR_OPTION).longOpt(DIR_OPTION).type(String.class).hasArg() .required(false).desc("input dir").build(); options.addOption(o3); /* * parse */ final CommandLineParser parser = new DefaultParser(); CommandLine cmd = null; try { cmd = parser.parse(options, args); } catch (final Exception e) { e.printStackTrace(); final HelpFormatter formatter = new HelpFormatter(); formatter.printHelp("posix", options); System.exit(0); } /* * get the file */ final String inputFilename = cmd.getOptionValue(INPUT_OPTION); final String outputFilename = cmd.getOptionValue(OUTPUT_OPTION); final String inputDirOption = cmd.getOptionValue(DIR_OPTION); if (null == inputDirOption) { formatSingleFile(inputFilename, outputFilename); } else { formatDirectory(inputDirOption); } } catch (final Exception e) { e.printStackTrace(); } }
From source file:com.hortonworks.registries.storage.tool.sql.TablesInitializer.java
/**
 * Entry point for the schema-migration tool: parses options, ensures the MySQL
 * client jar is available (optionally via an HTTP proxy), and runs exactly one
 * schema-migration operation via Flyway.
 *
 * <p>Fixes in this revision (user-facing messages only):
 * "scatch" → "scratch", stray comma removed from the operation list
 * ("'create', ',migrate'"), and "can be execute" → "can be executed".
 *
 * @param args command-line arguments
 * @throws Exception on parse failure or unexpected errors
 */
public static void main(String[] args) throws Exception {
    Options options = new Options();
    options.addOption(Option.builder("s").numberOfArgs(1).longOpt(OPTION_SCRIPT_ROOT_PATH)
            .desc("Root directory of script path").build());
    options.addOption(Option.builder("c").numberOfArgs(1).longOpt(OPTION_CONFIG_FILE_PATH)
            .desc("Config file path").build());
    options.addOption(Option.builder("m").numberOfArgs(1).longOpt(OPTION_MYSQL_JAR_URL_PATH)
            .desc("Mysql client jar url to download").build());
    // One flag per migration operation; exactly one must be chosen (checked below).
    options.addOption(Option.builder().hasArg(false).longOpt(SchemaMigrationOption.CREATE.toString())
            .desc("Run sql migrations from scratch").build());
    options.addOption(Option.builder().hasArg(false).longOpt(SchemaMigrationOption.DROP.toString())
            .desc("Drop all the tables in the target database").build());
    options.addOption(Option.builder().hasArg(false).longOpt(SchemaMigrationOption.CHECK_CONNECTION.toString())
            .desc("Check the connection for configured data source").build());
    options.addOption(Option.builder().hasArg(false).longOpt(SchemaMigrationOption.MIGRATE.toString())
            .desc("Execute schema migration from last check point").build());
    options.addOption(Option.builder().hasArg(false).longOpt(SchemaMigrationOption.INFO.toString())
            .desc("Show the status of the schema migration compared to the target database").build());
    options.addOption(Option.builder().hasArg(false).longOpt(SchemaMigrationOption.VALIDATE.toString())
            .desc("Validate the target database changes with the migration scripts").build());
    options.addOption(Option.builder().hasArg(false).longOpt(SchemaMigrationOption.REPAIR.toString()).desc(
            "Repairs the DATABASE_CHANGE_LOG by removing failed migrations and correcting checksum of existing migration script")
            .build());
    options.addOption(Option.builder().hasArg(false).longOpt(DISABLE_VALIDATE_ON_MIGRATE)
            .desc("Disable flyway validation checks while running migrate").build());

    CommandLineParser parser = new BasicParser();
    CommandLine commandLine = parser.parse(options, args);

    // Config file and script root are both mandatory.
    if (!commandLine.hasOption(OPTION_CONFIG_FILE_PATH) || !commandLine.hasOption(OPTION_SCRIPT_ROOT_PATH)) {
        usage(options);
        System.exit(1);
    }

    // Enforce exactly one migration operation.
    boolean isSchemaMigrationOptionSpecified = false;
    SchemaMigrationOption schemaMigrationOptionSpecified = null;
    for (SchemaMigrationOption schemaMigrationOption : SchemaMigrationOption.values()) {
        if (commandLine.hasOption(schemaMigrationOption.toString())) {
            if (isSchemaMigrationOptionSpecified) {
                System.out.println(
                        "Only one operation can be executed at once, please select one of 'create', 'migrate', 'validate', 'info', 'drop', 'repair', 'check-connection'.");
                System.exit(1);
            }
            isSchemaMigrationOptionSpecified = true;
            schemaMigrationOptionSpecified = schemaMigrationOption;
        }
    }
    if (!isSchemaMigrationOptionSpecified) {
        System.out.println(
                "One of the options 'create', 'migrate', 'validate', 'info', 'drop', 'repair', 'check-connection' must be specified to execute.");
        System.exit(1);
    }

    String confFilePath = commandLine.getOptionValue(OPTION_CONFIG_FILE_PATH);
    String scriptRootPath = commandLine.getOptionValue(OPTION_SCRIPT_ROOT_PATH);
    String mysqlJarUrl = commandLine.getOptionValue(OPTION_MYSQL_JAR_URL_PATH);

    // Read storage configuration; failure is fatal. The throw after System.exit is
    // unreachable but satisfies definite-assignment for storageProperties/conf.
    StorageProviderConfiguration storageProperties;
    Map<String, Object> conf;
    try {
        conf = Utils.readConfig(confFilePath);
        StorageProviderConfigurationReader confReader = new StorageProviderConfigurationReader();
        storageProperties = confReader.readStorageConfig(conf);
    } catch (IOException e) {
        System.err.println("Error occurred while reading config file: " + confFilePath);
        System.exit(1);
        throw new IllegalStateException("Shouldn't reach here");
    }

    // Download the MySQL client jar if needed, honoring an optional HTTP proxy
    // (with optional basic auth) from the configuration.
    String bootstrapDirPath = null;
    try {
        bootstrapDirPath = System.getProperty("bootstrap.dir");
        Proxy proxy = Proxy.NO_PROXY;
        String httpProxyUrl = (String) conf.get(HTTP_PROXY_URL);
        String httpProxyUsername = (String) conf.get(HTTP_PROXY_USERNAME);
        String httpProxyPassword = (String) conf.get(HTTP_PROXY_PASSWORD);
        if ((httpProxyUrl != null) && !httpProxyUrl.isEmpty()) {
            URL url = new URL(httpProxyUrl);
            proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(url.getHost(), url.getPort()));
            if ((httpProxyUsername != null) && !httpProxyUsername.isEmpty()) {
                Authenticator.setDefault(
                        getBasicAuthenticator(url.getHost(), url.getPort(), httpProxyUsername, httpProxyPassword));
            }
        }
        MySqlDriverHelper.downloadMySQLJarIfNeeded(storageProperties, bootstrapDirPath, mysqlJarUrl, proxy);
    } catch (Exception e) {
        System.err.println("Error occurred while downloading MySQL jar. bootstrap dir: " + bootstrapDirPath);
        System.exit(1);
        throw new IllegalStateException("Shouldn't reach here");
    }

    boolean disableValidateOnMigrate = commandLine.hasOption(DISABLE_VALIDATE_ON_MIGRATE);
    if (disableValidateOnMigrate) {
        System.out.println("Disabling validation on schema migrate");
    }

    // Run the selected operation through Flyway.
    SchemaMigrationHelper schemaMigrationHelper = new SchemaMigrationHelper(
            SchemaFlywayFactory.get(storageProperties, scriptRootPath, !disableValidateOnMigrate));
    try {
        schemaMigrationHelper.execute(schemaMigrationOptionSpecified);
        System.out
                .println(String.format("\"%s\" option successful", schemaMigrationOptionSpecified.toString()));
    } catch (Exception e) {
        System.err.println(
                String.format("\"%s\" option failed : %s", schemaMigrationOptionSpecified.toString(), e));
        System.exit(1);
    }
}
From source file:com.twentyn.patentSearch.DocumentIndexer.java
/**
 * Builds a Lucene index over a patent corpus: parses CLI options, optionally
 * raises the log4j2 root logger to DEBUG, then reads the corpus and indexes it.
 *
 * @param args command-line arguments (-input and -index are required)
 * @throws Exception on indexing or I/O failure
 */
public static void main(String[] args) throws Exception {
    System.out.println("Starting up...");
    System.out.flush();
    Options opts = new Options();
    opts.addOption(Option.builder("i").longOpt("input").hasArg().required()
            .desc("Input file or directory to index").build());
    opts.addOption(Option.builder("x").longOpt("index").hasArg().required()
            .desc("Path to index file to generate").build());
    opts.addOption(Option.builder("h").longOpt("help").desc("Print this help message and exit").build());
    opts.addOption(Option.builder("v").longOpt("verbose").desc("Print verbose log output").build());

    HelpFormatter helpFormatter = new HelpFormatter();
    CommandLineParser cmdLineParser = new DefaultParser();
    CommandLine cmdLine = null;
    try {
        cmdLine = cmdLineParser.parse(opts, args);
    } catch (ParseException e) {
        System.out.println("Caught exception when parsing command line: " + e.getMessage());
        helpFormatter.printHelp("DocumentIndexer", opts);
        System.exit(1);
    }
    if (cmdLine.hasOption("help")) {
        helpFormatter.printHelp("DocumentIndexer", opts);
        System.exit(0);
    }
    if (cmdLine.hasOption("verbose")) {
        // Programmatically raise the log4j2 root logger to DEBUG.
        // With help from http://stackoverflow.com/questions/23434252/programmatically-change-log-level-in-log4j2
        LoggerContext ctx = (LoggerContext) LogManager.getContext(false);
        Configuration ctxConfig = ctx.getConfiguration();
        LoggerConfig logConfig = ctxConfig.getLoggerConfig(LogManager.ROOT_LOGGER_NAME);
        logConfig.setLevel(Level.DEBUG);
        ctx.updateLoggers();
        LOGGER.debug("Verbose logging enabled");
    }

    LOGGER.info("Opening index at " + cmdLine.getOptionValue("index"));
    Directory indexDir = FSDirectory.open(new File(cmdLine.getOptionValue("index")).toPath());

    /* The standard analyzer is too aggressive with chemical entities (it strips structural annotations, for one
     * thing), and the whitespace analyzer doesn't do any case normalization or stop word elimination. This custom
     * analyzer appears to treat chemical entities better than the standard analyzer without admitting too much
     * cruft to the index. */
    Analyzer analyzer = CustomAnalyzer.builder().withTokenizer("whitespace").addTokenFilter("lowercase")
            .addTokenFilter("stop").build();

    IndexWriterConfig writerConfig = new IndexWriterConfig(analyzer);
    writerConfig.setOpenMode(IndexWriterConfig.OpenMode.CREATE_OR_APPEND);
    // 1 GiB RAM buffer before flushing segments to disk.
    writerConfig.setRAMBufferSizeMB(1 << 10);
    IndexWriter indexWriter = new IndexWriter(indexDir, writerConfig);

    String inputFileOrDir = cmdLine.getOptionValue("input");
    File splitFileOrDir = new File(inputFileOrDir);
    if (!(splitFileOrDir.exists())) {
        LOGGER.error("Unable to find directory at " + inputFileOrDir);
        System.exit(1);
    }

    // Stream the corpus through the indexer, then commit and close the writer.
    DocumentIndexer indexer = new DocumentIndexer(indexWriter);
    PatentCorpusReader corpusReader = new PatentCorpusReader(indexer, splitFileOrDir);
    corpusReader.readPatentCorpus();
    indexer.commitAndClose();
}
From source file:de.uni_koblenz.west.splendid.tools.NQuadSourceAggregator.java
public static void main(String[] args) { try {/*w ww . j a v a 2 s.c o m*/ // parse the command line arguments CommandLineParser parser = new GnuParser(); CommandLine cmd = parser.parse(OPTIONS, args); // print help message if (cmd.hasOption("h") || cmd.hasOption("help")) { new HelpFormatter().printHelp(USAGE, OPTIONS); System.exit(0); } // get input files (from option -i or all remaining parameters) String[] inputFiles = cmd.getOptionValues("i"); if (inputFiles == null) inputFiles = cmd.getArgs(); if (inputFiles.length == 0) { System.out.println("need at least one input file."); new HelpFormatter().printUsage(new PrintWriter(System.out, true), 80, USAGE); System.exit(1); } String outputFile = cmd.getOptionValue("o"); // process all input files new NQuadSourceAggregator().process(outputFile, inputFiles); } catch (ParseException exp) { // print parse error and display usage message System.out.println(exp.getMessage()); new HelpFormatter().printUsage(new PrintWriter(System.out, true), 80, USAGE, OPTIONS); } }
From source file:com.example.geomesa.hbase.HBaseQuickStart.java
public static void main(String[] args) throws Exception { // find out where -- in HBase -- the user wants to store data CommandLineParser parser = new BasicParser(); Options options = getCommonRequiredOptions(); CommandLine cmd = parser.parse(options, args); // verify that we can see this HBase destination in a GeoTools manner Map<String, Serializable> dsConf = getHBaseDataStoreConf(cmd); DataStore dataStore = DataStoreFinder.getDataStore(dsConf); assert dataStore != null; // establish specifics concerning the SimpleFeatureType to store String simpleFeatureTypeName = "QuickStart"; SimpleFeatureType simpleFeatureType = createSimpleFeatureType(simpleFeatureTypeName); // write Feature-specific metadata to the destination table in HBase // (first creating the table if it does not already exist); you only need // to create the FeatureType schema the *first* time you write any Features // of this type to the table System.out.println("Creating feature-type (schema): " + simpleFeatureTypeName); dataStore.createSchema(simpleFeatureType); // create new features locally, and add them to this table System.out.println("Creating new features"); FeatureCollection featureCollection = createNewFeatures(simpleFeatureType, 1000); System.out.println("Inserting new features"); insertFeatures(simpleFeatureTypeName, dataStore, featureCollection); // query a few Features from this table System.out.println("Submitting query"); queryFeatures(simpleFeatureTypeName, dataStore, "Where", -78.5, 37.5, -78.0, 38.0, "When", "2014-07-01T00:00:00.000Z", "2014-09-30T23:59:59.999Z", "(Who = 'Bierce')"); }