List of usage examples for org.apache.commons.cli PosixParser
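Every example below follows the same core pattern: build an Options set, parse the argument array with a PosixParser, then query the resulting CommandLine. The following sketch distills that pattern; the option names and the "demo" usage string are illustrative, not taken from any one example. Note that PosixParser is deprecated since Commons CLI 1.3 in favor of DefaultParser, which accepts the same parse(Options, String[]) call.

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;

public class PosixParserDemo {
    public static void main(String[] args) {
        // Define the options the parser should recognize.
        Options options = new Options();
        options.addOption("h", "help", false, "print this message");
        options.addOption("o", "output", true, "output file");

        try {
            // PosixParser accepts short options (-o), bursts (-ho), and long options (--output).
            CommandLineParser parser = new PosixParser();
            CommandLine cmd = parser.parse(options, args);

            if (cmd.hasOption("h")) {
                new HelpFormatter().printHelp("demo [options] <input>", options);
                return;
            }
            String output = cmd.getOptionValue("o", "out.txt"); // default when -o is absent
            System.out.println("output = " + output);
            System.out.println("remaining args = " + cmd.getArgList());
        } catch (ParseException pe) {
            // Unrecognized option or missing option argument.
            System.err.println(pe.getMessage());
            new HelpFormatter().printHelp("demo [options] <input>", options);
        }
    }
}

With this sketch, "demo -h" prints the generated help, "-o out.bin" and "--output=out.bin" are equivalent, and anything the parser cannot match is reported through ParseException.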
From source file:com.google.flightmap.parsing.faa.nfd.NfdAirspaceParser.java
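This example shows the minimal pattern: parse a predefined OPTIONS set, print help and exit on a ParseException or when the help flag is present, then read two file paths from option values.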
public static void main(String args[]) {
    CommandLine line = null;
    try {
        final CommandLineParser parser = new PosixParser();
        line = parser.parse(OPTIONS, args);
    } catch (ParseException pEx) {
        System.err.println(pEx.getMessage());
        printHelp(line);
        System.exit(2);
    }

    if (line.hasOption(HELP_OPTION)) {
        printHelp(line);
        System.exit(0);
    }

    final String nfdPath = line.getOptionValue(NFD_OPTION);
    final File nfd = new File(nfdPath);
    final String dbPath = line.getOptionValue(AVIATION_DB_OPTION);
    final File db = new File(dbPath);

    try {
        (new NfdAirspaceParser(nfd, db)).execute();
    } catch (Exception ex) {
        ex.printStackTrace();
        System.exit(1);
    }
}
From source file:com.netscape.cms.servlet.test.DRMTest.java
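A long test driver for the Dogtag KRA/DRM: PosixParser handles single-letter options (host, port, token password, database directory, SSL settings) before a series of key archival and recovery tests.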
public static void main(String args[]) throws InvalidKeyException, NoSuchAlgorithmException,
        InvalidKeySpecException, SignatureException, IOException {
    String host = null;
    String port = null;
    String token_pwd = null;
    String db_dir = "./";
    String protocol = "http";
    String clientCertNickname = "KRA Administrator of Instance pki-kra's SjcRedhat Domain ID";

    // parse command line arguments
    Options options = new Options();
    options.addOption("h", true, "Hostname of the DRM");
    options.addOption("p", true, "Port of the DRM");
    options.addOption("w", true, "Token password");
    options.addOption("d", true, "Directory for tokendb");
    options.addOption("s", true, "Attempt Optional Secure SSL connection");
    options.addOption("c", true, "Optional SSL Client cert Nickname");

    try {
        CommandLineParser parser = new PosixParser();
        CommandLine cmd = parser.parse(options, args);

        if (cmd.hasOption("h")) {
            host = cmd.getOptionValue("h");
        } else {
            System.err.println("Error: no hostname provided.");
            usage(options);
        }
        if (cmd.hasOption("p")) {
            port = cmd.getOptionValue("p");
        } else {
            System.err.println("Error: no port provided");
            usage(options);
        }
        if (cmd.hasOption("w")) {
            token_pwd = cmd.getOptionValue("w");
        } else {
            System.err.println("Error: no token password provided");
            usage(options);
        }
        if (cmd.hasOption("d")) {
            db_dir = cmd.getOptionValue("d");
        }
        if (cmd.hasOption("s")) {
            if (cmd.getOptionValue("s") != null && cmd.getOptionValue("s").equals("true")) {
                protocol = "https";
            }
        }
        if (cmd.hasOption("c")) {
            String nick = cmd.getOptionValue("c");
            if (nick != null && protocol.equals("https")) {
                clientCertNickname = nick;
            }
        }
    } catch (ParseException e) {
        System.err.println("Error in parsing command line options: " + e.getMessage());
        usage(options);
    }

    // used for crypto operations
    byte iv[] = { 0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1 };
    try {
        iv = genIV(8);
    } catch (Exception e) {
        log("Can't generate initialization vector, using default: " + e.toString());
    }

    // used for wrapping to send data to DRM
    String transportCert = null;

    // Data to be archived
    SymmetricKey vek = null;
    String passphrase = null;

    // Session keys and passphrases for recovery
    SymmetricKey sessionKey = null;
    byte[] wrappedRecoveryKey = null;
    String recoveryPassphrase = null;
    byte[] wrappedRecoveryPassphrase = null;

    // retrieved data (should match archived data)
    byte[] encryptedData = null;
    String recoveredKey = null;

    // various ids used in recovery/archival operations
    KeyId keyId = null;
    String clientKeyId = null;
    RequestId recoveryRequestId = null;

    // Variables for data structures from calls
    KeyRequestResponse requestResponse = null;
    Key keyData = null;
    KeyInfo keyInfo = null;

    // Initialize token
    try {
        CryptoManager.initialize(db_dir);
    } catch (AlreadyInitializedException e) {
        // it is ok if it is already initialized
    } catch (Exception e) {
        log("INITIALIZATION ERROR: " + e.toString());
        System.exit(1);
    }

    // Set base URI and get client
    KRAClient client;
    SystemCertClient systemCertClient;
    KeyClient keyClient;
    NSSCryptoProvider nssCrypto;
    try {
        ClientConfig config = new ClientConfig();
        config.setServerURI(protocol + "://" + host + ":" + port + "/kra");
        config.setCertNickname(clientCertNickname);
        config.setCertDatabase(db_dir);
        config.setCertPassword(token_pwd);
        nssCrypto = new NSSCryptoProvider(config);

        client = new KRAClient(new PKIClient(config, nssCrypto));
        systemCertClient = (SystemCertClient) client.getClient("systemcert");
        keyClient = (KeyClient) client.getClient("key");
    } catch (Exception e) {
        e.printStackTrace();
        return;
    }

    // Test 1: Get transport certificate from DRM
    transportCert = systemCertClient.getTransportCert().getEncoded();
    transportCert = transportCert.substring(CertData.HEADER.length(), transportCert.indexOf(CertData.FOOTER));
    keyClient.setTransportCert(transportCert);
    log("Transport Cert retrieved from DRM: " + transportCert);

    // Test 2: Get list of completed key archival requests
    log("\n\nList of completed archival requests");
    KeyRequestInfoCollection list = keyClient.listRequests("complete", "securityDataEnrollment");
    if (list.getTotal() == 0) {
        log("No requests found");
    } else {
        Iterator<KeyRequestInfo> iter = list.getEntries().iterator();
        while (iter.hasNext()) {
            KeyRequestInfo info = iter.next();
            printRequestInfo(info);
        }
    }

    // Test 3: Get list of key recovery requests
    log("\n\nList of completed recovery requests");
    KeyRequestInfoCollection list2 = keyClient.listRequests("complete", "securityDataRecovery");
    if (list2.getTotal() == 0) {
        log("No requests found");
    } else {
        Iterator<KeyRequestInfo> iter2 = list2.getEntries().iterator();
        while (iter2.hasNext()) {
            KeyRequestInfo info = iter2.next();
            printRequestInfo(info);
        }
    }

    // Test 4: Generate and archive a symmetric key
    log("Archiving symmetric key");
    clientKeyId = "UUID: 123-45-6789 VEK " + Calendar.getInstance().getTime().toString();
    try {
        vek = nssCrypto.generateSessionKey();
        byte[] encoded = nssCrypto.createPKIArchiveOptions(transportCert, vek, null,
                KeyRequestResource.DES3_ALGORITHM, 0, iv);

        KeyRequestResponse info = keyClient.archivePKIOptions(clientKeyId,
                KeyRequestResource.SYMMETRIC_KEY_TYPE, KeyRequestResource.DES3_ALGORITHM, 0, encoded);
        log("Archival Results:");
        printRequestInfo(info.getRequestInfo());
        keyId = info.getKeyId();
    } catch (Exception e) {
        log("Exception in archiving symmetric key: " + e.getMessage());
        e.printStackTrace();
    }

    // Test 5: Get keyId for active key with client ID
    log("Getting key ID for symmetric key");
    keyInfo = keyClient.getActiveKeyInfo(clientKeyId);
    printKeyInfo(keyInfo);
    KeyId keyId2 = keyInfo.getKeyId();
    if (keyId2 == null) {
        log("No archived key found");
    } else {
        log("Archived Key found: " + keyId);
    }
    if (!keyId.equals(keyId2)) {
        log("Error: key ids from search and archival do not match");
    } else {
        log("Success: key ids from search and archival match.");
    }

    // Test 6: Submit a recovery request for the symmetric key using a session key
    log("Submitting a recovery request for the symmetric key using session key");
    try {
        sessionKey = nssCrypto.generateSessionKey();
        wrappedRecoveryKey = CryptoUtil.wrapSymmetricKey(nssCrypto.getManager(), nssCrypto.getToken(),
                transportCert, sessionKey);
        keyData = keyClient.retrieveKey(keyId, wrappedRecoveryKey);
    } catch (Exception e) {
        log("Exception in recovering symmetric key using session key: " + e.getMessage());
    }

    encryptedData = keyData.getEncryptedData();
    try {
        recoveredKey = Utils.base64encode(nssCrypto.unwrapWithSessionKey(encryptedData, sessionKey,
                KeyRequestResource.DES3_ALGORITHM, keyData.getNonceData()));
    } catch (Exception e) {
        log("Exception in unwrapping key: " + e.toString());
        e.printStackTrace();
    }

    if (!recoveredKey.equals(Utils.base64encode(vek.getEncoded()))) {
        log("Error: recovered and archived keys do not match!");
    } else {
        log("Success: recovered and archived keys match!");
    }

    // Test 7: Submit a recovery request for the symmetric key using a passphrase
    log("Submitting a recovery request for the symmetric key using a passphrase");
    recoveryPassphrase = "Gimme me keys please";
    try {
        sessionKey = nssCrypto.generateSessionKey();
        wrappedRecoveryPassphrase = nssCrypto.wrapWithSessionKey(recoveryPassphrase, iv, sessionKey,
                KeyRequestResource.DES3_ALGORITHM);
        wrappedRecoveryKey = nssCrypto.wrapSessionKeyWithTransportCert(sessionKey, transportCert);
        keyData = keyClient.retrieveKeyUsingWrappedPassphrase(keyId, wrappedRecoveryKey,
                wrappedRecoveryPassphrase, iv);
    } catch (Exception e) {
        log("Exception in recovering symmetric key using passphrase: " + e.toString());
        e.printStackTrace();
    }

    encryptedData = keyData.getEncryptedData();
    try {
        recoveredKey = Utils.base64encode(nssCrypto.unwrapWithPassphrase(encryptedData, recoveryPassphrase));
    } catch (Exception e) {
        log("Error: unable to unwrap key using passphrase");
        e.printStackTrace();
    }

    if (recoveredKey == null || !recoveredKey.equals(Utils.base64encode(vek.getEncoded()))) {
        log("Error: recovered and archived keys do not match!");
    } else {
        log("Success: recovered and archived keys do match!");
    }

    passphrase = "secret12345";

    // Test 8: Generate and archive a passphrase
    clientKeyId = "UUID: 123-45-6789 RKEK " + Calendar.getInstance().getTime().toString();
    try {
        requestResponse = keyClient.archivePassphrase(clientKeyId, passphrase);
        log("Archival Results:");
        printRequestInfo(requestResponse.getRequestInfo());
        keyId = requestResponse.getKeyId();
    } catch (Exception e) {
        log("Exception in archiving passphrase: " + e.toString());
        e.printStackTrace();
    }

    // Test 9: Get keyId for active passphrase with client ID
    log("Getting key ID for passphrase");
    keyInfo = keyClient.getActiveKeyInfo(clientKeyId);
    printKeyInfo(keyInfo);
    keyId2 = keyInfo.getKeyId();
    if (keyId2 == null) {
        log("No archived key found");
    } else {
        log("Archived Key found: " + keyId);
    }
    if (!keyId.equals(keyId2)) {
        log("Error: key ids from search and archival do not match");
    } else {
        log("Success: key ids from search and archival do match!");
    }

    // Test 10: Submit a recovery request for the passphrase using a session key
    log("Submitting a recovery request for the passphrase using session key");
    sessionKey = null;
    wrappedRecoveryKey = null;
    try {
        keyData = keyClient.retrieveKeyByPassphrase(keyId, recoveryPassphrase);
    } catch (Exception e) {
        log("Exception in recovering passphrase using session key: " + e.getMessage());
    }
    encryptedData = keyData.getEncryptedData();
    try {
        recoveredKey = new String(nssCrypto.unwrapWithPassphrase(encryptedData, recoveryPassphrase), "UTF-8");
    } catch (Exception e) {
        log("Exception in unwrapping key: " + e.toString());
        e.printStackTrace();
    }
    if (recoveredKey == null || !recoveredKey.equals(passphrase)) {
        log("Error: recovered and archived passphrases do not match!");
    } else {
        log("Success: recovered and archived passphrases do match!");
    }

    // Test 11: Submit a recovery request for the passphrase using a passphrase
    try {
        sessionKey = nssCrypto.generateSessionKey();
        wrappedRecoveryKey = nssCrypto.wrapSessionKeyWithTransportCert(sessionKey, transportCert);
        wrappedRecoveryPassphrase = nssCrypto.wrapWithSessionKey(recoveryPassphrase, iv, sessionKey,
                KeyRequestResource.DES3_ALGORITHM);
        keyData = keyClient.retrieveKeyUsingWrappedPassphrase(keyId, wrappedRecoveryKey,
                wrappedRecoveryPassphrase, iv);
    } catch (Exception e1) {
        e1.printStackTrace();
        System.out.println("Test 11: " + e1.getMessage());
        System.exit(-1);
    }
    encryptedData = keyData.getEncryptedData();
    try {
        recoveredKey = new String(nssCrypto.unwrapWithPassphrase(encryptedData, recoveryPassphrase), "UTF-8");
    } catch (Exception e) {
        log("Error: cannot unwrap key using passphrase");
        e.printStackTrace();
    }
    if (recoveredKey == null || !recoveredKey.equals(passphrase)) {
        log("Error: recovered and archived passphrases do not match!");
    } else {
        log("Success: recovered and archived passphrases do match!");
    }

    // Test 12: Get key
    log("Getting passphrase: " + keyId);
    try {
        keyData = keyClient.retrieveKeyByPassphrase(keyId, recoveryPassphrase);
    } catch (Exception e1) {
        e1.printStackTrace();
    }
    encryptedData = keyData.getEncryptedData();
    try {
        recoveredKey = new String(nssCrypto.unwrapWithPassphrase(encryptedData, recoveryPassphrase), "UTF-8");
    } catch (Exception e) {
        log("Error: can't unwrap recovered key using passphrase");
        e.printStackTrace();
    }
    if (recoveredKey == null || !recoveredKey.equals(passphrase)) {
        log("Error: recovered and archived passphrases do not match!");
    } else {
        log("Success: recovered and archived passphrases do match!");
    }

    // Test 13: Get non-existent request
    RequestId requestId = new RequestId("0xabcdef");
    log("Getting non-existent request: " + requestId.toHexString());
    try {
        keyClient.getRequestInfo(requestId);
        log("Error: getting non-existent request does not throw an exception");
    } catch (RequestNotFoundException e) {
        log("Success: getting non-existent request throws an exception: " + e.getMessage() + " ("
                + e.getRequestId().toHexString() + ")");
    }

    // Test 14: Request x509 key recovery
    // This test requires retrieving a keyId and matching certificate
    // from installed instances of CA and DRM
    String keyID = "1";
    String b64Certificate = "MIIC+TCCAeGgAwIBAgIBDDANBgkqhkiG9w0BAQsFADBOMSswKQYDVQQKDCJ1c2Vy"
            + "c3lzLnJlZGhhdC5jb20gU2VjdXJpdHkgRG9tYWluMR8wHQYDVQQDDBZDQSBTaWdu"
            + "aW5nIENlcnRpZmljYXRlMB4XDTEzMTAyNTE5MzQwM1oXDTE0MDQyMzE5MzQwM1ow"
            + "EzERMA8GCgmSJomT8ixkAQEMAXgwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGB"
            + "ALhLfGmKvxFsKXPh49q1QsluXU3WlyS1XnpDLgOAhgTNgO4sG6CpPdv6hZYIvQBb"
            + "ZQ5bhuML+NXK+Q+EIiNk1cUTxgL3a30sPzy6QaFWxwM8i4uXm4nCBYv7T+n4V6/O"
            + "xHIM2Ch/dviAb3vz+M9trErv9t+d2H8jNXT3sHuDb/kvAgMBAAGjgaAwgZ0wHwYD"
            + "VR0jBBgwFoAUh1cxWFRY+nMsx4odQQI1GqyFxP8wSwYIKwYBBQUHAQEEPzA9MDsG"
            + "CCsGAQUFBzABhi9odHRwOi8vZG9ndGFnMjAudXNlcnN5cy5yZWRoYXQuY29tOjgw"
            + "ODAvY2Evb2NzcDAOBgNVHQ8BAf8EBAMCBSAwHQYDVR0lBBYwFAYIKwYBBQUHAwIG"
            + "CCsGAQUFBwMEMA0GCSqGSIb3DQEBCwUAA4IBAQCvmbUzQOouE2LgQQcKfmgwwJMJ"
            + "9tMrPwDUtyFdaIFoPL4uZaujSscaN4IWK2r5vIMJ65jwYCI7sI9En2ZfO28J9dQj"
            + "lpqu6TaJ+xtaMk7OvXpVB7lJk73HAttMGjETlkoq/6EjxcugmJsDqVD0b2tO7Vd0"
            + "hroBe2uPDHM2ASewZF415lUcRh0URtmxSazTInbyxpmy1wgSJQ0C6fMCeT+hUFlA"
            + "0P4k1TIprapGVq7FpKcqlhK2gTBfTSnoO7gmXG/9MxJiYpb/Aph8ptXq6quHz1Mj"
            + "greWr3xTsy6gF2yphUEkGHh4v22XvK+FLx9Jb6zloMWA2GG9gpUpvMnl1fH4";

    log("Requesting X509 key recovery.");
    recoveryRequestId = keyClient.recoverKey(new KeyId(keyID), null, null, null, b64Certificate)
            .getRequestInfo().getRequestId();
    log("Requesting X509 key recovery request: " + recoveryRequestId);

    // Test 15: Approve x509 key recovery
    log("Approving X509 key recovery request: " + recoveryRequestId);
    keyClient.approveRequest(recoveryRequestId);

    // Test 16: Recover x509 key
    log("Recovering X509 key based on request: " + recoveryRequestId);
    try {
        // KeyData recoveredX509Key = client.recoverKey(recoveryRequestId, "netscape");
        // log("Success: X509Key recovered: " + recoveredX509Key.getP12Data());
    } catch (RequestNotFoundException e) {
        log("Error: recovering X509Key");
    }

    // Test 1: Get transport certificate from DRM
    transportCert = systemCertClient.getTransportCert().getEncoded();
    transportCert = transportCert.substring(CertData.HEADER.length(), transportCert.indexOf(CertData.FOOTER));
    log("Transport Cert retrieved from DRM: " + transportCert);

    // Test 17: Get list of completed key archival requests
    log("\n\nList of completed archival requests");
    list = keyClient.listRequests("complete", IRequest.SYMKEY_GENERATION_REQUEST);
    if (list.getTotal() == 0) {
        log("No requests found");
    } else {
        Iterator<KeyRequestInfo> iter = list.getEntries().iterator();
        while (iter.hasNext()) {
            KeyRequestInfo info = iter.next();
            printRequestInfo(info);
        }
    }

    // Test 18: Generate symmetric key
    clientKeyId = "Symmetric Key #1234f " + Calendar.getInstance().getTime().toString();
    List<String> usages = new ArrayList<String>();
    usages.add(SymKeyGenerationRequest.DECRYPT_USAGE);
    usages.add(SymKeyGenerationRequest.ENCRYPT_USAGE);
    KeyRequestResponse genKeyResponse = keyClient.generateSymmetricKey(clientKeyId,
            KeyRequestResource.AES_ALGORITHM, 128, usages, null);
    printRequestInfo(genKeyResponse.getRequestInfo());
    keyId = genKeyResponse.getKeyId();

    // Test 19: Get keyId for active key with client ID
    log("Getting key ID for symmetric key");
    keyInfo = keyClient.getActiveKeyInfo(clientKeyId);
    printKeyInfo(keyInfo);
    keyId2 = keyInfo.getKeyId();
    if (keyId2 == null) {
        log("No archived key found");
    } else {
        log("Archived Key found: " + keyId);
    }
    if (!keyId.equals(keyId2)) {
        log("Error: key ids from search and archival do not match");
    } else {
        log("Success: key ids from search and archival match.");
    }

    // Test 20: Submit a recovery request for the symmetric key using a session key
    log("Submitting a recovery request for the symmetric key using session key");
    try {
        sessionKey = nssCrypto.generateSessionKey();
        wrappedRecoveryKey = nssCrypto.wrapSessionKeyWithTransportCert(sessionKey, transportCert);
        keyData = keyClient.retrieveKey(keyId, wrappedRecoveryKey);
    } catch (Exception e) {
        log("Exception in recovering symmetric key using session key: " + e.getMessage());
    }
    encryptedData = keyData.getEncryptedData();
    try {
        recoveredKey = new String(nssCrypto.unwrapWithSessionKey(encryptedData, sessionKey,
                KeyRequestResource.DES3_ALGORITHM, keyData.getNonceData()));
    } catch (Exception e) {
        log("Exception in unwrapping key: " + e.toString());
        e.printStackTrace();
    }

    // Test 21: Generate symmetric key - invalid algorithm
    try {
        genKeyResponse = keyClient.generateSymmetricKey("Symmetric Key #1235", "AFS", 128, usages, null);
    } catch (Exception e) {
        log("Exception: " + e);
    }

    // Test 22: Generate symmetric key - invalid key size
    try {
        genKeyResponse = keyClient.generateSymmetricKey("Symmetric Key #1236", "AES", 0, usages, null);
    } catch (Exception e) {
        log("Exception: " + e);
    }

    // Test 23: Generate symmetric key - usages not defined
    try {
        genKeyResponse = keyClient.generateSymmetricKey("Symmetric Key #1236", "DES", 56, null, null);
    } catch (Exception e) {
        log("Exception: " + e);
    }

    // Test 24: Generate and archive a symmetric key of type AES
    log("Archiving symmetric key");
    clientKeyId = "UUID: 123-45-6789 VEK " + Calendar.getInstance().getTime().toString();
    try {
        vek = nssCrypto.generateSymmetricKey(KeyRequestResource.AES_ALGORITHM, 128);
        byte[] encoded = CryptoUtil.createPKIArchiveOptions(nssCrypto.getManager(), nssCrypto.getToken(),
                transportCert, vek, null, KeyGenAlgorithm.DES3, 0, new IVParameterSpec(iv));

        KeyRequestResponse response = keyClient.archivePKIOptions(clientKeyId,
                KeyRequestResource.SYMMETRIC_KEY_TYPE, KeyRequestResource.AES_ALGORITHM, 128, encoded);
        log("Archival Results:");
        printRequestInfo(response.getRequestInfo());
        keyId = response.getKeyId();
    } catch (Exception e) {
        log("Exception in archiving symmetric key: " + e.getMessage());
        e.printStackTrace();
    }

    // Test 25: Get keyId for active key with client ID
    log("Getting key ID for symmetric key");
    keyInfo = keyClient.getActiveKeyInfo(clientKeyId);
    printKeyInfo(keyInfo);
    keyId2 = keyInfo.getKeyId();
    if (keyId2 == null) {
        log("No archived key found");
    } else {
        log("Archived Key found: " + keyId);
    }
    if (!keyId.equals(keyId2)) {
        log("Error: key ids from search and archival do not match");
    } else {
        log("Success: key ids from search and archival match.");
    }

    // Test 26: Submit a recovery request for the symmetric key
    log("Submitting a recovery request for the symmetric key without using session key");
    try {
        keyData = keyClient.retrieveKey(keyId);
    } catch (Exception e) {
        log("Exception in recovering symmetric key: " + e.getMessage());
    }
    // Since no session key is provided externally, the retrieveKey method
    // generates a session key, wraps it with the transport cert and completes the request.
    // The encrypted data is then unwrapped using the temporary session key and set to
    // the attribute privateData.
    recoveredKey = Utils.base64encode(keyData.getData());

    if (!recoveredKey.equals(Utils.base64encode(vek.getEncoded()))) {
        log("Error: recovered and archived keys do not match!");
    } else {
        log("Success: recovered and archived keys match!");
    }

    // Test 27: Get key info
    log("getting key info for existing key");
    printKeyInfo(keyClient.getKeyInfo(keyId));

    // Test 28: Modify status
    log("modify the key status");
    keyClient.modifyKeyStatus(keyId, KeyResource.KEY_STATUS_INACTIVE);
    keyInfo = keyClient.getKeyInfo(keyId);
    printKeyInfo(keyInfo);

    // Test 29: Confirm no more active keys with this ID
    log("look for active keys with this id");
    clientKeyId = keyInfo.getClientKeyID();
    try {
        keyInfo = keyClient.getActiveKeyInfo(clientKeyId);
        printKeyInfo(keyInfo);
    } catch (ResourceNotFoundException e) {
        log("Success: ResourceNotFound exception thrown: " + e);
    }

    // Test asymmetric key generation.
    String[] algs = { "RSA", "DSA" };
    for (int i = 0; i < algs.length; i++) {
        // Test 30: Generate asymmetric keys
        System.out.println("\nTesting asymmetric key generation for algorithm " + algs[i]);
        clientKeyId = "AsymKey #" + Calendar.getInstance().getTimeInMillis();
        usages.clear();
        usages.add(AsymKeyGenerationRequest.SIGN);
        usages.add(AsymKeyGenerationRequest.VERIFY);
        KeyRequestResponse response = keyClient.generateAsymmetricKey(clientKeyId, algs[i], 1024, usages, null);
        printRequestInfo(response.getRequestInfo());
        System.out.println();

        // Test 31: Get information of the newly generated asymmetric keys
        System.out.println("Fetch information of the newly generated asymmetric keys.");
        System.out.println();
        KeyInfo info = keyClient.getKeyInfo(response.getKeyId());
        printKeyInfo(info);
        System.out.println();

        // Test 32: Retrieve private key data
        System.out.println("Retrieving and verifying the generated private key.");
        try {
            keyData = keyClient.retrieveKey(response.getKeyId());
        } catch (Exception e) {
            log("Exception retrieving the private key data.");
            e.printStackTrace();
        }

        // Test 33: Verify the generated key pair.
        if (isKeyPairValid(algs[i], keyData.getData(), info.getPublicKey())) {
            log("The key pair generated using " + algs[i] + " algorithm is valid.");
        } else {
            log("The key pair generated using " + algs[i] + " algorithm is invalid.");
        }
        System.out.println();
    }

    // Test 34:
}
From source file:com.hurence.logisland.plugin.PluginManager.java
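Here PosixParser parses a required OptionGroup of mutually exclusive install/remove/list commands built with OptionBuilder.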
public static void main(String... args) throws Exception {
    System.out.println(BannerLoader.loadBanner());

    String logislandHome = new File(
            new File(PluginManager.class.getProtectionDomain().getCodeSource().getLocation().getPath())
                    .getParent()).getParent();
    System.out.println("Using Logisland home: " + logislandHome);

    Options options = new Options();
    OptionGroup mainGroup = new OptionGroup()
            .addOption(OptionBuilder
                    .withDescription(
                            "Install a component. It can be either a logisland plugin or a kafka connect module.")
                    .withArgName("artifact").hasArgs(1).withLongOpt("install").create("i"))
            .addOption(OptionBuilder
                    .withDescription(
                            "Removes a component. It can be either a logisland plugin or a kafka connect module.")
                    .withArgName("artifact").hasArgs(1).withLongOpt("remove").create("r"))
            .addOption(OptionBuilder.withDescription("List installed components.").withLongOpt("list")
                    .create("l"));
    mainGroup.setRequired(true);
    options.addOptionGroup(mainGroup);
    options.addOption(OptionBuilder.withDescription("Print this help.").withLongOpt("help").create("h"));

    try {
        CommandLine commandLine = new PosixParser().parse(options, args);
        System.out.println(commandLine.getArgList());
        if (commandLine.hasOption("l")) {
            listPlugins();
        } else if (commandLine.hasOption("i")) {
            installPlugin(commandLine.getOptionValue("i"), logislandHome);
        } else if (commandLine.hasOption("r")) {
            removePlugin(commandLine.getOptionValue("r"));
        } else {
            printUsage(options);
        }
    } catch (ParseException e) {
        if (!options.hasOption("h")) {
            System.err.println(e.getMessage());
            System.out.println();
        }
        printUsage(options);
    }
}
From source file:edu.msu.cme.rdp.probematch.cli.SliceToPrimer.java
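This example mixes long options (edit distances, length bounds, output files) with three positional arguments recovered via CommandLine.getArgs().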
public static void main(String[] args) throws Exception {
    //args = "--fedit-dist 4 --redit-dist=4 -k --max-length=400 --min-length=280 -o java_sliced_edit4.fasta TGCGAYCCSAARGCBGACTC ATSGCCATCATYTCRCCGGA /scratch/fishjord/tae_kwon_primer_match/all_genomes.fasta".split(" ");
    PatternBitMask64[] fprimers;
    String[] fprimerStrs, rprimerStrs;
    PatternBitMask64[] rprimers;
    FastaWriter seqOut;
    PrintStream statsOut;

    int fEdit = 3;
    int rEdit = 3;
    int minLength = Integer.MIN_VALUE;
    int maxLength = Integer.MAX_VALUE;
    boolean allowAmbiguities = true;
    boolean keepPrimers = false;

    SequenceReader inSeqs;

    try {
        CommandLine line = new PosixParser().parse(options, args);

        if (line.hasOption("edit-dist")) {
            fEdit = rEdit = Integer.parseInt(line.getOptionValue("edit-dist"));
            if (line.hasOption("redit-dist") || line.hasOption("fedit-dist")) {
                throw new Exception("edit-dist, [fedit-dist, redit-dist] are mutually exclusive");
            }
        }
        if (line.hasOption("fedit-dist")) {
            fEdit = Integer.parseInt(line.getOptionValue("fedit-dist"));
        }
        if (line.hasOption("no-ambiguities")) {
            allowAmbiguities = false;
        }
        if (line.hasOption("keep-primers")) {
            keepPrimers = true;
        }
        if (line.hasOption("redit-dist")) {
            rEdit = Integer.parseInt(line.getOptionValue("redit-dist"));
        }
        if (line.hasOption("seq-out")) {
            seqOut = new FastaWriter(new File(line.getOptionValue("seq-out")));
        } else {
            throw new Exception("Must specify seq-out");
        }
        if (line.hasOption("stats-out")) {
            statsOut = new PrintStream(new File(line.getOptionValue("stats-out")));
        } else {
            statsOut = System.out;
        }
        if (line.hasOption("min-length")) {
            minLength = Integer.parseInt(line.getOptionValue("min-length"));
        }
        if (line.hasOption("max-length")) {
            maxLength = Integer.parseInt(line.getOptionValue("max-length"));
        }

        args = line.getArgs();
        if (args.length != 3) {
            throw new Exception("Unexpected number of command line arguments");
        }

        fprimers = translateStringPrimers(args[0].split(","), allowAmbiguities, false);
        fprimerStrs = args[0].split(",");
        rprimers = translateStringPrimers(args[1].split(","), allowAmbiguities, true);
        rprimerStrs = args[1].split(",");
        inSeqs = new SequenceReader(new File(args[2]));
    } catch (Exception e) {
        new HelpFormatter().printHelp("SliceToPrimer [options] <f,p,r,i,m,e,r> <r,p,r,i,m,e,r> <in_seq_file>",
                options);
        System.err.println("ERROR: " + e.getMessage());
        return;
    }

    Sequence seq;
    statsOut.println("orig_seqid\tsliced_seqid\tfprimer\tstart\tend\tscore\trprimer\tstart\tend\tscore\tlength");

    ScoringMatrix scoringMatrix = ScoringMatrix.getDefaultNuclMatrix();
    DPMAligner[] faligners = new DPMAligner[fprimers.length];
    for (int index = 0; index < faligners.length; index++) {
        faligners[index] = new DPMAligner(fprimerStrs[index], Integer.MAX_VALUE);
    }

    try {
        while ((seq = inSeqs.readNextSequence()) != null) {
            Set<PrimerMatch> fprimerMatches = new HashSet<PrimerMatch>();
            Set<PrimerMatch> rprimerMatches = new HashSet<PrimerMatch>();

            for (int index = 0; index < fprimers.length; index++) {
                PatternBitMask64 primer = fprimers[index];
                for (BitVector64Match r : BitVector64.process(seq.getSeqString().toCharArray(), primer, fEdit)
                        .getResults()) {
                    PrimerMatch match = new PrimerMatch();
                    match.start = r.getPosition() - (primer.getPatternLength() + r.getScore());
                    match.end = r.getPosition();
                    match.score = r.getScore();
                    match.primerIndex = index;
                    fprimerMatches.add(match);
                }
            }

            for (int index = 0; index < rprimers.length; index++) {
                PatternBitMask64 primer = rprimers[index];
                for (BitVector64Match r : BitVector64.process(seq.getSeqString().toCharArray(), primer, rEdit)
                        .getResults()) {
                    PrimerMatch match = new PrimerMatch();
                    match.start = r.getPosition() - (primer.getPatternLength() + r.getScore());
                    match.end = r.getPosition();
                    match.score = r.getScore();
                    match.primerIndex = index;
                    rprimerMatches.add(match);
                }
            }

            if (fprimerMatches.isEmpty() || rprimerMatches.isEmpty()) {
                statsOut.println(seq.getSeqName() + "\tEither/or no forward/reverse primer hits");
                continue;
            }

            for (PrimerMatch fmatch : fprimerMatches) {
                PrimerMatch bestReverse = null;
                int bestScore = Integer.MAX_VALUE;
                for (PrimerMatch rmatch : rprimerMatches) {
                    if (rmatch.start > fmatch.end && rmatch.start - fmatch.end < bestScore) {
                        bestReverse = rmatch;
                        bestScore = rmatch.start - fmatch.end;
                    }
                }

                if (bestReverse == null) {
                    statsOut.println(seq.getSeqName() + "\tNo reverse primer before " + fmatch.end);
                    continue;
                }

                String slicedSeq = null;
                if (keepPrimers) {
                    slicedSeq = seq.getSeqString().substring(fmatch.start, bestReverse.end);
                } else {
                    slicedSeq = seq.getSeqString().substring(fmatch.end, bestReverse.start);
                }
                String seqid = seq.getSeqName() + "_" + fmatch.primerIndex + "_" + fmatch.start;

                if (slicedSeq.length() > minLength && slicedSeq.length() < maxLength) {
                    seqOut.writeSeq(seqid, "", slicedSeq);
                }

                DPMAlignment seqs = faligners[fmatch.primerIndex]
                        .align(seq.getSeqString().substring(fmatch.start, fmatch.end));
                System.err.println(">" + seqid);
                System.err.println(fprimerStrs[fmatch.primerIndex]);
                System.err.println(seq.getSeqString().substring(fmatch.start, fmatch.end));
                System.err.println();
                System.err.println(seqs.getAlignedMatchFragment());
                System.err.println(seqs.getAlignedProbe());
                System.err.println();

                statsOut.println(seq.getSeqName() + "\t" + seqid + "\t" + fmatch.primerIndex + "\t"
                        + fmatch.start + "\t" + fmatch.end + "\t" + fmatch.score + "\t"
                        + bestReverse.primerIndex + "\t" + bestReverse.start + "\t" + bestReverse.end + "\t"
                        + bestReverse.score + "\t" + slicedSeq.length());
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        statsOut.close();
        seqOut.close();
    }
}
From source file:edu.msu.cme.rdp.readseq.writers.StkWriter.java
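A small converter with two options; the remaining positional arguments name the input and output files.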
public static void main(String[] args) throws Exception {
    Options options = new Options();
    options.addOption("r", "removeref", false, "if set, do not write the GC reference sequences to output");
    options.addOption("h", "header", true,
            "the header of the output file in case a different stk version is needed, default is " + STK_HEADER);

    String header = STK_HEADER;
    boolean removeRef = false;

    try {
        CommandLine line = new PosixParser().parse(options, args);
        if (line.hasOption("removeref")) {
            removeRef = true;
        }
        if (line.hasOption("header")) {
            header = line.getOptionValue("header");
        }

        args = line.getArgs();
        if (args.length < 2) {
            throw new Exception("Need input and output files");
        }
    } catch (Exception e) {
        new HelpFormatter().printHelp("USAGE: to-stk <input-file> <out-file>", options);
        System.err.println("ERROR: " + e.getMessage());
        System.exit(1);
        return;
    }

    SequenceReader reader = new SequenceReader(new File(args[0]));
    PrintStream out = new PrintStream(new File(args[1]));
    StkWriter writer = new StkWriter(reader, out, header);
    reader = new SequenceReader(new File(args[0]));
    Sequence seq;

    while ((seq = reader.readNextSequence()) != null) {
        if (seq.getSeqName().startsWith("#") && removeRef) {
            continue;
        }
        writer.writeSeq(seq);
    }
    writer.writeEndOfBlock();
    writer.close();
    reader.close();
}
From source file:edu.nyu.vida.data_polygamy.feature_identification.IndexCreation.java
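A Hadoop job driver that builds each Option explicitly, marks some as required, and reads multi-valued arguments with getOptionValues().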
/**
 * @param args
 */
@SuppressWarnings({ "deprecation" })
public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {

    Options options = new Options();

    Option forceOption = new Option("f", "force", false,
            "force the computation of the index and events even if files already exist");
    forceOption.setRequired(false);
    options.addOption(forceOption);

    Option thresholdOption = new Option("t", "use-custom-thresholds", false,
            "use custom thresholds for regular and rare events, defined in HDFS_HOME/"
                    + FrameworkUtils.thresholdDir + " file");
    thresholdOption.setRequired(false);
    options.addOption(thresholdOption);

    Option gOption = new Option("g", "group", true,
            "set group of datasets for which the indices and events will be computed");
    gOption.setRequired(true);
    gOption.setArgName("GROUP");
    gOption.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(gOption);

    Option machineOption = new Option("m", "machine", true, "machine identifier");
    machineOption.setRequired(true);
    machineOption.setArgName("MACHINE");
    machineOption.setArgs(1);
    options.addOption(machineOption);

    Option nodesOption = new Option("n", "nodes", true, "number of nodes");
    nodesOption.setRequired(true);
    nodesOption.setArgName("NODES");
    nodesOption.setArgs(1);
    options.addOption(nodesOption);

    Option s3Option = new Option("s3", "s3", false, "data on Amazon S3");
    s3Option.setRequired(false);
    options.addOption(s3Option);

    Option awsAccessKeyIdOption = new Option("aws_id", "aws-id", true,
            "aws access key id; this is required if the execution is on aws");
    awsAccessKeyIdOption.setRequired(false);
    awsAccessKeyIdOption.setArgName("AWS-ACCESS-KEY-ID");
    awsAccessKeyIdOption.setArgs(1);
    options.addOption(awsAccessKeyIdOption);

    Option awsSecretAccessKeyOption = new Option("aws_key", "aws-key", true,
            "aws secret access key; this is required if the execution is on aws");
    awsSecretAccessKeyOption.setRequired(false);
    awsSecretAccessKeyOption.setArgName("AWS-SECRET-ACCESS-KEY");
    awsSecretAccessKeyOption.setArgs(1);
    options.addOption(awsSecretAccessKeyOption);

    Option bucketOption = new Option("b", "s3-bucket", true,
            "bucket on s3; this is required if the execution is on aws");
    bucketOption.setRequired(false);
    bucketOption.setArgName("S3-BUCKET");
    bucketOption.setArgs(1);
    options.addOption(bucketOption);

    Option helpOption = new Option("h", "help", false, "display this message");
    helpOption.setRequired(false);
    options.addOption(helpOption);

    HelpFormatter formatter = new HelpFormatter();
    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;

    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e) {
        formatter.printHelp("hadoop jar data-polygamy.jar "
                + "edu.nyu.vida.data_polygamy.feature_identification.IndexCreation", options, true);
        System.exit(0);
    }

    if (cmd.hasOption("h")) {
        formatter.printHelp("hadoop jar data-polygamy.jar "
                + "edu.nyu.vida.data_polygamy.feature_identification.IndexCreation", options, true);
        System.exit(0);
    }

    boolean s3 = cmd.hasOption("s3");
    String s3bucket = "";
    String awsAccessKeyId = "";
    String awsSecretAccessKey = "";

    if (s3) {
        if ((!cmd.hasOption("aws_id")) || (!cmd.hasOption("aws_key")) || (!cmd.hasOption("b"))) {
            System.out.println("Arguments 'aws_id', 'aws_key', and 'b' are mandatory if execution is on AWS.");
            formatter.printHelp("hadoop jar data-polygamy.jar "
                    + "edu.nyu.vida.data_polygamy.feature_identification.IndexCreation", options, true);
            System.exit(0);
        }
        s3bucket = cmd.getOptionValue("b");
        awsAccessKeyId = cmd.getOptionValue("aws_id");
        awsSecretAccessKey = cmd.getOptionValue("aws_key");
    }

    boolean snappyCompression = false;
    boolean bzip2Compression = false;
    String machine = cmd.getOptionValue("m");
    int nbNodes = Integer.parseInt(cmd.getOptionValue("n"));

    Configuration s3conf = new Configuration();
    if (s3) {
        s3conf.set("fs.s3.awsAccessKeyId", awsAccessKeyId);
        s3conf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey);
        s3conf.set("bucket", s3bucket);
    }

    String datasetNames = "";
    String datasetIds = "";
    ArrayList<String> shortDataset = new ArrayList<String>();
    ArrayList<String> shortDatasetIndex = new ArrayList<String>();
    HashMap<String, String> datasetAgg = new HashMap<String, String>();
    HashMap<String, String> datasetId = new HashMap<String, String>();
    HashMap<String, HashMap<Integer, Double>> datasetRegThreshold = new HashMap<String, HashMap<Integer, Double>>();
    HashMap<String, HashMap<Integer, Double>> datasetRareThreshold = new HashMap<String, HashMap<Integer, Double>>();

    Path path = null;
    FileSystem fs = FileSystem.get(new Configuration());
    BufferedReader br;

    boolean removeExistingFiles = cmd.hasOption("f");
    boolean isThresholdUserDefined = cmd.hasOption("t");

    for (String dataset : cmd.getOptionValues("g")) {

        // getting aggregates
        String[] aggregate = FrameworkUtils.searchAggregates(dataset, s3conf, s3);
        if (aggregate.length == 0) {
            System.out.println("No aggregates found for " + dataset + ".");
            continue;
        }

        // getting aggregates header
        String aggregatesHeaderFileName = FrameworkUtils.searchAggregatesHeader(dataset, s3conf, s3);
        if (aggregatesHeaderFileName == null) {
            System.out.println("No aggregate header for " + dataset);
            continue;
        }
        String aggregatesHeader = s3bucket + FrameworkUtils.preProcessingDir + "/" + aggregatesHeaderFileName;

        shortDataset.add(dataset);
        datasetId.put(dataset, null);

        if (s3) {
            path = new Path(aggregatesHeader);
            fs = FileSystem.get(path.toUri(), s3conf);
        } else {
            path = new Path(fs.getHomeDirectory() + "/" + aggregatesHeader);
        }

        br = new BufferedReader(new InputStreamReader(fs.open(path)));
        datasetAgg.put(dataset, br.readLine().split("\t")[1]);
        br.close();
        if (s3)
            fs.close();
    }

    if (shortDataset.size() == 0) {
        System.out.println("No datasets to process.");
        System.exit(0);
    }

    // getting dataset id
    if (s3) {
        path = new Path(s3bucket + FrameworkUtils.datasetsIndexDir);
        fs = FileSystem.get(path.toUri(), s3conf);
    } else {
        path = new Path(fs.getHomeDirectory() + "/" + FrameworkUtils.datasetsIndexDir);
    }
    br = new BufferedReader(new InputStreamReader(fs.open(path)));
    String line = br.readLine();
    while (line != null) {
        String[] dt = line.split("\t");
        if (datasetId.containsKey(dt[0])) {
            datasetId.put(dt[0], dt[1]);
            datasetNames += dt[0] + ",";
            datasetIds += dt[1] + ",";
        }
        line = br.readLine();
    }
    br.close();

    datasetNames = datasetNames.substring(0, datasetNames.length() - 1);
    datasetIds = datasetIds.substring(0, datasetIds.length() - 1);

    Iterator<String> it = shortDataset.iterator();
    while (it.hasNext()) {
        String dataset = it.next();
        if (datasetId.get(dataset) == null) {
            System.out.println("No dataset id for " + dataset);
            System.exit(0);
        }
    }

    // getting user defined thresholds
    if (isThresholdUserDefined) {
        if (s3) {
            path = new Path(s3bucket + FrameworkUtils.thresholdDir);
            fs = FileSystem.get(path.toUri(), s3conf);
        } else {
            path = new Path(fs.getHomeDirectory() + "/" + FrameworkUtils.thresholdDir);
        }
        br = new BufferedReader(new InputStreamReader(fs.open(path)));
        line = br.readLine();
        while (line != null) {
            // getting dataset name
            String dataset = line.trim();
            HashMap<Integer, Double> regThresholds = new HashMap<Integer, Double>();
            HashMap<Integer, Double> rareThresholds = new HashMap<Integer, Double>();
            line = br.readLine();
            while ((line != null) && (line.split("\t").length > 1)) {
                // getting attribute ids and thresholds
                String[] keyVals = line.trim().split("\t");
                int att = Integer.parseInt(keyVals[0].trim());
                regThresholds.put(att, Double.parseDouble(keyVals[1].trim()));
                rareThresholds.put(att, Double.parseDouble(keyVals[2].trim()));
                line = br.readLine();
            }
            datasetRegThreshold.put(dataset, regThresholds);
            datasetRareThreshold.put(dataset, rareThresholds);
        }
        br.close();
    }
    if (s3)
        fs.close();

    // datasets that will use existing merge tree
    ArrayList<String> useMergeTree = new ArrayList<String>();

    // creating index for each spatio-temporal resolution
    FrameworkUtils.createDir(s3bucket + FrameworkUtils.indexDir, s3conf, s3);

    HashSet<String> input = new HashSet<String>();

    for (String dataset : shortDataset) {

        String indexCreationOutputFileName = s3bucket + FrameworkUtils.indexDir + "/" + dataset + "/";
        String mergeTreeFileName = s3bucket + FrameworkUtils.mergeTreeDir + "/" + dataset + "/";

        if (removeExistingFiles) {
            FrameworkUtils.removeFile(indexCreationOutputFileName, s3conf, s3);
            FrameworkUtils.removeFile(mergeTreeFileName, s3conf, s3);
            FrameworkUtils.createDir(mergeTreeFileName, s3conf, s3);
        } else if (datasetRegThreshold.containsKey(dataset)) {
            FrameworkUtils.removeFile(indexCreationOutputFileName, s3conf, s3);
            if (FrameworkUtils.fileExists(mergeTreeFileName, s3conf, s3)) {
                useMergeTree.add(dataset);
            }
        }

        if (!FrameworkUtils.fileExists(indexCreationOutputFileName, s3conf, s3)) {
            input.add(s3bucket + FrameworkUtils.aggregatesDir + "/" + dataset);
            shortDatasetIndex.add(dataset);
        }
    }

    if (input.isEmpty()) {
        System.out.println("All the input datasets have indices.");
        System.out.println("Use -f in the beginning of the command line to force the computation.");
        System.exit(0);
    }

    String aggregateDatasets = "";
    it = input.iterator();
    while (it.hasNext()) {
        aggregateDatasets += it.next() + ",";
    }

    Job icJob = null;
    Configuration icConf = new Configuration();
    Machine machineConf = new Machine(machine, nbNodes);

    String jobName = "index";
    String indexOutputDir = s3bucket + FrameworkUtils.indexDir + "/tmp/";

    FrameworkUtils.removeFile(indexOutputDir, s3conf, s3);

    icConf.set("dataset-name", datasetNames);
    icConf.set("dataset-id", datasetIds);

    if (!useMergeTree.isEmpty()) {
        String useMergeTreeStr = "";
        for (String dt : useMergeTree) {
            useMergeTreeStr += dt + ",";
        }
        icConf.set("use-merge-tree", useMergeTreeStr.substring(0, useMergeTreeStr.length() - 1));
    }

    for (int i = 0; i < shortDataset.size(); i++) {
        String dataset = shortDataset.get(i);
        String id = datasetId.get(dataset);
        icConf.set("dataset-" + id + "-aggregates", datasetAgg.get(dataset));
        if (datasetRegThreshold.containsKey(dataset)) {
            HashMap<Integer, Double> regThresholds = datasetRegThreshold.get(dataset);
            String thresholds = "";
            for (int att : regThresholds.keySet()) {
                thresholds += String.valueOf(att) + "-" + String.valueOf(regThresholds.get(att)) + ",";
            }
            icConf.set("regular-" + id, thresholds.substring(0, thresholds.length() - 1));
        }
        if (datasetRareThreshold.containsKey(dataset)) {
            HashMap<Integer, Double> rareThresholds = datasetRareThreshold.get(dataset);
            String thresholds = "";
            for (int att : rareThresholds.keySet()) {
                thresholds += String.valueOf(att) + "-" + String.valueOf(rareThresholds.get(att)) + ",";
            }
            icConf.set("rare-" + id, thresholds.substring(0, thresholds.length() - 1));
        }
    }

    icConf.set("mapreduce.tasktracker.map.tasks.maximum", String.valueOf(machineConf.getMaximumTasks()));
    icConf.set("mapreduce.tasktracker.reduce.tasks.maximum", String.valueOf(machineConf.getMaximumTasks()));
    icConf.set("mapreduce.jobtracker.maxtasks.perjob", "-1");
    icConf.set("mapreduce.reduce.shuffle.parallelcopies", "20");
    icConf.set("mapreduce.input.fileinputformat.split.minsize", "0");
    icConf.set("mapreduce.task.io.sort.mb", "200");
    icConf.set("mapreduce.task.io.sort.factor", "100");
    //icConf.set("mapreduce.task.timeout", "1800000");
    machineConf.setMachineConfiguration(icConf);

    if (s3) {
        machineConf.setMachineConfiguration(icConf);
        icConf.set("fs.s3.awsAccessKeyId", awsAccessKeyId);
        icConf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey);
        icConf.set("bucket", s3bucket);
    }

    if (snappyCompression) {
        icConf.set("mapreduce.map.output.compress", "true");
        icConf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec");
        //icConf.set("mapreduce.output.fileoutputformat.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec");
    }
    if (bzip2Compression) {
        icConf.set("mapreduce.map.output.compress", "true");
        icConf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.BZip2Codec");
        //icConf.set("mapreduce.output.fileoutputformat.compress.codec", "org.apache.hadoop.io.compress.BZip2Codec");
    }

    icJob = new Job(icConf);
    icJob.setJobName(jobName);

    icJob.setMapOutputKeyClass(AttributeResolutionWritable.class);
    icJob.setMapOutputValueClass(SpatioTemporalFloatWritable.class);
    icJob.setOutputKeyClass(AttributeResolutionWritable.class);
    icJob.setOutputValueClass(TopologyTimeSeriesWritable.class);
    //icJob.setOutputKeyClass(Text.class);
    //icJob.setOutputValueClass(Text.class);

    icJob.setMapperClass(IndexCreationMapper.class);
    icJob.setReducerClass(IndexCreationReducer.class);
    icJob.setNumReduceTasks(machineConf.getNumberReduces());

    icJob.setInputFormatClass(SequenceFileInputFormat.class);
    //icJob.setOutputFormatClass(SequenceFileOutputFormat.class);
    LazyOutputFormat.setOutputFormatClass(icJob, SequenceFileOutputFormat.class);
    //LazyOutputFormat.setOutputFormatClass(icJob, TextOutputFormat.class);
    SequenceFileOutputFormat.setCompressOutput(icJob, true);
    SequenceFileOutputFormat.setOutputCompressionType(icJob, CompressionType.BLOCK);

    FileInputFormat.setInputDirRecursive(icJob, true);
    FileInputFormat.setInputPaths(icJob, aggregateDatasets.substring(0, aggregateDatasets.length() - 1));
    FileOutputFormat.setOutputPath(icJob, new Path(indexOutputDir));

    icJob.setJarByClass(IndexCreation.class);

    long start = System.currentTimeMillis();
    icJob.submit();
    icJob.waitForCompletion(true);
    System.out.println(jobName + "\t" + (System.currentTimeMillis() - start));

    // moving files to right place
    for (String dataset : shortDatasetIndex) {
        String from = s3bucket + FrameworkUtils.indexDir + "/tmp/" + dataset + "/";
        String to = s3bucket + FrameworkUtils.indexDir + "/" + dataset + "/";
        FrameworkUtils.renameFile(from, to, s3conf, s3);
    }
}
From source file:es.eucm.ead.exporter.ExporterMain.java
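An exporter front end: OptionBuilder defines multi-argument options, and the parsed flags select the packaging mode (JAR, WAR, APK, or EAD).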
@SuppressWarnings("all") public static void main(String args[]) { Options options = new Options(); Option help = new Option("h", "help", false, "print this message"); Option quiet = new Option("q", "quiet", false, "be extra quiet"); Option verbose = new Option("v", "verbose", false, "be extra verbose"); Option legacy = OptionBuilder.withArgName("s> <t").hasArgs(3) .withDescription(//from www . j a va 2s . c o m "source is a version 1.x game; must specify\n" + "<simplify> if 'true', simplifies result\n" + "<translate> if 'true', enables translation") .withLongOpt("import").create("i"); Option war = OptionBuilder.withArgName("web-base").hasArg() .withDescription("WAR packaging (web app); " + "must specify\n<web-base> the base WAR directory") .withLongOpt("war").create("w"); Option jar = OptionBuilder.withDescription("JAR packaging (desktop)").withLongOpt("jar").create("j"); Option apk = OptionBuilder.withArgName("props> <adk> <d").hasArgs(3) .withDescription("APK packaging (android); must specify \n" + "<props> (a properties file) \n" + "<adk> (location of the ADK to use) \n" + "<deploy> ('true' to install & deploy)") .withLongOpt("apk").create("a"); // EAD option Option ead = OptionBuilder.withDescription("EAD packaging (eAdventure)").withLongOpt("ead").create("e"); options.addOption(legacy); options.addOption(help); options.addOption(quiet); options.addOption(verbose); options.addOption(jar); options.addOption(war); options.addOption(apk); options.addOption(ead); CommandLineParser parser = new PosixParser(); CommandLine cmd; try { cmd = parser.parse(options, args); } catch (ParseException pe) { System.err.println("Error parsing command-line: " + pe.getMessage()); showHelp(options); return; } // general options String[] extras = cmd.getArgs(); if (cmd.hasOption(help.getOpt()) || extras.length < 2) { showHelp(options); return; } if (cmd.hasOption(verbose.getOpt())) { verbosity = Verbose; } if (cmd.hasOption(quiet.getOpt())) { verbosity = Quiet; } // import String source = extras[0]; // optional import step if (cmd.hasOption(legacy.getOpt())) { String[] values = cmd.getOptionValues(legacy.getOpt()); AdventureConverter converter = new AdventureConverter(); if (values.length > 0 && values[0].equalsIgnoreCase("true")) { converter.setEnableSimplifications(true); } if (values.length > 1 && values[1].equalsIgnoreCase("true")) { converter.setEnableTranslations(true); } // set source for next steps to import-target source = converter.convert(source, null); } if (cmd.hasOption(jar.getOpt())) { if (checkFilesExist(cmd, options, source)) { JarExporter e = new JarExporter(); e.export(source, extras[1], verbosity.equals(Quiet) ? 
new QuietStream() : System.err); } } else if (cmd.hasOption(apk.getOpt())) { String[] values = cmd.getOptionValues(apk.getOpt()); if (checkFilesExist(cmd, options, values[0], values[1], source)) { AndroidExporter e = new AndroidExporter(); Properties props = new Properties(); File propsFile = new File(values[0]); try { props.load(new FileReader(propsFile)); props.setProperty(AndroidExporter.SDK_HOME, props.getProperty(AndroidExporter.SDK_HOME, values[1])); } catch (IOException ioe) { System.err.println("Could not load properties from " + propsFile.getAbsolutePath()); return; } e.export(source, extras[1], props, values.length > 2 && values[2].equalsIgnoreCase("true")); } } else if (cmd.hasOption(war.getOpt())) { if (checkFilesExist(cmd, options, extras[0])) { WarExporter e = new WarExporter(); e.setWarPath(cmd.getOptionValue(war.getOpt())); e.export(source, extras[1]); } } else if (cmd.hasOption(ead.getOpt())) { String destiny = extras[1]; if (!destiny.endsWith(".ead")) { destiny += ".ead"; } FileUtils.zip(new File(destiny), new File(source)); } else { showHelp(options); } }
From source file:eu.scape_project.arc2warc.Arc2WarcHadoopJob.java
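PosixParser combined with Hadoop's GenericOptionsParser: only the arguments left over after Hadoop's own options are parsed.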
/**
 * Main entry point.
 *
 * @param args
 * @throws Exception
 */
public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Command line interface
    config = new CliConfig();
    CommandLineParser cmdParser = new PosixParser();
    GenericOptionsParser gop = new GenericOptionsParser(conf, args);
    CommandLine cmd = cmdParser.parse(Options.OPTIONS, gop.getRemainingArgs());
    if ((args.length == 0) || (cmd.hasOption(Options.HELP_OPT))) {
        Options.exit("Usage", 0);
    } else {
        Options.initOptions(cmd, config);
    }
    startHadoopJob(conf);
}
From source file:fll.scheduler.GreedySolver.java
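Note the fully qualified org.apache.commons.cli.ParseException in the catch clause, which avoids a clash with the other ParseException type caught later in the method.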
public static void main(final String[] args) throws InterruptedException {
    LogUtils.initializeLogging();

    final Options options = buildOptions();

    // parse options
    boolean optimize = false;
    File datafile = null;
    try {
        final CommandLineParser parser = new PosixParser();
        final CommandLine cmd = parser.parse(options, args);

        if (cmd.hasOption(OPTIMIZE_OPTION)) {
            optimize = true;
        }
        datafile = new File(cmd.getOptionValue(DATA_FILE_OPTION));
    } catch (final org.apache.commons.cli.ParseException pe) {
        LOGGER.error(pe.getMessage());
        usage(options);
        System.exit(1);
    }

    try {
        if (!datafile.canRead()) {
            LOGGER.fatal(datafile.getAbsolutePath() + " is not readable");
            System.exit(4);
        }

        final GreedySolver solver = new GreedySolver(datafile, optimize);
        final long start = System.currentTimeMillis();
        solver.solve(null);
        final long stop = System.currentTimeMillis();
        LOGGER.info("Solve took: " + (stop - start) / 1000.0 + " seconds");
    } catch (final ParseException e) {
        LOGGER.fatal(e, e);
        System.exit(5);
    } catch (final IOException e) {
        LOGGER.fatal("Error reading file", e);
        System.exit(4);
    } catch (final RuntimeException e) {
    } catch (final InvalidParametersException e) {
        LOGGER.fatal(e, e);
        System.exit(6);
    }
}
From source file:com.netflix.aegisthus.tools.SSTableExport.java
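An SSTable export tool that stores the parsed CommandLine in a static field and dispatches among several export modes based on the flags present.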
@SuppressWarnings("rawtypes") public static void main(String[] args) throws IOException { String usage = String.format("Usage: %s <sstable>", SSTableExport.class.getName()); CommandLineParser parser = new PosixParser(); try {/*w w w . j a va2 s.c o m*/ cmd = parser.parse(options, args); } catch (ParseException e1) { System.err.println(e1.getMessage()); HelpFormatter formatter = new HelpFormatter(); formatter.printHelp(usage, options); System.exit(1); } if (cmd.getArgs().length != 1) { HelpFormatter formatter = new HelpFormatter(); formatter.printHelp(usage, options); System.exit(1); } Map<String, AbstractType> convertors = null; if (cmd.hasOption(COLUMN_NAME_TYPE)) { try { convertors = new HashMap<String, AbstractType>(); convertors.put(SSTableScanner.COLUMN_NAME_KEY, TypeParser.parse(cmd.getOptionValue(COLUMN_NAME_TYPE))); } catch (ConfigurationException e) { System.err.println(e.getMessage()); HelpFormatter formatter = new HelpFormatter(); formatter.printHelp(usage, options); System.exit(1); } catch (SyntaxException e) { System.err.println(e.getMessage()); HelpFormatter formatter = new HelpFormatter(); formatter.printHelp(usage, options); System.exit(1); } } Descriptor.Version version = null; if (cmd.hasOption(OPT_VERSION)) { version = new Descriptor.Version(cmd.getOptionValue(OPT_VERSION)); } if (cmd.hasOption(INDEX_SPLIT)) { String ssTableFileName; DataInput input = null; if ("-".equals(cmd.getArgs()[0])) { ssTableFileName = System.getProperty("aegisthus.file.name"); input = new DataInputStream(new BufferedInputStream(System.in, 65536 * 10)); } else { ssTableFileName = new File(cmd.getArgs()[0]).getAbsolutePath(); input = new DataInputStream( new BufferedInputStream(new FileInputStream(ssTableFileName), 65536 * 10)); } exportIndexSplit(ssTableFileName, input); } else if (cmd.hasOption(INDEX)) { String ssTableFileName = new File(cmd.getArgs()[0]).getAbsolutePath(); exportIndex(ssTableFileName); } else if (cmd.hasOption(ROWSIZE)) { String ssTableFileName = new File(cmd.getArgs()[0]).getAbsolutePath(); exportRowSize(ssTableFileName); } else if ("-".equals(cmd.getArgs()[0])) { if (version == null) { System.err.println("when streaming must supply file version"); HelpFormatter formatter = new HelpFormatter(); formatter.printHelp(usage, options); System.exit(1); } exportStream(version); } else { String ssTableFileName = new File(cmd.getArgs()[0]).getAbsolutePath(); FileInputStream fis = new FileInputStream(ssTableFileName); InputStream inputStream = new DataInputStream(new BufferedInputStream(fis, 65536 * 10)); long end = -1; if (cmd.hasOption(END)) { end = Long.valueOf(cmd.getOptionValue(END)); } if (cmd.hasOption(OPT_COMP)) { CompressionMetadata cm = new CompressionMetadata( new BufferedInputStream(new FileInputStream(cmd.getOptionValue(OPT_COMP)), 65536), fis.getChannel().size()); inputStream = new CompressionInputStream(inputStream, cm); end = cm.getDataLength(); } DataInputStream input = new DataInputStream(inputStream); if (version == null) { version = Descriptor.fromFilename(ssTableFileName).version; } SSTableScanner scanner = new SSTableScanner(input, convertors, end, version); if (cmd.hasOption(OPT_MAX_COLUMN_SIZE)) { scanner.setMaxColSize(Long.parseLong(cmd.getOptionValue(OPT_MAX_COLUMN_SIZE))); } export(scanner); if (cmd.hasOption(OPT_MAX_COLUMN_SIZE)) { if (scanner.getErrorRowCount() > 0) { System.err.println(String.format("%d rows were too large", scanner.getErrorRowCount())); } } } }