Example usage for java.lang String trim

List of usage examples for java.lang String trim

Introduction

On this page you can find usage examples for java.lang String trim.

Prototype

public String trim() 

Document

Returns a string whose value is this string, with all leading and trailing space removed, where space is defined as any character whose codepoint is less than or equal to 'U+0020' (the space character).
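A minimal standalone sketch of the rule above (the class name TrimDemo is just for illustration): any leading or trailing character whose codepoint is at or below U+0020 is removed, while Unicode whitespace above that range, such as the no-break space U+00A0, is kept.

public class TrimDemo {
    public static void main(String[] args) {
        // Tabs, newlines, and spaces (codepoints <= U+0020) are stripped.
        System.out.println("[" + "\t hello world \n".trim() + "]"); // [hello world]

        // U+00A0 (no-break space) is above U+0020, so trim() keeps it.
        System.out.println("[" + "\u00A0hello\u00A0".trim() + "]"); // [\u00A0hello\u00A0]
    }
}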

Usage

From source file:CommandLineInterpreter.java

/**
 * Main method, command line input will get parsed here.
 *
 * @param args
 */
public static void main(String[] args) {
    // // test-arguments:
    // args = new String[] { "1.9", "-gui" };

    boolean success = false;
    if (args.length == 1) {
        String arg = args[0].trim().replaceAll("[-]+", "");
        if (arg.equals("help") || arg.equals("h"))
            printHelp(null);
    }

    if (args.length == 0) {
        printHelp("ONE ARGUMENT NEEDED");
    } else {
        try {
            boolean guiAlert = false;
            Float minVersion = null;
            File resourcesFile = null;

            // ------------------------------------------------ //
            // --
            // ------------------------------------------------ //
            final String minJavaVersionArgument = args[0];
            if (!minJavaVersionArgument.trim().isEmpty()) {
                try {
                    minVersion = Float.parseFloat(minJavaVersionArgument);
                } catch (Exception e) {
                    // do nothing
                }
            }
            if (minVersion == null || minVersion > 2 || minVersion < 1.6) {
                printHelp("VERSION STRING IS NOT VALID");
            }

            // ------------------------------------------------ //
            // --
            // ------------------------------------------------ //
            for (int i = 1; i < (args.length <= 3 ? args.length : 3); i++) {
                final String argument = args[i].trim();
                if (argument.equals("-gui")) {
                    guiAlert = true;
                } else {
                    String resourcesFilePath = argument;
                    if (!resourcesFilePath.isEmpty()) {
                        resourcesFile = new File(resourcesFilePath);
                        if (!resourcesFile.exists() || !resourcesFile.isFile() || !resourcesFile.canRead()) {
                            printHelp("RESOURCES FILE IS NOT VALID\n[" + resourcesFile.getAbsolutePath() + "]");
                        }
                    }
                }
            }
            // ------------------------------------------------ //
            // --
            // ------------------------------------------------ //

            success = checkJREVersion(minVersion, guiAlert);

            if (success && resourcesFile != null) {
                success = checkResources(resourcesFile, guiAlert);
            }

        } catch (Exception e) {
            success = false;
            e.printStackTrace();
        }
    }

    if (!success) {
        // set error exit code
        System.exit(1);
    }
}

From source file:edu.cmu.lti.oaqa.annographix.apps.SolrQueryApp.java

public static void main(String[] args) {
    Options options = new Options();

    options.addOption("u", null, true, "Solr URI");
    options.addOption("q", null, true, "Query");
    options.addOption("n", null, true, "Max # of results");
    options.addOption("o", null, true, "An optional TREC-style output file");
    options.addOption("w", null, false, "Do a warm-up query call, before each query");

    CommandLineParser parser = new org.apache.commons.cli.GnuParser();

    BufferedWriter trecOutFile = null;

    try {
        CommandLine cmd = parser.parse(options, args);
        String queryFile = null, solrURI = null;

        if (cmd.hasOption("u")) {
            solrURI = cmd.getOptionValue("u");
        } else {
            Usage("Specify Solr URI");
        }

        SolrServerWrapper solr = new SolrServerWrapper(solrURI);

        if (cmd.hasOption("q")) {
            queryFile = cmd.getOptionValue("q");
        } else {
            Usage("Specify Query file");
        }

        int numRet = 100;

        if (cmd.hasOption("n")) {
            numRet = Integer.parseInt(cmd.getOptionValue("n"));
        }

        if (cmd.hasOption("o")) {
            trecOutFile = new BufferedWriter(new FileWriter(new File(cmd.getOptionValue("o"))));
        }

        List<String> fieldList = new ArrayList<String>();
        fieldList.add(UtilConst.ID_FIELD);
        fieldList.add(UtilConst.SCORE_FIELD);

        double totalTime = 0;
        double retQty = 0;

        ArrayList<Double> queryTimes = new ArrayList<Double>();

        boolean bDoWarmUp = cmd.hasOption("w");

        if (bDoWarmUp) {
            System.out.println("Using a warmup step!");
        }

        int queryQty = 0;
        for (String t : FileUtils.readLines(new File(queryFile))) {
            t = t.trim();
            if (t.isEmpty())
                continue;
            int ind = t.indexOf('|');
            if (ind < 0)
                throw new Exception("Wrong format, line: '" + t + "'");
            String qID = t.substring(0, ind);
            String q = t.substring(ind + 1);

            SolrDocumentList res = null;

            if (bDoWarmUp) {
                res = solr.runQuery(q, fieldList, numRet);
            }

            Long tm1 = System.currentTimeMillis();
            res = solr.runQuery(q, fieldList, numRet);
            Long tm2 = System.currentTimeMillis();
            retQty += res.getNumFound();
            System.out.println(qID + " Obtained: " + res.getNumFound() + " entries in " + (tm2 - tm1) + " ms");
            double delta = (tm2 - tm1);
            totalTime += delta;
            queryTimes.add(delta);
            ++queryQty;

            if (trecOutFile != null) {

                ArrayList<SolrRes> resArr = new ArrayList<SolrRes>();
                for (SolrDocument doc : res) {
                    String id = (String) doc.getFieldValue(UtilConst.ID_FIELD);
                    float score = (Float) doc.getFieldValue(UtilConst.SCORE_FIELD);
                    resArr.add(new SolrRes(id, "", score));
                }
                SolrRes[] results = resArr.toArray(new SolrRes[resArr.size()]);
                Arrays.sort(results);

                SolrEvalUtils.saveTrecResults(qID, results, trecOutFile, TREC_RUN, results.length);
            }
        }
        double devTime = 0, meanTime = totalTime / queryQty;
        for (int i = 0; i < queryQty; ++i) {
            double d = queryTimes.get(i) - meanTime;
            devTime += d * d;
        }
        devTime = Math.sqrt(devTime / (queryQty - 1));
        System.out.println(String.format("Query time, mean/standard dev: %.2f/%.2f (ms)", meanTime, devTime));
        System.out.println(String.format("Avg # of docs returned: %.2f", retQty / queryQty));

        solr.close();
        if (trecOutFile != null) {
            trecOutFile.close();
        }
    } catch (ParseException e) {
        Usage("Cannot parse arguments");
    } catch (Exception e) {
        System.err.println("Terminating due to an exception: " + e);
        System.exit(1);
    }

}

From source file:org.hcmut.emr.SessionBuilder.java

public static void main(String[] args) throws FileNotFoundException, IOException {
    try (BufferedReader br = new BufferedReader(
            new FileReader("/home/sinhlk/myspace/emr/src/main/resources/patern"))) {
        ObjectMapper jsonMapper = new ObjectMapper();
        String line = br.readLine();
        Map<String, String> result = new HashMap<String, String>();
        List<NameValuePair> list = new ArrayList<>();

        while (line != null) {
            // Compare contents with isEmpty(); != on strings compares references.
            if (!line.trim().isEmpty()) {
                list.add(new BasicNameValuePair(line.trim().toLowerCase(), SessionBuilder.buildValue(line)));
                result.put(line.trim().toLowerCase(), SessionBuilder.buildValue(line));
            }
            // Advance unconditionally; otherwise a blank line would loop forever.
            line = br.readLine();
        }
        System.out.println(jsonMapper.writeValueAsString(list));
        File file = new File("/home/sinhlk/myspace/emr/src/main/resources/session.js");
        jsonMapper.writeValue(file, list);
    }

}

From source file:org.test.LookupSVNUsers.java

/**
 * @param args
 * @throws IOException 
 */
public static void main(String[] args) throws IOException {

    if (args.length != 2) {
        log.error("USAGE: <svn users file(input)> <git authors file (output)>");

        System.exit(-1);
    }

    Set<String> unmatchedNameSet = new LinkedHashSet<String>();

    Map<String, GitUser> gitUserMap = new LinkedHashMap<String, GitUser>();

    String svnAuthorsFile = args[0];

    List<String> lines = FileUtils.readLines(new File(svnAuthorsFile));

    for (String line : lines) {

        // intentionally handle both upper- and lower-case variants of the same name.
        String svnUserName = line.trim();

        if (svnUserName.contains("("))
            continue; // skip over this line as we can't use it on the url

        if (gitUserMap.keySet().contains(svnUserName))
            continue; // skip this duplicate.

        log.info("starting on user = " + svnUserName);

        String gitName = extractFullName(svnUserName);

        if (gitName == null) {

            gitName = extractFullName(svnUserName.toLowerCase());
        }

        if (gitName == null) {
            unmatchedNameSet.add(svnUserName);
        } else {

            gitUserMap.put(svnUserName, new GitUser(svnUserName, gitName));
            log.info("mapped user (" + svnUserName + ") to: " + gitName);

        }

    }

    List<String> mergedList = new ArrayList<String>();

    mergedList.add("# GENERATED ");

    List<String> userNameList = new ArrayList<String>();

    userNameList.addAll(gitUserMap.keySet());

    Collections.sort(userNameList);

    for (String userName : userNameList) {

        GitUser gUser = gitUserMap.get(userName);

        mergedList.add(gUser.getSvnAuthor() + " = " + gUser.getGitUser() + " <" + gUser.getSvnAuthor()
                + "@users.sourceforge.net>");

    }

    for (String username : unmatchedNameSet) {

        log.warn("failed to match SVN User = " + username);

        // add in the unmatched entries as is.
        mergedList.add(username + " = " + username + " <" + username + "@users.sourceforge.net>");
    }

    FileUtils.writeLines(new File(args[1]), "UTF-8", mergedList);

}

From source file:com.asakusafw.generator.HadoopBulkLoaderDDLGenerator.java

/**
 * Generates DDL for the HadoopBulkLoader target tables.
 *
 * @param args
 *            target table names
 * @throws Exception
 *             if the DDL cannot be generated
 */
public static void main(String[] args) throws Exception {
    String outputTablesString = findVariable(ENV_BULKLOADER_TABLES, true);
    List<String> outputTableList = null;
    if (outputTablesString != null && !OUTPUT_TABLES_PROPERTY.equals(outputTablesString)) {
        String[] outputTables = outputTablesString.trim().split("\\s+");
        outputTableList = Arrays.asList(outputTables);
    }

    String ddlTemplate;
    InputStream in = HadoopBulkLoaderDDLGenerator.class.getResourceAsStream(TEMPLATE_DDL_FILENAME);
    try {
        ddlTemplate = IOUtils.toString(in);
    } finally {
        in.close();
    }
    StringBuilder sb = new StringBuilder();

    for (String tableName : args) {
        if (outputTableList == null || outputTableList.contains(tableName)) {
            String tableddl = ddlTemplate.replaceAll(TABLENAME_REPLACE_STRING, tableName);
            sb.append(tableddl);
        }
    }

    String outputFilePath = findVariable(ENV_BULKLOADER_GENDDL, true);
    if (outputFilePath == null) {
        throw new RuntimeException(
                "ASAKUSA_BULKLOADER_TABLES???????");
    }
    FileUtils.write(new File(outputFilePath), sb);
}

From source file:com.glaf.jbpm.action.MultiPooledTaskInstanceAction.java

public static void main(String[] args) throws Exception {
    // groups are separated by ';' so the tokenizer yields one "{...}" set per token
    String actorIdxy = "{joy,sam};{pp,qq};{kit,cora};{eyb2000,huangcw}";
    StringTokenizer st2 = new StringTokenizer(actorIdxy, ";");
    while (st2.hasMoreTokens()) {
        String elem2 = st2.nextToken();
        if (StringUtils.isNotEmpty(elem2)) {
            elem2 = elem2.trim();
            if ((elem2.length() > 0 && elem2.charAt(0) == '{') && elem2.endsWith("}")) {
                elem2 = elem2.substring(elem2.indexOf("{") + 1, elem2.indexOf("}"));
                Set<String> actorIds = new HashSet<String>();
                StringTokenizer st4 = new StringTokenizer(elem2, ",");
                while (st4.hasMoreTokens()) {
                    String elem4 = st4.nextToken();
                    elem4 = elem4.trim();
                    if (elem4.length() > 0) {
                        actorIds.add(elem4);
                    }
                }
                System.out.println(actorIds);
            }
        }
    }
}

From source file:eu.fbk.dkm.sectionextractor.WikipediaSectionTitlesExtractor.java

public static void main(String args[]) throws IOException {

    CommandLineWithLogger commandLineWithLogger = new CommandLineWithLogger();
    commandLineWithLogger.addOption(OptionBuilder.withArgName("file").hasArg()
            .withDescription("wikipedia xml dump file").isRequired().withLongOpt("wikipedia-dump").create("d"));
    commandLineWithLogger.addOption(OptionBuilder.withArgName("file").hasArg().withDescription("Filter file")
            .withLongOpt("filter").create("f"));
    commandLineWithLogger.addOption(OptionBuilder.withArgName("dir").hasArg().withDescription("output file")
            .isRequired().withLongOpt("output-file").create("o"));
    commandLineWithLogger.addOption(OptionBuilder.withArgName("int").hasArg()
            .withDescription("max depth (default " + MAX_DEPTH + ")").withLongOpt("max-depth").create("m"));
    commandLineWithLogger.addOption(OptionBuilder.withArgName("int").hasArg()
            .withDescription("max num of sections").withLongOpt("max-num").create("n"));
    commandLineWithLogger.addOption(new Option("l", "print titles"));

    commandLineWithLogger.addOption(OptionBuilder.withArgName("int").hasArg()
            .withDescription(
                    "number of threads (default " + AbstractWikipediaXmlDumpParser.DEFAULT_THREADS_NUMBER + ")")
            .withLongOpt("num-threads").create("t"));
    commandLineWithLogger.addOption(OptionBuilder.withArgName("int").hasArg()
            .withDescription("number of pages to process (default all)").withLongOpt("num-pages").create("p"));
    commandLineWithLogger.addOption(OptionBuilder.withArgName("int").hasArg()
            .withDescription("receive notification every n pages (default "
                    + AbstractWikipediaExtractor.DEFAULT_NOTIFICATION_POINT + ")")
            .withLongOpt("notification-point").create("b"));

    CommandLine commandLine = null;
    try {
        commandLine = commandLineWithLogger.getCommandLine(args);
        PropertyConfigurator.configure(commandLineWithLogger.getLoggerProps());
    } catch (Exception e) {
        System.exit(1);
    }

    int numThreads = Integer.parseInt(commandLine.getOptionValue("num-threads",
            Integer.toString(AbstractWikipediaXmlDumpParser.DEFAULT_THREADS_NUMBER)));
    int numPages = Integer.parseInt(commandLine.getOptionValue("num-pages",
            Integer.toString(AbstractWikipediaExtractor.DEFAULT_NUM_PAGES)));
    int notificationPoint = Integer.parseInt(commandLine.getOptionValue("notification-point",
            Integer.toString(AbstractWikipediaExtractor.DEFAULT_NOTIFICATION_POINT)));

    int configuredDepth = Integer
            .parseInt(commandLine.getOptionValue("max-depth", Integer.toString(MAX_DEPTH)));
    int maxNum = Integer.parseInt(commandLine.getOptionValue("max-num", "0"));
    boolean printTitles = commandLine.hasOption("l");

    HashSet<String> pagesToConsider = null;
    String filterFileName = commandLine.getOptionValue("filter");
    if (filterFileName != null) {
        File filterFile = new File(filterFileName);
        if (filterFile.exists()) {
            pagesToConsider = new HashSet<>();
            List<String> lines = Files.readLines(filterFile, Charsets.UTF_8);
            for (String line : lines) {
                line = line.trim();
                if (line.length() == 0) {
                    continue;
                }

                line = line.replaceAll("\\s+", "_");

                pagesToConsider.add(line);
            }
        }
    }

    File outputFile = new File(commandLine.getOptionValue("output-file"));
    ExtractorParameters extractorParameters = new ExtractorParameters(
            commandLine.getOptionValue("wikipedia-dump"), outputFile.getAbsolutePath());

    WikipediaExtractor wikipediaPageParser = new WikipediaSectionTitlesExtractor(numThreads, numPages,
            extractorParameters.getLocale(), outputFile, configuredDepth, maxNum, printTitles, pagesToConsider);
    wikipediaPageParser.setNotificationPoint(notificationPoint);
    wikipediaPageParser.start(extractorParameters);

    logger.info("extraction ended " + new Date());

}

From source file:com.nextdoor.bender.S3SnsNotifier.java

public static void main(String[] args) throws ParseException, InterruptedException, IOException {
    formatter = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'").withZoneUTC();

    /*
     * Parse cli arguments
     */
    Options options = new Options();
    options.addOption(Option.builder().longOpt("bucket").hasArg().required()
            .desc("Name of S3 bucket to list s3 objects from").build());
    options.addOption(Option.builder().longOpt("key-file").hasArg().required()
            .desc("Local file of S3 keys to process").build());
    options.addOption(
            Option.builder().longOpt("sns-arn").hasArg().required().desc("SNS arn to publish to").build());
    options.addOption(Option.builder().longOpt("throttle-ms").hasArg()
            .desc("Amount of ms to wait between publishing to SNS").build());
    options.addOption(Option.builder().longOpt("processed-file").hasArg()
            .desc("Local file to use to store procssed S3 object names").build());
    options.addOption(Option.builder().longOpt("skip-processed").hasArg(false)
            .desc("Whether to skip S3 objects that have been processed").build());
    options.addOption(
            Option.builder().longOpt("dry-run").hasArg(false).desc("If set do not publish to SNS").build());

    CommandLineParser parser = new DefaultParser();
    CommandLine cmd = parser.parse(options, args);

    String bucket = cmd.getOptionValue("bucket");
    String keyFile = cmd.getOptionValue("key-file");
    String snsArn = cmd.getOptionValue("sns-arn");
    String processedFile = cmd.getOptionValue("processed-file", null);
    boolean skipProcessed = cmd.hasOption("skip-processed");
    dryRun = cmd.hasOption("dry-run");
    long throttle = Long.parseLong(cmd.getOptionValue("throttle-ms", "-1"));

    if (processedFile != null) {
        File file = new File(processedFile);

        if (!file.exists()) {
            logger.debug("creating local file to store processed s3 object names: " + processedFile);
            file.createNewFile();
        }
    }

    /*
     * Import S3 keys that have been processed
     */
    if (skipProcessed && processedFile != null) {
        try (BufferedReader br = new BufferedReader(new FileReader(processedFile))) {
            String line;
            while ((line = br.readLine()) != null) {
                alreadyPublished.add(line.trim());
            }
        }
    }

    /*
     * Setup writer for file containing processed S3 keys
     */
    FileWriter fw = null;
    BufferedWriter bw = null;
    if (processedFile != null) {
        fw = new FileWriter(processedFile, true);
        bw = new BufferedWriter(fw);
    }

    /*
     * Create clients
     */
    AmazonS3Client s3Client = new AmazonS3Client();
    AmazonSNSClient snsClient = new AmazonSNSClient();

    /*
     * Get S3 object list
     */
    try (BufferedReader br = new BufferedReader(new FileReader(keyFile))) {
        String line;
        while ((line = br.readLine()) != null) {
            String key = line.trim();

            if (alreadyPublished.contains(key)) {
                logger.info("skipping " + key);
                continue;
            }

            ObjectMetadata om = s3Client.getObjectMetadata(bucket, key);

            S3EventNotification s3Notification = getS3Notification(key, bucket, om.getContentLength());

            String json = s3Notification.toJson();

            /*
             * Publish to SNS
             */
            if (publish(snsArn, json, snsClient, key) && processedFile != null) {
                bw.write(key + "\n");
                bw.flush();
            }

            if (throttle != -1) {
                Thread.sleep(throttle);
            }

        }
    }

    if (processedFile != null) {
        bw.close();
        fw.close();
    }
}

From source file:it.unipd.dei.ims.falcon.CmdLine.java

public static void main(String[] args) {

    // last argument is always index path
    Options options = new Options();
    // one of these actions has to be specified
    OptionGroup actionGroup = new OptionGroup();
    actionGroup.addOption(new Option("i", true, "perform indexing")); // if dir, all files, else only one file
    actionGroup.addOption(new Option("q", true, "perform a single query"));
    actionGroup.addOption(new Option("b", false, "perform a query batch (read from stdin)"));
    actionGroup.setRequired(true);
    options.addOptionGroup(actionGroup);

    // other options
    options.addOption(new Option("l", "segment-length", true, "length of a segment (# of chroma vectors)"));
    options.addOption(
            new Option("o", "segment-overlap", true, "overlap portion of a segment (# of chroma vectors)"));
    options.addOption(new Option("Q", "quantization-level", true, "quantization level for chroma vectors"));
    options.addOption(new Option("k", "min-kurtosis", true, "minimum kurtosis for indexing chroma vectors"));
    options.addOption(new Option("s", "sub-sampling", true, "sub-sampling of chroma features"));
    options.addOption(new Option("v", "verbose", false, "verbose output (including timing info)"));
    options.addOption(new Option("T", "transposition-estimator-strategy", true,
            "parametrization for the transposition estimator strategy"));
    options.addOption(new Option("t", "n-transp", true,
            "number of transposition; if not specified, no transposition is performed"));
    options.addOption(new Option("f", "force-transp", true, "force transposition by an amount of semitones"));
    options.addOption(new Option("p", "pruning", false,
            "enable query pruning; if -P is unspecified, use default strategy"));
    options.addOption(new Option("P", "pruning-custom", true, "custom query pruning strategy"));

    // parse
    HelpFormatter formatter = new HelpFormatter();
    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(options, args);
        if (cmd.getArgs().length != 1)
            throw new ParseException("no index path was specified");
    } catch (ParseException ex) {
        System.err.println("ERROR - parsing command line:");
        System.err.println(ex.getMessage());
        formatter.printHelp("falcon -{i,q,b} [options] index_path", options);
        return;
    }

    // default values
    final float[] DEFAULT_TRANSPOSITION_ESTIMATOR_STRATEGY = new float[] { 0.65192807f, 0.0f, 0.0f, 0.0f,
            0.3532628f, 0.4997167f, 0.0f, 0.41703504f, 0.0f, 0.16297342f, 0.0f, 0.0f };
    final String DEFAULT_QUERY_PRUNING_STRATEGY = "ntf:0.340765*[0.001694,0.995720];ndf:0.344143*[0.007224,0.997113];"
            + "ncf:0.338766*[0.001601,0.995038];nmf:0.331577*[0.002352,0.997884];"; // TODO not the final one

    int hashes_per_segment = Integer.parseInt(cmd.getOptionValue("l", "150"));
    int overlap_per_segment = Integer.parseInt(cmd.getOptionValue("o", "50"));
    int nranks = Integer.parseInt(cmd.getOptionValue("Q", "3"));
    int subsampling = Integer.parseInt(cmd.getOptionValue("s", "1"));
    double minkurtosis = Float.parseFloat(cmd.getOptionValue("k", "-100."));
    boolean verbose = cmd.hasOption("v");
    int ntransp = Integer.parseInt(cmd.getOptionValue("t", "1"));
    TranspositionEstimator tpe = null;
    if (cmd.hasOption("t")) {
        if (cmd.hasOption("T")) {
            // TODO this if branch is yet to test
            Pattern p = Pattern.compile("\\d\\.\\d*");
            LinkedList<Double> tokens = new LinkedList<Double>();
            Matcher m = p.matcher(cmd.getOptionValue("T"));
            while (m.find())
                tokens.addLast(new Double(cmd.getOptionValue("T").substring(m.start(), m.end())));
            float[] strategy = new float[tokens.size()];
            if (strategy.length != 12) {
                System.err.println("invalid transposition estimator strategy");
                System.exit(1);
            }
            for (int i = 0; i < strategy.length; i++)
                strategy[i] = new Float(tokens.pollFirst());
            // use the parsed values; previously tpe was never assigned in this branch
            tpe = new TranspositionEstimator(strategy);
        } else {
            tpe = new TranspositionEstimator(DEFAULT_TRANSPOSITION_ESTIMATOR_STRATEGY);
        }
    } else if (cmd.hasOption("f")) {
        int[] transps = parseIntArray(cmd.getOptionValue("f"));
        tpe = new ForcedTranspositionEstimator(transps);
        ntransp = transps.length;
    }
    QueryPruningStrategy qpe = null;
    if (cmd.hasOption("p")) {
        if (cmd.hasOption("P")) {
            qpe = new StaticQueryPruningStrategy(cmd.getOptionValue("P"));
        } else {
            qpe = new StaticQueryPruningStrategy(DEFAULT_QUERY_PRUNING_STRATEGY);
        }
    }

    // action
    if (cmd.hasOption("i")) {
        try {
            Indexing.index(new File(cmd.getOptionValue("i")), new File(cmd.getArgs()[0]), hashes_per_segment,
                    overlap_per_segment, subsampling, nranks, minkurtosis, tpe, verbose);
        } catch (IndexingException ex) {
            Logger.getLogger(CmdLine.class.getName()).log(Level.SEVERE, null, ex);
        } catch (IOException ex) {
            Logger.getLogger(CmdLine.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    if (cmd.hasOption("q")) {
        String queryfilepath = cmd.getOptionValue("q");
        doQuery(cmd, queryfilepath, hashes_per_segment, overlap_per_segment, nranks, subsampling, tpe, ntransp,
                minkurtosis, qpe, verbose);
    }
    if (cmd.hasOption("b")) {
        try {
            long starttime = System.currentTimeMillis();
            BufferedReader in = new BufferedReader(new InputStreamReader(System.in));
            String line = null;
            while ((line = in.readLine()) != null && !line.trim().isEmpty())
                doQuery(cmd, line, hashes_per_segment, overlap_per_segment, nranks, subsampling, tpe, ntransp,
                        minkurtosis, qpe, verbose);
            in.close();
            long endtime = System.currentTimeMillis();
            System.out.println(String.format("total time: %ds", (endtime - starttime) / 1000));
        } catch (IOException ex) {
            Logger.getLogger(CmdLine.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
}

From source file:com.lightboxtechnologies.spectrum.SequenceFileExport.java

public static void main(String[] args) throws Exception {
    final Configuration conf = new Configuration();

    final String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();

    String imageID;
    String outpath;
    String friendlyname;
    final Set<String> exts = new HashSet<String>();

    if ("-f".equals(otherArgs[0])) {
        // five arguments are consumed below: -f, extensions file, image ID, friendly name, output path
        if (otherArgs.length != 5) {
            die();
        }

        // load extensions from file
        final Path extpath = new Path(otherArgs[1]);

        InputStream in = null;
        try {
            in = extpath.getFileSystem(conf).open(extpath);

            Reader r = null;
            try {
                r = new InputStreamReader(in);

                BufferedReader br = null;
                try {
                    br = new BufferedReader(r);

                    String line;
                    while ((line = br.readLine()) != null) {
                        exts.add(line.trim().toLowerCase());
                    }

                    br.close();
                } finally {
                    IOUtils.closeQuietly(br);
                }

                r.close();
            } finally {
                IOUtils.closeQuietly(r);
            }

            in.close();
        } finally {
            IOUtils.closeQuietly(in);
        }

        imageID = otherArgs[2];
        friendlyname = otherArgs[3];
        outpath = otherArgs[4];
    } else {
        if (otherArgs.length < 3) {
            die();
        }

        // read extensions from trailing args
        imageID = otherArgs[0];
        friendlyname = otherArgs[1];
        outpath = otherArgs[2];

        // lowercase all file extensions (they start after imageID, friendlyname, outpath)
        for (int i = 3; i < otherArgs.length; ++i) {
            exts.add(otherArgs[i].toLowerCase());
        }
    }

    conf.setStrings("extensions", exts.toArray(new String[exts.size()]));

    final Job job = SKJobFactory.createJobFromConf(imageID, friendlyname, "SequenceFileExport", conf);
    job.setJarByClass(SequenceFileExport.class);
    job.setMapperClass(SequenceFileExportMapper.class);
    job.setNumReduceTasks(0);

    job.setOutputKeyClass(BytesWritable.class);
    job.setOutputValueClass(MapWritable.class);

    job.setInputFormatClass(FsEntryHBaseInputFormat.class);
    FsEntryHBaseInputFormat.setupJob(job, imageID);

    job.setOutputFormatClass(SequenceFileOutputFormat.class);
    SequenceFileOutputFormat.setOutputCompressionType(job, SequenceFile.CompressionType.BLOCK);

    FileOutputFormat.setOutputPath(job, new Path(outpath));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}