Example usage for java.lang.String.length()

List of usage examples for java.lang.String.length()

Introduction

On this page you can find usage examples for java.lang.String.length().

Prototype

public int length() 

Document

Returns the length of this string.
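
Before the longer, real-world listings below, here is a minimal, self-contained sketch of calling length(). The class and variable names are illustrative only and do not come from any of the source files listed under Usage.

public class StringLengthDemo {
    public static void main(String[] args) {
        String greeting = "Hello, world";

        // length() returns the number of UTF-16 code units in the string
        System.out.println(greeting.length()); // 12
        System.out.println("".length()); // 0 for the empty string

        // A common guard before indexing, used by many of the examples below
        if (greeting.length() > 0) {
            System.out.println(greeting.charAt(greeting.length() - 1)); // 'd'
        }
    }
}

Note that length() counts UTF-16 code units rather than Unicode code points, so supplementary characters such as most emoji contribute two to the count.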

Usage

From source file:com.pari.nm.utils.backup.BackupRestore.java

/**
 * @param args
 */
public static void main(String[] args) {
    com.maverick.ssh.LicenseManager.addLicense("----BEGIN 3SP LICENSE----\r\n" + "Product : J2SSH Maverick\r\n"
            + "Licensee: Pari Networks Inc.\r\n" + "Comments: Sreenivas Devalla\r\n"
            + "Type    : Foundation License\r\n" + "Created : 20-Jun-2007\r\n" + "\r\n"
            + "3787201A027FCA5BA600F3CF9CCEF4C85068187D70F94ABC\r\n"
            + "E7D7280AAFB06CE499DC968A4CB25795475D5B79FDDD6CB4\r\n"
            + "7971A60E947E84A4DADFAB2F89E2F52470182ED2EF429A2F\r\n"
            + "2EC6D8B49CAF167605A7F56C4EB736ECA7150819FCF04DC6\r\n"
            + "01B1404EA9BC83BEAA4AB2F4FC7AB344BEC08CF9DDDAAA34\r\n"
            + "EC80C1C14FA8BB1A8B47E86D393FAECD3C0E7C450E0D1FE3\r\n" + "----END 3SP LICENSE----\r\n");
    String mode = null;
    BufferedReader br = null;

    if (args.length < 9) {
        System.err.println("BackUpDatabase: Invalid Syntax.");
        System.err.println(
                "Usage - java BackUpDatabase <ftpserver> <ftpuser> <ftppassword> <ftpdir> <ftpfile> <localdir>  <backup | recovery> ");
        System.exit(-1);
    }

    try {

        mode = args[8];
        System.out.println("Request received with mode :" + mode + "\n");
        // BackupRestore tbk = BackupRestore.getInstance();
        BackupRestore tbk = new BackupRestore();
        if ((mode != null) && (mode.length() > 0) && mode.equalsIgnoreCase("recovery")) {
            File restoreDir = new File(args[7], args[6].substring(0, args[6].length() - 4));
            System.out.println("Restore Directory :" + restoreDir + "\n");
            if (!restoreDir.exists()) {
                try {
                    FTPServerType serverType = FTPServerType.valueOf(FTPServerType.class, args[0]);
                    System.out.println("Fetching the backup File :" + args[6] + "\n");
                    System.out.println("Please wait, it may take sometime....." + "\n");

                    if (tbk.fetchAndExtractBackupFile(serverType, args[1], Integer.parseInt(args[2]), args[3],
                            args[4], args[5], args[6], args[7]) == null) {
                        System.err.println("Error : Failed to fetch the backup File.\n");
                        System.exit(-1);
                    }
                    System.out.println("Successfully fetched the backup File :" + args[6] + "\n");
                } catch (Exception e) {
                    System.out.println(
                            "Error : Exception while fetching the backup file.Failed to restore the backup File.\n");
                    e.printStackTrace();
                    System.exit(-1);
                }
            }
            try {
                Thread.sleep(10000);
            } catch (Exception ee) {
                ee.printStackTrace();
            }

            System.out.println("Starting recovery ...\n");

            if (!File.separator.equals("\\")) {
                System.out.println("Stopping the Pari Server process.\n");
                Process p = Runtime.getRuntime().exec("killall -9 pari_server");
                MyReader min = new MyReader(p.getInputStream());
                MyReader merr = new MyReader(p.getErrorStream());

                try {
                    min.join(20000);
                } catch (Exception ee) {
                }

                try {
                    merr.join(20000);
                } catch (Exception ex) {
                }
            }

            if (!File.separator.equals("\\")) {
                System.out.println("Stopping the Pari Server process.\n");
                // Process p = Runtime.getRuntime().exec("killall -9 pari_server");
                Process p = Runtime.getRuntime().exec("/etc/init.d/dash stop");
                MyReader min = new MyReader(p.getInputStream());
                MyReader merr = new MyReader(p.getErrorStream());

                try {
                    min.join(20000);
                } catch (Exception ee) {
                }

                try {
                    merr.join(20000);
                } catch (Exception ex) {
                }
            }
            System.out.println("Start recovering the backup file.\n");
            if (tbk.doRecovery(args[0], args[1], args[2], args[3], args[4], args[5], args[6], args[7])) {
                System.out.println("Done recovering...\n");
                validateCSPCInstanace();
            } else {
                System.out.println("Failed to recover the backup File...\n");
            }

            try {
                Process p = null;
                String cmd = "";

                if (File.separator.equals("\\")) {
                    cmd = "cmd /k start_server.cmd > pari.out 2>&1";
                } else {
                    cmd = "/etc/init.d/dash start";
                }

                System.err.println(cmd);
                Runtime.getRuntime().exec(cmd);
                Boolean flag = false;
                int count = 0;
                String[] nccmStatusCheckCmd = { "/bin/sh", "-c",
                        "netstat -an  | grep 42605 | grep LISTEN | wc -l" };

                do {
                    count++;
                    Thread.sleep(60000);
                    // The command output will be 1 if NCCM server started and Listening on port 42605 otherwise it
                    // will return 0
                    p = Runtime.getRuntime().exec(nccmStatusCheckCmd);
                    int ex = -1;
                    try {
                        ex = p.waitFor();
                    } catch (InterruptedException e) {
                        System.out.println("Normal execution, exception: " + e);
                    }
                    System.out.println("Normal execution, exit value: " + ex);
                    br = new BufferedReader(new InputStreamReader(p.getInputStream()));
                    String thisLine = null;
                    while ((thisLine = br.readLine()) != null) {
                        System.out.println("Command Execution Result:" + thisLine);
                        if (thisLine.equals("1")) {
                            flag = true;
                            break;
                        }
                    }
                    System.out.println("Count - " + count);
                    BufferedReader error = new BufferedReader(new InputStreamReader(p.getErrorStream()));
                    while ((thisLine = error.readLine()) != null) {
                        System.out.println(thisLine);
                    }

                } while ((!flag) && count < 30);

                if (flag) {
                    // System.out.println("NCCM Server came to listening state: after " + count + " mins");
                    // Runtime.getRuntime().exec("sh $DASH_HOME/webui/tomcat/bin/shutdown.sh");
                    Thread.sleep(60000);
                    System.out.println("NCCM Server came to listening state: after " + count + " mins");
                    // Runtime.getRuntime().exec("sh $DASH_HOME/webui/tomcat/bin/startup.sh");
                } else {
                    System.out.println("NCCM Server didn't come to listening state: last " + count + " mins");
                    System.out.println("Please verify NCCM Server and start tomcat server manually.");
                }
                System.exit(1);
            } catch (Exception ee) {
                ee.printStackTrace();
            }
        } else if ((mode != null) && (mode.length() > 0) && mode.equalsIgnoreCase("ftplist")) {
            PariFTP pftp = new PariFTP("10.100.1.20", "guest", "guest", "/");
            String[] list = pftp.getRemoteListing();

            System.out.println("List of Files\n");

            for (int i = 0; (list != null) && (i < list.length); i++) {
                System.out.println(list[i] + "\n");
            }
        } else {
            System.out.println("Mode \t" + mode + "\t not supported\n");
        }

        System.exit(-1);
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        try {
            if (br != null) {
                br.close();
            }
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }
}

From source file:examples.nntp.PostMessage.java

public static void main(String[] args) {
    String from, subject, newsgroup, filename, server, organization;
    String references;
    BufferedReader stdin;
    FileReader fileReader = null;
    SimpleNNTPHeader header;
    NNTPClient client;

    if (args.length < 1) {
        System.err.println("Usage: post newsserver");
        System.exit(1);
    }

    server = args[0];

    stdin = new BufferedReader(new InputStreamReader(System.in));

    try {
        System.out.print("From: ");
        System.out.flush();

        from = stdin.readLine();

        System.out.print("Subject: ");
        System.out.flush();

        subject = stdin.readLine();

        header = new SimpleNNTPHeader(from, subject);

        System.out.print("Newsgroup: ");
        System.out.flush();

        newsgroup = stdin.readLine();
        header.addNewsgroup(newsgroup);

        while (true) {
            System.out.print("Additional Newsgroup <Hit enter to end>: ");
            System.out.flush();

            // Of course you don't want to do this because readLine() may be null
            newsgroup = stdin.readLine().trim();

            if (newsgroup.length() == 0) {
                break;
            }

            header.addNewsgroup(newsgroup);
        }

        System.out.print("Organization: ");
        System.out.flush();

        organization = stdin.readLine();

        System.out.print("References: ");
        System.out.flush();

        references = stdin.readLine();

        if (organization != null && organization.length() > 0) {
            header.addHeaderField("Organization", organization);
        }

        if (references != null && references.length() > 0) {
            header.addHeaderField("References", references);
        }

        header.addHeaderField("X-Newsreader", "NetComponents");

        System.out.print("Filename: ");
        System.out.flush();

        filename = stdin.readLine();

        try {
            fileReader = new FileReader(filename);
        } catch (FileNotFoundException e) {
            System.err.println("File not found. " + e.getMessage());
            System.exit(1);
        }

        client = new NNTPClient();
        client.addProtocolCommandListener(new PrintCommandListener(new PrintWriter(System.out), true));

        client.connect(server);

        if (!NNTPReply.isPositiveCompletion(client.getReplyCode())) {
            client.disconnect();
            System.err.println("NNTP server refused connection.");
            System.exit(1);
        }

        if (client.isAllowedToPost()) {
            Writer writer = client.postArticle();

            if (writer != null) {
                writer.write(header.toString());
                Util.copyReader(fileReader, writer);
                writer.close();
                client.completePendingCommand();
            }
        }

        if (fileReader != null) {
            fileReader.close();
        }

        client.logout();

        client.disconnect();
    } catch (IOException e) {
        e.printStackTrace();
        System.exit(1);
    }
}

From source file:edu.nyu.tandon.query.Query.java

@SuppressWarnings("unchecked")
public static void main(final String[] arg) throws Exception {

    SimpleJSAP jsap = new SimpleJSAP(Query.class.getName(),
            "Loads indices relative to a collection, possibly loads the collection, and answers to queries.",
            new Parameter[] {
                    new FlaggedOption("collection", JSAP.STRING_PARSER, JSAP.NO_DEFAULT, JSAP.NOT_REQUIRED, 'c',
                            "collection", "The collection of documents indexed by the given indices."),
                    new FlaggedOption("objectCollection",
                            new ObjectParser(DocumentCollection.class, MG4JClassParser.PACKAGE),
                            JSAP.NO_DEFAULT, JSAP.NOT_REQUIRED, 'o', "object-collection",
                            "An object specification describing a document collection."),
                    new FlaggedOption("titleList", JSAP.STRING_PARSER, JSAP.NO_DEFAULT, JSAP.NOT_REQUIRED, 't',
                            "title-list",
                            "A serialized big list of titles (will override collection titles if specified)."),
                    new FlaggedOption("titleFile", JSAP.STRING_PARSER, JSAP.NO_DEFAULT, JSAP.NOT_REQUIRED, 'T',
                            "title-file",
                            "A file of newline-separated, UTF-8 titles (will override collection titles if specified)."),
                    new FlaggedOption("input", JSAP.STRING_PARSER, JSAP.NO_DEFAULT, JSAP.NOT_REQUIRED, 'I',
                            "input", "A file containing the input."),
                    new Switch("noSizes", 'n', "no-sizes",
                            "Disable loading document sizes (they are necessary for BM25 scoring)."),
                    new Switch("http", 'h', "http", "Starts an HTTP query server."),
                    new Switch("verbose", 'v', "verbose", "Print full exception stack traces."),
                    new FlaggedOption("itemClass", MG4JClassParser.getParser(), JSAP.NO_DEFAULT,
                            JSAP.NOT_REQUIRED, 'i', "item-class",
                            "The class that will handle item display in the HTTP server."),
                    new FlaggedOption("itemMimeType", JSAP.STRING_PARSER, "text/html", JSAP.NOT_REQUIRED, 'm',
                            "item-mime-type",
                            "A MIME type suggested to the class handling item display in the HTTP server."),
                    new FlaggedOption("port", JSAP.INTEGER_PARSER, "4242", JSAP.NOT_REQUIRED, 'p', "port",
                            "The port on localhost where the server will appear."),
                    new UnflaggedOption("basenameWeight", JSAP.STRING_PARSER, JSAP.NO_DEFAULT, JSAP.REQUIRED,
                            JSAP.GREEDY,
                            "The indices that the servlet will use. Indices are specified using their basename, optionally followed by a colon and a double representing the weight used to score results from that index. Indices without a specified weight are weighted 1."),

                    new Switch("noMplex", 'P', "noMplex", "Starts with multiplex disabled."),
                    new FlaggedOption("results", JSAP.INTEGER_PARSER, "1000", JSAP.NOT_REQUIRED, 'r', "results",
                            "The # of results to display"),
                    new FlaggedOption("mode", JSAP.STRING_PARSER, JSAP.NO_DEFAULT, JSAP.NOT_REQUIRED, 'M',
                            "time", "The results display mode") });

    final JSAPResult jsapResult = jsap.parse(arg);
    if (jsap.messagePrinted())
        return;

    final DocumentCollection documentCollection = (DocumentCollection) (jsapResult.userSpecified("collection")
            ? AbstractDocumentSequence.load(jsapResult.getString("collection"))
            : jsapResult.userSpecified("objectCollection") ? jsapResult.getObject("objectCollection") : null);
    final BigList<? extends CharSequence> titleList = (BigList<? extends CharSequence>) (jsapResult
            .userSpecified("titleList")
                    ? BinIO.loadObject(jsapResult.getString("titleList"))
                    : jsapResult.userSpecified("titleFile")
                            ? new FileLinesBigList(jsapResult.getString("titleFile"), "UTF-8")
                            : null);
    final String[] basenameWeight = jsapResult.getStringArray("basenameWeight");
    final Object2ReferenceLinkedOpenHashMap<String, Index> indexMap = new Object2ReferenceLinkedOpenHashMap<String, Index>(
            Hash.DEFAULT_INITIAL_SIZE, .5f);
    final Reference2DoubleOpenHashMap<Index> index2Weight = new Reference2DoubleOpenHashMap<Index>();
    final boolean verbose = jsapResult.getBoolean("verbose");
    final boolean loadSizes = !jsapResult.getBoolean("noSizes");
    Query.loadIndicesFromSpec(basenameWeight, loadSizes, documentCollection, indexMap, index2Weight);

    final long numberOfDocuments = indexMap.values().iterator().next().numberOfDocuments;
    if (titleList != null && titleList.size64() != numberOfDocuments)
        throw new IllegalArgumentException("The number of titles (" + titleList.size64()
                + " and the number of documents (" + numberOfDocuments + ") do not match");

    final Object2ObjectOpenHashMap<String, TermProcessor> termProcessors = new Object2ObjectOpenHashMap<String, TermProcessor>(
            indexMap.size());
    for (String alias : indexMap.keySet())
        termProcessors.put(alias, indexMap.get(alias).termProcessor);

    final SimpleParser simpleParser = new SimpleParser(indexMap.keySet(), indexMap.firstKey(), termProcessors);

    final Reference2ReferenceMap<Index, Object> index2Parser = new Reference2ReferenceOpenHashMap<Index, Object>();
    /*
    // Fetch parsers for payload-based fields.
    for( Index index: indexMap.values() ) if ( index.hasPayloads ) {
     if ( index.payload.getClass() == DatePayload.class ) index2Parser.put( index, DateFormat.getDateInstance( DateFormat.SHORT, Locale.UK ) );
    }
    */

    final QueryEngine queryEngine = new QueryEngine(simpleParser, new DocumentIteratorBuilderVisitor(indexMap,
            index2Parser, indexMap.get(indexMap.firstKey()), MAX_STEMMING), indexMap);
    queryEngine.setWeights(index2Weight);
    queryEngine.score(new Scorer[] { new BM25Scorer(), new VignaScorer() }, new double[] { 1, 1 });
    // We set up an interval selector only if there is a collection for snippeting
    queryEngine.intervalSelector = documentCollection != null ? new IntervalSelector(4, 40)
            : new IntervalSelector();

    queryEngine.multiplex = !jsapResult.getBoolean("noMplex");

    queryEngine.equalize(1000);

    Query query = new Query(queryEngine);
    query.displayMode = OutputType.TIME;

    query.maxOutput = jsapResult.getInt("results", 1000);

    query.interpretCommand("$score BM25Scorer");

    String q;

    System.err.println(
            "Welcome to the MG4J query class (setup with $mode snippet, $score BM25Scorer VignaScorer, $mplex on, $equalize 1000, $select "
                    + (documentCollection != null ? "4 40" : "all") + ")");
    System.err.println("Please type $ for help.");

    String prompt = indexMap.keySet().toString() + ">";
    int n;

    try {
        final BufferedReader br = new BufferedReader(new InputStreamReader(
                jsapResult.userSpecified("input") ? new FileInputStream(jsapResult.getString("input"))
                        : System.in));
        final ObjectArrayList<DocumentScoreInfo<Reference2ObjectMap<Index, SelectedInterval[]>>> results = new ObjectArrayList<DocumentScoreInfo<Reference2ObjectMap<Index, SelectedInterval[]>>>();

        for (;;) {
            if (query.displayMode != OutputType.TREC)
                System.out.print(prompt);
            q = br.readLine();
            if (q == null) {
                System.err.println();
                break; // CTRL-D
            }
            if (q.length() == 0)
                continue;
            if (q.charAt(0) == '$') {
                if (!query.interpretCommand(q))
                    break;
                continue;
            }

            long time = -System.nanoTime();

            try {
                n = queryEngine.process(q, 0, query.maxOutput, results);
            } catch (QueryParserException e) {
                if (verbose)
                    e.getCause().printStackTrace(System.err);
                else
                    System.err.println(e.getCause());
                continue;
            } catch (Exception e) {
                if (verbose)
                    e.printStackTrace(System.err);
                else
                    System.err.println(e);
                continue;
            }

            time += System.nanoTime();
            query.output(results, documentCollection, titleList, TextMarker.TEXT_BOLDFACE);
            System.err.println(results.size() + " results; " + n + " documents examined; " + time / 1000000.
                    + " ms; " + Util.format((n * 1000000000.0) / time) + " documents/s, "
                    + Util.format(time / (double) n) + " ns/document");
        }

    } finally {
        if (query.output != System.out)
            query.output.close();
    }
}

From source file:edu.nyu.vida.data_polygamy.relationship_computation.Relationship.java

/**
 * @param args
 * @throws ParseException 
 */
@SuppressWarnings({ "deprecation" })
public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {

    Options options = new Options();

    Option forceOption = new Option("f", "force", false,
            "force the computation of the relationship " + "even if files already exist");
    forceOption.setRequired(false);
    options.addOption(forceOption);

    Option scoreOption = new Option("sc", "score", true, "set threshold for relationship score");
    scoreOption.setRequired(false);
    scoreOption.setArgName("SCORE THRESHOLD");
    options.addOption(scoreOption);

    Option strengthOption = new Option("st", "strength", true, "set threshold for relationship strength");
    strengthOption.setRequired(false);
    strengthOption.setArgName("STRENGTH THRESHOLD");
    options.addOption(strengthOption);

    Option completeRandomizationOption = new Option("c", "complete-randomization", false,
            "use complete randomization when performing significance tests");
    completeRandomizationOption.setRequired(false);
    options.addOption(completeRandomizationOption);

    Option idOption = new Option("id", "ids", false, "output id instead of names for datasets and attributes");
    idOption.setRequired(false);
    options.addOption(idOption);

    Option g1Option = new Option("g1", "first-group", true, "set first group of datasets");
    g1Option.setRequired(true);
    g1Option.setArgName("FIRST GROUP");
    g1Option.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(g1Option);

    Option g2Option = new Option("g2", "second-group", true, "set second group of datasets");
    g2Option.setRequired(false);
    g2Option.setArgName("SECOND GROUP");
    g2Option.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(g2Option);

    Option machineOption = new Option("m", "machine", true, "machine identifier");
    machineOption.setRequired(true);
    machineOption.setArgName("MACHINE");
    machineOption.setArgs(1);
    options.addOption(machineOption);

    Option nodesOption = new Option("n", "nodes", true, "number of nodes");
    nodesOption.setRequired(true);
    nodesOption.setArgName("NODES");
    nodesOption.setArgs(1);
    options.addOption(nodesOption);

    Option s3Option = new Option("s3", "s3", false, "data on Amazon S3");
    s3Option.setRequired(false);
    options.addOption(s3Option);

    Option awsAccessKeyIdOption = new Option("aws_id", "aws-id", true,
            "aws access key id; " + "this is required if the execution is on aws");
    awsAccessKeyIdOption.setRequired(false);
    awsAccessKeyIdOption.setArgName("AWS-ACCESS-KEY-ID");
    awsAccessKeyIdOption.setArgs(1);
    options.addOption(awsAccessKeyIdOption);

    Option awsSecretAccessKeyOption = new Option("aws_key", "aws-key", true,
            "aws secret access key; " + "this is required if the execution is on aws");
    awsSecretAccessKeyOption.setRequired(false);
    awsSecretAccessKeyOption.setArgName("AWS-SECRET-ACCESS-KEY");
    awsSecretAccessKeyOption.setArgs(1);
    options.addOption(awsSecretAccessKeyOption);

    Option bucketOption = new Option("b", "s3-bucket", true,
            "bucket on s3; " + "this is required if the execution is on aws");
    bucketOption.setRequired(false);
    bucketOption.setArgName("S3-BUCKET");
    bucketOption.setArgs(1);
    options.addOption(bucketOption);

    Option helpOption = new Option("h", "help", false, "display this message");
    helpOption.setRequired(false);
    options.addOption(helpOption);

    Option removeOption = new Option("r", "remove-not-significant", false,
            "remove relationships that are not" + "significant from the final output");
    removeOption.setRequired(false);
    options.addOption(removeOption);

    HelpFormatter formatter = new HelpFormatter();
    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;

    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e) {
        formatter.printHelp("hadoop jar data-polygamy.jar "
                + "edu.nyu.vida.data_polygamy.relationship_computation.Relationship", options, true);
        System.exit(0);
    }

    if (cmd.hasOption("h")) {
        formatter.printHelp("hadoop jar data-polygamy.jar "
                + "edu.nyu.vida.data_polygamy.relationship_computation.Relationship", options, true);
        System.exit(0);
    }

    boolean s3 = cmd.hasOption("s3");
    String s3bucket = "";
    String awsAccessKeyId = "";
    String awsSecretAccessKey = "";

    if (s3) {
        if ((!cmd.hasOption("aws_id")) || (!cmd.hasOption("aws_key")) || (!cmd.hasOption("b"))) {
            System.out.println(
                    "Arguments 'aws_id', 'aws_key', and 'b'" + " are mandatory if execution is on AWS.");
            formatter.printHelp(
                    "hadoop jar data-polygamy.jar "
                            + "edu.nyu.vida.data_polygamy.relationship_computation.Relationship",
                    options, true);
            System.exit(0);
        }
        s3bucket = cmd.getOptionValue("b");
        awsAccessKeyId = cmd.getOptionValue("aws_id");
        awsSecretAccessKey = cmd.getOptionValue("aws_key");
    }

    boolean snappyCompression = false;
    boolean bzip2Compression = false;
    String machine = cmd.getOptionValue("m");
    int nbNodes = Integer.parseInt(cmd.getOptionValue("n"));

    Configuration s3conf = new Configuration();
    if (s3) {
        s3conf.set("fs.s3.awsAccessKeyId", awsAccessKeyId);
        s3conf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey);
        s3conf.set("bucket", s3bucket);
    }

    Path path = null;
    FileSystem fs = FileSystem.get(new Configuration());

    ArrayList<String> shortDataset = new ArrayList<String>();
    ArrayList<String> firstGroup = new ArrayList<String>();
    ArrayList<String> secondGroup = new ArrayList<String>();
    HashMap<String, String> datasetAgg = new HashMap<String, String>();

    boolean removeNotSignificant = cmd.hasOption("r");
    boolean removeExistingFiles = cmd.hasOption("f");
    boolean completeRandomization = cmd.hasOption("c");
    boolean hasScoreThreshold = cmd.hasOption("sc");
    boolean hasStrengthThreshold = cmd.hasOption("st");
    boolean outputIds = cmd.hasOption("id");
    String scoreThreshold = hasScoreThreshold ? cmd.getOptionValue("sc") : "";
    String strengthThreshold = hasStrengthThreshold ? cmd.getOptionValue("st") : "";

    // all datasets
    ArrayList<String> all_datasets = new ArrayList<String>();
    if (s3) {
        path = new Path(s3bucket + FrameworkUtils.datasetsIndexDir);
        fs = FileSystem.get(path.toUri(), s3conf);
    } else {
        path = new Path(fs.getHomeDirectory() + "/" + FrameworkUtils.datasetsIndexDir);
    }
    BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(path)));
    String line = br.readLine();
    while (line != null) {
        all_datasets.add(line.split("\t")[0]);
        line = br.readLine();
    }
    br.close();
    if (s3)
        fs.close();
    String[] all_datasets_array = new String[all_datasets.size()];
    all_datasets.toArray(all_datasets_array);

    String[] firstGroupCmd = cmd.getOptionValues("g1");
    String[] secondGroupCmd = cmd.hasOption("g2") ? cmd.getOptionValues("g2") : all_datasets_array;
    addDatasets(firstGroupCmd, firstGroup, shortDataset, datasetAgg, path, fs, s3conf, s3, s3bucket);
    addDatasets(secondGroupCmd, secondGroup, shortDataset, datasetAgg, path, fs, s3conf, s3, s3bucket);

    if (shortDataset.size() == 0) {
        System.out.println("No datasets to process.");
        System.exit(0);
    }

    if (firstGroup.isEmpty()) {
        System.out.println("No indices from datasets in G1.");
        System.exit(0);
    }

    if (secondGroup.isEmpty()) {
        System.out.println("No indices from datasets in G2.");
        System.exit(0);
    }

    // getting dataset ids

    String datasetNames = "";
    String datasetIds = "";
    HashMap<String, String> datasetId = new HashMap<String, String>();
    Iterator<String> it = shortDataset.iterator();
    while (it.hasNext()) {
        datasetId.put(it.next(), null);
    }

    if (s3) {
        path = new Path(s3bucket + FrameworkUtils.datasetsIndexDir);
        fs = FileSystem.get(path.toUri(), s3conf);
    } else {
        path = new Path(fs.getHomeDirectory() + "/" + FrameworkUtils.datasetsIndexDir);
    }
    br = new BufferedReader(new InputStreamReader(fs.open(path)));
    line = br.readLine();
    while (line != null) {
        String[] dt = line.split("\t");
        all_datasets.add(dt[0]);
        if (datasetId.containsKey(dt[0])) {
            datasetId.put(dt[0], dt[1]);
            datasetNames += dt[0] + ",";
            datasetIds += dt[1] + ",";
        }
        line = br.readLine();
    }
    br.close();
    if (s3)
        fs.close();

    datasetNames = datasetNames.substring(0, datasetNames.length() - 1);
    datasetIds = datasetIds.substring(0, datasetIds.length() - 1);
    it = shortDataset.iterator();
    while (it.hasNext()) {
        String dataset = it.next();
        if (datasetId.get(dataset) == null) {
            System.out.println("No dataset id for " + dataset);
            System.exit(0);
        }
    }

    String firstGroupStr = "";
    String secondGroupStr = "";
    for (String dataset : firstGroup) {
        firstGroupStr += datasetId.get(dataset) + ",";
    }
    for (String dataset : secondGroup) {
        secondGroupStr += datasetId.get(dataset) + ",";
    }
    firstGroupStr = firstGroupStr.substring(0, firstGroupStr.length() - 1);
    secondGroupStr = secondGroupStr.substring(0, secondGroupStr.length() - 1);

    String relationshipsDir = "";
    if (outputIds) {
        relationshipsDir = FrameworkUtils.relationshipsIdsDir;
    } else {
        relationshipsDir = FrameworkUtils.relationshipsDir;
    }

    FrameworkUtils.createDir(s3bucket + relationshipsDir, s3conf, s3);

    String random = completeRandomization ? "complete" : "restricted";

    String indexInputDirs = "";
    String noRelationship = "";

    HashSet<String> dirs = new HashSet<String>();

    String dataset1;
    String dataset2;
    String datasetId1;
    String datasetId2;
    for (int i = 0; i < firstGroup.size(); i++) {
        for (int j = 0; j < secondGroup.size(); j++) {

            if (Integer.parseInt(datasetId.get(firstGroup.get(i))) < Integer
                    .parseInt(datasetId.get(secondGroup.get(j)))) {
                dataset1 = firstGroup.get(i);
                dataset2 = secondGroup.get(j);
            } else {
                dataset1 = secondGroup.get(j);
                dataset2 = firstGroup.get(i);
            }

            datasetId1 = datasetId.get(dataset1);
            datasetId2 = datasetId.get(dataset2);

            if (dataset1.equals(dataset2))
                continue;
            String correlationOutputFileName = s3bucket + relationshipsDir + "/" + dataset1 + "-" + dataset2
                    + "/";

            if (removeExistingFiles) {
                FrameworkUtils.removeFile(correlationOutputFileName, s3conf, s3);
            }
            if (!FrameworkUtils.fileExists(correlationOutputFileName, s3conf, s3)) {
                dirs.add(s3bucket + FrameworkUtils.indexDir + "/" + dataset1);
                dirs.add(s3bucket + FrameworkUtils.indexDir + "/" + dataset2);
            } else {
                noRelationship += datasetId1 + "-" + datasetId2 + ",";
            }
        }
    }

    if (dirs.isEmpty()) {
        System.out.println("All the relationships were already computed.");
        System.out.println("Use -f in the beginning of the command line to force the computation.");
        System.exit(0);
    }

    for (String dir : dirs) {
        indexInputDirs += dir + ",";
    }

    Configuration conf = new Configuration();
    Machine machineConf = new Machine(machine, nbNodes);

    String jobName = "relationship" + "-" + random;
    String relationshipOutputDir = s3bucket + relationshipsDir + "/tmp/";

    FrameworkUtils.removeFile(relationshipOutputDir, s3conf, s3);

    for (int i = 0; i < shortDataset.size(); i++) {
        conf.set("dataset-" + datasetId.get(shortDataset.get(i)) + "-agg", datasetAgg.get(shortDataset.get(i)));
    }
    for (int i = 0; i < shortDataset.size(); i++) {
        conf.set("dataset-" + datasetId.get(shortDataset.get(i)) + "-agg-size",
                Integer.toString(datasetAgg.get(shortDataset.get(i)).split(",").length));
    }
    conf.set("dataset-keys", datasetIds);
    conf.set("dataset-names", datasetNames);
    conf.set("first-group", firstGroupStr);
    conf.set("second-group", secondGroupStr);
    conf.set("complete-random", String.valueOf(completeRandomization));
    conf.set("output-ids", String.valueOf(outputIds));
    conf.set("complete-random-str", random);
    conf.set("main-dataset-id", datasetId.get(shortDataset.get(0)));
    conf.set("remove-not-significant", String.valueOf(removeNotSignificant));
    if (noRelationship.length() > 0) {
        conf.set("no-relationship", noRelationship.substring(0, noRelationship.length() - 1));
    }
    if (hasScoreThreshold) {
        conf.set("score-threshold", scoreThreshold);
    }
    if (hasStrengthThreshold) {
        conf.set("strength-threshold", strengthThreshold);
    }

    conf.set("mapreduce.tasktracker.map.tasks.maximum", String.valueOf(machineConf.getMaximumTasks()));
    conf.set("mapreduce.tasktracker.reduce.tasks.maximum", String.valueOf(machineConf.getMaximumTasks()));
    conf.set("mapreduce.jobtracker.maxtasks.perjob", "-1");
    conf.set("mapreduce.reduce.shuffle.parallelcopies", "20");
    conf.set("mapreduce.input.fileinputformat.split.minsize", "0");
    conf.set("mapreduce.task.io.sort.mb", "200");
    conf.set("mapreduce.task.io.sort.factor", "100");
    conf.set("mapreduce.task.timeout", "2400000");

    if (s3) {
        machineConf.setMachineConfiguration(conf);
        conf.set("fs.s3.awsAccessKeyId", awsAccessKeyId);
        conf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey);
        conf.set("bucket", s3bucket);
    }

    if (snappyCompression) {
        conf.set("mapreduce.map.output.compress", "true");
        conf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec");
        //conf.set("mapreduce.output.fileoutputformat.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec");
    }
    if (bzip2Compression) {
        conf.set("mapreduce.map.output.compress", "true");
        conf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.BZip2Codec");
        //conf.set("mapreduce.output.fileoutputformat.compress.codec", "org.apache.hadoop.io.compress.BZip2Codec");
    }

    Job job = new Job(conf);
    job.setJobName(jobName);

    job.setMapOutputKeyClass(PairAttributeWritable.class);
    job.setMapOutputValueClass(TopologyTimeSeriesWritable.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);

    job.setMapperClass(CorrelationMapper.class);
    job.setReducerClass(CorrelationReducer.class);
    job.setNumReduceTasks(machineConf.getNumberReduces());

    job.setInputFormatClass(SequenceFileInputFormat.class);
    //job.setOutputFormatClass(TextOutputFormat.class);
    LazyOutputFormat.setOutputFormatClass(job, TextOutputFormat.class);

    FileInputFormat.setInputDirRecursive(job, true);
    FileInputFormat.setInputPaths(job, indexInputDirs.substring(0, indexInputDirs.length() - 1));
    FileOutputFormat.setOutputPath(job, new Path(relationshipOutputDir));

    job.setJarByClass(Relationship.class);

    long start = System.currentTimeMillis();
    job.submit();
    job.waitForCompletion(true);
    System.out.println(jobName + "\t" + (System.currentTimeMillis() - start));

    // moving files to right place
    for (int i = 0; i < firstGroup.size(); i++) {
        for (int j = 0; j < secondGroup.size(); j++) {

            if (Integer.parseInt(datasetId.get(firstGroup.get(i))) < Integer
                    .parseInt(datasetId.get(secondGroup.get(j)))) {
                dataset1 = firstGroup.get(i);
                dataset2 = secondGroup.get(j);
            } else {
                dataset1 = secondGroup.get(j);
                dataset2 = firstGroup.get(i);
            }

            if (dataset1.equals(dataset2))
                continue;

            String from = s3bucket + relationshipsDir + "/tmp/" + dataset1 + "-" + dataset2 + "/";
            String to = s3bucket + relationshipsDir + "/" + dataset1 + "-" + dataset2 + "/";
            FrameworkUtils.renameFile(from, to, s3conf, s3);
        }
    }
}

From source file:org.apache.cxf.cwiki.SiteExporter.java

public static void main(String[] args) throws Exception {
    Authenticator.setDefault(new Authenticator() {
        protected PasswordAuthentication getPasswordAuthentication() {
            return new PasswordAuthentication(userName, password.toCharArray());
        }
    });
    ListIterator<String> it = Arrays.asList(args).listIterator();
    List<String> files = new ArrayList<String>();
    boolean forceAll = false;
    int maxThreads = -1;
    while (it.hasNext()) {
        String s = it.next();
        if ("-debug".equals(s)) {
            debug = true;
        } else if ("-user".equals(s)) {
            userName = it.next();
        } else if ("-password".equals(s)) {
            password = it.next();
        } else if ("-d".equals(s)) {
            rootOutputDir = new File(it.next());
        } else if ("-force".equals(s)) {
            forceAll = true;
        } else if ("-svn".equals(s)) {
            svn = true;
        } else if ("-commit".equals(s)) {
            commit = true;
        } else if ("-maxThreads".equals(s)) {
            maxThreads = Integer.parseInt(it.next());
        } else if (s != null && s.length() > 0) {
            files.add(s);
        }
    }

    List<SiteExporter> exporters = new ArrayList<SiteExporter>();
    for (String file : files) {
        exporters.add(new SiteExporter(file, forceAll));
    }
    List<SiteExporter> modified = new ArrayList<SiteExporter>();
    for (SiteExporter exporter : exporters) {
        if (exporter.initialize()) {
            modified.add(exporter);
        }
    }

    // render stuff only if needed
    if (!modified.isEmpty()) {
        setSiteExporters(exporters);

        if (maxThreads <= 0) {
            maxThreads = modified.size();
        }

        ExecutorService executor = Executors.newFixedThreadPool(maxThreads, new ThreadFactory() {
            public Thread newThread(Runnable r) {
                Thread t = new Thread(r);
                t.setDaemon(true);
                return t;
            }
        });
        List<Future<?>> futures = new ArrayList<Future<?>>(modified.size());
        for (SiteExporter exporter : modified) {
            futures.add(executor.submit(exporter));
        }
        for (Future<?> t : futures) {
            t.get();
        }
    }

    if (commit) {
        File file = FileUtils.createTempFile("svncommit", "txt");
        FileWriter writer = new FileWriter(file);
        writer.write(svnCommitMessage.toString());
        writer.close();
        callSvn(rootOutputDir, "commit", "-F", file.getAbsolutePath(), rootOutputDir.getAbsolutePath());
        svnCommitMessage.setLength(0);
    }
}

From source file:com.adito.server.Main.java

/**
 * Entry point
 * 
 * @param args
 * @throws Throwable
 */
public static void main(String[] args) throws Throwable {

    // This is a hack to allow the Install4J installer to get the java
    // runtime that will be used
    if (args.length > 0 && args[0].equals("--jvmdir")) {
        System.out.println(SystemProperties.get("java.home"));
        System.exit(0);
    }
    useWrapper = System.getProperty("wrapper.key") != null;
    final Main main = new Main();
    ContextHolder.setContext(main);

    if (useWrapper) {
        WrapperManager.start(main, args);
    } else {
        Integer returnCode = main.start(args);
        if (returnCode != null) {
            if (main.gui) {
                if (main.startupException == null) {
                    main.startupException = new Exception("An exit code of " + returnCode + " was returned.");
                }
                try {
                    if (SystemProperties.get("os.name").toLowerCase().startsWith("windows")) {
                        UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
                    }
                } catch (Exception e) {
                }
                String mesg = main.startupException.getMessage() == null ? "No message supplied."
                        : main.startupException.getMessage();
                StringBuffer buf = new StringBuffer();
                int l = 0;
                char ch = ' ';
                for (int i = 0; i < mesg.length(); i++) {
                    ch = mesg.charAt(i);
                    if (l > 50 && ch == ' ') {
                        buf.append("\n");
                        l = 0;
                    } else {
                        if (ch == '\n') {
                            l = 0;
                        } else {
                            l++;
                        }
                        buf.append(ch);
                    }
                }
                mesg = buf.toString();
                final String fMesg = mesg;
                SwingUtilities.invokeAndWait(new Runnable() {
                    public void run() {
                        JOptionPane.showMessageDialog(null, fMesg, "Startup Error", JOptionPane.ERROR_MESSAGE);
                    }
                });
            }
            System.exit(returnCode.intValue());
        } else {
            Runtime.getRuntime().addShutdownHook(new Thread() {
                public void run() {
                    if (!main.shuttingDown) {
                        main.stop(0);
                    }
                }
            });
        }
    }
}

From source file:jmxbf.java

public static void main(String[] args) throws IOException, MalformedObjectNameException {

    String HOST = "";
    String PORT = "";
    String usersFile = "";
    String pwdFile = "";

    CommandLine cmd = getParsedCommandLine(args);

    if (cmd != null) {

        HOST = cmd.getOptionValue("host");
        PORT = cmd.getOptionValue("port");
        usersFile = cmd.getOptionValue("usernames-file");
        pwdFile = cmd.getOptionValue("passwords-file");

    } else {

        System.exit(1);
    }

    String finalResults = "";

    BufferedReader users = new BufferedReader(new FileReader(usersFile));
    BufferedReader pwds = new BufferedReader(new FileReader(pwdFile));

    JMXServiceURL url = new JMXServiceURL("service:jmx:rmi:///jndi/rmi://" + HOST + ":" + PORT + "/jmxrmi");
    //new JMXServiceURL("service:jmx:remoting-jmx://" + HOST + ":" + PORT);

    String user = null;
    boolean found = false;
    while ((user = users.readLine()) != null) {
        String pwd = null;
        while ((pwd = pwds.readLine()) != null) {
            //System.out.println(user+":"+pwd);

            Map<String, String[]> env = new HashMap<>();
            String[] credentials = { user, pwd };
            env.put(JMXConnector.CREDENTIALS, credentials);
            try {

                JMXConnector jmxConnector = JMXConnectorFactory.connect(url, env);

                System.out.println();
                System.out.println();
                System.out.println();
                System.out.println(
                        "[+] ###SUCCESS### - We got a valid connection for: " + user + ":" + pwd + "\r\n\r\n");
                finalResults = finalResults + "\n" + user + ":" + pwd;
                jmxConnector.close();
                found = true;
                break;

            } catch (java.lang.SecurityException e) {
                System.out.println("Auth failed!!!\r\n");

            }
        }
        if (found) {
            System.out.println("Found some valid credentials - continuing brute force");
            found = false;

        }
        //closing and reopening pwds
        pwds.close();
        pwds = new BufferedReader(new FileReader(pwdFile));

    }

    users.close();
    //print final results
    if (finalResults.length() != 0) {
        System.out.println("The following valid credentials were found:\n");
        System.out.println(finalResults);
    }

}

From source file:kilim.http.HttpRequestParser.java

public static void main(String args[]) throws Exception {
    /// Testing
    String s = "GET /favicon.ico#test HTTP/1.1\r\n" + "Host: localhost:7262\r\n"
            + "User-Agent: Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.5; en-US; rv:1.9.0.10) Gecko/2009042315 Firefox/3.0.10 Ubiquity/0.1.5\r\n"
            + "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8\r\n"
            + "Accept-Language: en-us,en;q=0.5\r\n" + "Accept-Encoding: gzip,deflate\r\n"
            + "Accept-Charset: ISO-8859-1,utf-8;q=0.7,*;q=0.7\r\n" + "Keep-Alive: 300\r\n"
            + "Connection: keep-alive\r\n\r\n";
    log.info("Input Request: (" + s.length() + " bytes)");
    log.info(s);
    byte[] data = s.getBytes();
    int len = data.length;

    log.info("=============================================================");
    HttpRequest req = new HttpRequest();
    req.buffer = ByteBuffer.allocate(2048);
    req.buffer.put(data);
    initHeader(req, len);
    log.info(req);
}

From source file:it.unimi.di.big.mg4j.query.Query.java

@SuppressWarnings("unchecked")
public static void main(final String[] arg) throws Exception {

    SimpleJSAP jsap = new SimpleJSAP(Query.class.getName(),
            "Loads indices relative to a collection, possibly loads the collection, and answers to queries.",
            new Parameter[] {
                    new FlaggedOption("collection", JSAP.STRING_PARSER, JSAP.NO_DEFAULT, JSAP.NOT_REQUIRED, 'c',
                            "collection", "The collection of documents indexed by the given indices."),
                    new FlaggedOption("objectCollection",
                            new ObjectParser(DocumentCollection.class, MG4JClassParser.PACKAGE),
                            JSAP.NO_DEFAULT, JSAP.NOT_REQUIRED, 'o', "object-collection",
                            "An object specification describing a document collection."),
                    new FlaggedOption("titleList", JSAP.STRING_PARSER, JSAP.NO_DEFAULT, JSAP.NOT_REQUIRED, 't',
                            "title-list",
                            "A serialized big list of titles (will override collection titles if specified)."),
                    new FlaggedOption("titleFile", JSAP.STRING_PARSER, JSAP.NO_DEFAULT, JSAP.NOT_REQUIRED, 'T',
                            "title-file",
                            "A file of newline-separated, UTF-8 titles (will override collection titles if specified)."),
                    new FlaggedOption("input", JSAP.STRING_PARSER, JSAP.NO_DEFAULT, JSAP.NOT_REQUIRED, 'I',
                            "input", "A file containing the input."),
                    new Switch("noSizes", 'n', "no-sizes",
                            "Disable loading document sizes (they are necessary for BM25 scoring)."),
                    new Switch("http", 'h', "http", "Starts an HTTP query server."),
                    new Switch("verbose", 'v', "verbose", "Print full exception stack traces."),
                    new FlaggedOption("itemClass", MG4JClassParser.getParser(), JSAP.NO_DEFAULT,
                            JSAP.NOT_REQUIRED, 'i', "item-class",
                            "The class that will handle item display in the HTTP server."),
                    new FlaggedOption("itemMimeType", JSAP.STRING_PARSER, "text/html", JSAP.NOT_REQUIRED, 'm',
                            "item-mime-type",
                            "A MIME type suggested to the class handling item display in the HTTP server."),
                    new FlaggedOption("port", JSAP.INTEGER_PARSER, "4242", JSAP.NOT_REQUIRED, 'p', "port",
                            "The port on localhost where the server will appear."),
                    new UnflaggedOption("basenameWeight", JSAP.STRING_PARSER, JSAP.NO_DEFAULT, JSAP.REQUIRED,
                            JSAP.GREEDY,
                            "The indices that the servlet will use. Indices are specified using their basename, optionally followed by a colon and a double representing the weight used to score results from that index. Indices without a specified weight are weighted 1.") });

    final JSAPResult jsapResult = jsap.parse(arg);
    if (jsap.messagePrinted())
        return;

    final DocumentCollection documentCollection = (DocumentCollection) (jsapResult.userSpecified("collection")
            ? AbstractDocumentSequence.load(jsapResult.getString("collection"))
            : jsapResult.userSpecified("objectCollection") ? jsapResult.getObject("objectCollection") : null);
    final BigList<? extends CharSequence> titleList = (BigList<? extends CharSequence>) (jsapResult
            .userSpecified("titleList")
                    ? BinIO.loadObject(jsapResult.getString("titleList"))
                    : jsapResult.userSpecified("titleFile")
                            ? new FileLinesBigList(jsapResult.getString("titleFile"), "UTF-8")
                            : null);
    final String[] basenameWeight = jsapResult.getStringArray("basenameWeight");
    final Object2ReferenceLinkedOpenHashMap<String, Index> indexMap = new Object2ReferenceLinkedOpenHashMap<String, Index>(
            Hash.DEFAULT_INITIAL_SIZE, .5f);
    final Reference2DoubleOpenHashMap<Index> index2Weight = new Reference2DoubleOpenHashMap<Index>();
    final boolean verbose = jsapResult.getBoolean("verbose");
    final boolean loadSizes = !jsapResult.getBoolean("noSizes");
    Query.loadIndicesFromSpec(basenameWeight, loadSizes, documentCollection, indexMap, index2Weight);

    final long numberOfDocuments = indexMap.values().iterator().next().numberOfDocuments;
    if (titleList != null && titleList.size64() != numberOfDocuments)
        throw new IllegalArgumentException("The number of titles (" + titleList.size64()
                + " and the number of documents (" + numberOfDocuments + ") do not match");

    final Object2ObjectOpenHashMap<String, TermProcessor> termProcessors = new Object2ObjectOpenHashMap<String, TermProcessor>(
            indexMap.size());
    for (String alias : indexMap.keySet())
        termProcessors.put(alias, indexMap.get(alias).termProcessor);

    final SimpleParser simpleParser = new SimpleParser(indexMap.keySet(), indexMap.firstKey(), termProcessors);

    final Reference2ReferenceMap<Index, Object> index2Parser = new Reference2ReferenceOpenHashMap<Index, Object>();
    /*
    // Fetch parsers for payload-based fields.
    for( Index index: indexMap.values() ) if ( index.hasPayloads ) {
       if ( index.payload.getClass() == DatePayload.class ) index2Parser.put( index, DateFormat.getDateInstance( DateFormat.SHORT, Locale.UK ) );
    }
    */

    final QueryEngine queryEngine = new QueryEngine(simpleParser, new DocumentIteratorBuilderVisitor(indexMap,
            index2Parser, indexMap.get(indexMap.firstKey()), MAX_STEMMING), indexMap);
    queryEngine.setWeights(index2Weight);
    queryEngine.score(new Scorer[] { new BM25Scorer(), new VignaScorer() }, new double[] { 1, 1 });
    // We set up an interval selector only if there is a collection for snippeting
    queryEngine.intervalSelector = documentCollection != null ? new IntervalSelector(4, 40)
            : new IntervalSelector();
    queryEngine.multiplex = true;
    queryEngine.equalize(1000);

    Query query = new Query(queryEngine);
    query.displayMode = OutputType.SNIPPET;

    String q;

    System.err.println(
            "Welcome to the MG4J query class (setup with $mode snippet, $score BM25Scorer VignaScorer, $mplex on, $equalize 1000, $select "
                    + (documentCollection != null ? "4 40" : "all") + ")");
    System.err.println("Please type $ for help.");

    String prompt = indexMap.keySet().toString() + ">";
    int n;

    HttpQueryServer httpQueryServer = null;
    if (jsapResult.getBoolean("http"))
        httpQueryServer = new HttpQueryServer(queryEngine, documentCollection, jsapResult.getClass("itemClass"),
                jsapResult.getString("itemMimeType"), jsapResult.getInt("port"), titleList);
    try {
        final BufferedReader br = new BufferedReader(new InputStreamReader(
                jsapResult.userSpecified("input") ? new FileInputStream(jsapResult.getString("input"))
                        : System.in));
        final ObjectArrayList<DocumentScoreInfo<Reference2ObjectMap<Index, SelectedInterval[]>>> results = new ObjectArrayList<DocumentScoreInfo<Reference2ObjectMap<Index, SelectedInterval[]>>>();

        for (;;) {
            System.out.print(prompt);
            q = br.readLine();
            if (q == null) {
                System.err.println();
                break; // CTRL-D
            }
            if (q.length() == 0)
                continue;
            if (q.charAt(0) == '$') {
                if (!query.interpretCommand(q))
                    break;
                continue;
            }

            long time = -System.nanoTime();

            try {
                n = queryEngine.process(q, 0, query.maxOutput, results);
            } catch (QueryParserException e) {
                if (verbose)
                    e.getCause().printStackTrace(System.err);
                else
                    System.err.println(e.getCause());
                continue;
            } catch (Exception e) {
                if (verbose)
                    e.printStackTrace(System.err);
                else
                    System.err.println(e);
                continue;
            }

            time += System.nanoTime();
            query.output(results, documentCollection, titleList, TextMarker.TEXT_BOLDFACE);
            System.err.println(results.size() + " results; " + n + " documents examined; " + time / 1000000.
                    + " ms; " + Util.format((n * 1000000000.0) / time) + " documents/s, "
                    + Util.format(time / (double) n) + " ns/document");
        }

    } finally {
        if (httpQueryServer != null)
            httpQueryServer.server.stop();
        if (query.output != System.out)
            query.output.close();
    }
}

From source file:com.joliciel.lefff.Lefff.java

/**
 * @param args
 */
public static void main(String[] args) throws Exception {
    long startTime = (new Date()).getTime();
    String command = args[0];

    String memoryBaseFilePath = "";
    String lefffFilePath = "";
    String posTagSetPath = "";
    String posTagMapPath = "";
    String word = null;
    List<String> categories = null;
    int startLine = -1;
    int stopLine = -1;

    boolean firstArg = true;
    for (String arg : args) {
        if (firstArg) {
            firstArg = false;
            continue;
        }
        int equalsPos = arg.indexOf('=');
        String argName = arg.substring(0, equalsPos);
        String argValue = arg.substring(equalsPos + 1);
        if (argName.equals("memoryBase"))
            memoryBaseFilePath = argValue;
        else if (argName.equals("lefffFile"))
            lefffFilePath = argValue;
        else if (argName.equals("startLine"))
            startLine = Integer.parseInt(argValue);
        else if (argName.equals("stopLine"))
            stopLine = Integer.parseInt(argValue);
        else if (argName.equals("posTagSet"))
            posTagSetPath = argValue;
        else if (argName.equals("posTagMap"))
            posTagMapPath = argValue;
        else if (argName.equals("word"))
            word = argValue;
        else if (argName.equals("categories")) {
            String[] parts = argValue.split(",");
            categories = new ArrayList<String>();
            for (String part : parts) {
                categories.add(part);
            }
        } else
            throw new RuntimeException("Unknown argument: " + argName);
    }

    final LefffServiceLocator locator = new LefffServiceLocator();
    locator.setDataSourcePropertiesFile("jdbc-live.properties");

    TalismaneServiceLocator talismaneServiceLocator = TalismaneServiceLocator.getInstance();

    final LefffService lefffService = locator.getLefffService();
    if (command.equals("load")) {
        if (lefffFilePath.length() == 0)
            throw new RuntimeException("Required argument: lefffFile");
        final LefffLoader loader = lefffService.getLefffLoader();
        File file = new File(lefffFilePath);
        if (startLine > 0)
            loader.setStartLine(startLine);
        if (stopLine > 0)
            loader.setStopLine(stopLine);

        loader.LoadFile(file);
    } else if (command.equals("serialiseBase")) {
        if (memoryBaseFilePath.length() == 0)
            throw new RuntimeException("Required argument: memoryBase");
        if (posTagSetPath.length() == 0)
            throw new RuntimeException("Required argument: posTagSet");
        if (posTagMapPath.length() == 0)
            throw new RuntimeException("Required argument: posTagMap");

        PosTaggerServiceLocator posTaggerServiceLocator = talismaneServiceLocator.getPosTaggerServiceLocator();
        PosTaggerService posTaggerService = posTaggerServiceLocator.getPosTaggerService();
        File posTagSetFile = new File(posTagSetPath);
        PosTagSet posTagSet = posTaggerService.getPosTagSet(posTagSetFile);

        File posTagMapFile = new File(posTagMapPath);
        LefffPosTagMapper posTagMapper = lefffService.getPosTagMapper(posTagMapFile, posTagSet);

        Map<PosTagSet, LefffPosTagMapper> posTagMappers = new HashMap<PosTagSet, LefffPosTagMapper>();
        posTagMappers.put(posTagSet, posTagMapper);

        LefffMemoryLoader loader = new LefffMemoryLoader();
        LefffMemoryBase memoryBase = loader.loadMemoryBaseFromDatabase(lefffService, posTagMappers, categories);
        File memoryBaseFile = new File(memoryBaseFilePath);
        memoryBaseFile.delete();
        loader.serializeMemoryBase(memoryBase, memoryBaseFile);
    } else if (command.equals("deserialiseBase")) {
        if (memoryBaseFilePath.length() == 0)
            throw new RuntimeException("Required argument: memoryBase");

        LefffMemoryLoader loader = new LefffMemoryLoader();
        File memoryBaseFile = new File(memoryBaseFilePath);
        LefffMemoryBase memoryBase = loader.deserializeMemoryBase(memoryBaseFile);

        String[] testWords = new String[] { "avoir" };
        if (word != null) {
            testWords = word.split(",");
        }

        for (String testWord : testWords) {
            Set<PosTag> possiblePosTags = memoryBase.findPossiblePosTags(testWord);
            LOG.debug("##### PosTags for '" + testWord + "': " + possiblePosTags.size());
            int i = 1;
            for (PosTag posTag : possiblePosTags) {
                LOG.debug("### PosTag " + (i++) + ":" + posTag);
            }

            List<? extends LexicalEntry> entriesForWord = memoryBase.getEntries(testWord);
            LOG.debug("##### Entries for '" + testWord + "': " + entriesForWord.size());
            i = 1;
            for (LexicalEntry entry : entriesForWord) {
                LOG.debug("### Entry " + (i++) + ":" + entry.getWord());
                LOG.debug("Category " + entry.getCategory());
                LOG.debug("Predicate " + entry.getPredicate());
                LOG.debug("Lemma " + entry.getLemma());
                LOG.debug("Morphology " + entry.getMorphology());
            }

            List<? extends LexicalEntry> entriesForLemma = memoryBase.getEntriesForLemma(testWord, "");
            LOG.debug("##### Entries for '" + testWord + "' lemma: " + entriesForLemma.size());
            for (LexicalEntry entry : entriesForLemma) {
                LOG.debug("### Entry " + entry.getWord());
                LOG.debug("Category " + entry.getCategory());
                LOG.debug("Predicate " + entry.getPredicate());
                LOG.debug("Lemma " + entry.getLemma());
                LOG.debug("Morphology " + entry.getMorphology());
                for (PredicateArgument argument : entry.getPredicateArguments()) {
                    LOG.debug("Argument: " + argument.getFunction() + ",Optional? " + argument.isOptional());
                    for (String realisation : argument.getRealisations()) {
                        LOG.debug("Realisation: " + realisation);
                    }
                }
            }
        }

    } else {
        System.out.println("Usage : Lefff load filepath");
    }
    long endTime = (new Date()).getTime() - startTime;
    LOG.debug("Total runtime: " + ((double) endTime / 1000) + " seconds");
}