Example usage for java.lang.String.length()

List of usage examples for java.lang.String.length()

Introduction

On this page you can find example usages of java.lang.String.length().

Prototype

public int length() 

Document

Returns the length of this string.
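
Before the longer excerpts below, here is a minimal, self-contained sketch of the call itself (the values are ours, chosen only for illustration):

public class LengthDemo {
    public static void main(String[] args) {
        String s = "hello";
        // length() returns the number of char values (UTF-16 code units) in the string
        System.out.println(s.length());   // 5
        System.out.println("".length());  // 0
        // For supplementary characters, length() counts code units rather than code points,
        // so a single emoji encoded as a surrogate pair contributes 2 to the result.
        System.out.println("a\uD83D\uDE00".length()); // 3
    }
}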

Usage

From source file:edu.nyu.tandon.tool.BinnedRawHits.java

@SuppressWarnings("unchecked")
public static void main(final String[] arg) throws Exception {

    SimpleJSAP jsap = new SimpleJSAP(BinnedRawHits.class.getName(),
            "Loads indices relative to a collection, possibly loads the collection, and answers to queries.",
            new Parameter[] {
                    new FlaggedOption("collection", JSAP.STRING_PARSER, JSAP.NO_DEFAULT, JSAP.NOT_REQUIRED, 'c',
                            "collection", "The collection of documents indexed by the given indices."),
                    new FlaggedOption("objectCollection",
                            new ObjectParser(DocumentCollection.class, MG4JClassParser.PACKAGE),
                            JSAP.NO_DEFAULT, JSAP.NOT_REQUIRED, 'o', "object-collection",
                            "An object specification describing a document collection."),
                    new FlaggedOption("titleList", JSAP.STRING_PARSER, JSAP.NO_DEFAULT, JSAP.NOT_REQUIRED, 't',
                            "title-list",
                            "A serialized big list of titles (will override collection titles if specified)."),
                    new FlaggedOption("titleFile", JSAP.STRING_PARSER, JSAP.NO_DEFAULT, JSAP.NOT_REQUIRED, 'T',
                            "title-file",
                            "A file of newline-separated, UTF-8 titles (will override collection titles if specified)."),
                    new FlaggedOption("input", JSAP.STRING_PARSER, JSAP.NO_DEFAULT, JSAP.REQUIRED, 'I', "input",
                            "A file containing the input."),
                    new Switch("noSizes", 'n', "no-sizes",
                            "Disable loading document sizes (they are necessary for BM25 scoring)."),
                    new Switch("http", 'h', "http", "Starts an HTTP query server."),
                    new Switch("verbose", 'v', "verbose", "Print full exception stack traces."),
                    new FlaggedOption("itemClass", MG4JClassParser.getParser(), JSAP.NO_DEFAULT,
                            JSAP.NOT_REQUIRED, 'i', "item-class",
                            "The class that will handle item display in the HTTP server."),
                    new FlaggedOption("itemMimeType", JSAP.STRING_PARSER, "text/html", JSAP.NOT_REQUIRED, 'm',
                            "item-mime-type",
                            "A MIME type suggested to the class handling item display in the HTTP server."),
                    new FlaggedOption("port", JSAP.INTEGER_PARSER, "4242", JSAP.NOT_REQUIRED, 'p', "port",
                            "The port on localhost where the server will appear."),
                    new UnflaggedOption("basenameWeight", JSAP.STRING_PARSER, JSAP.NO_DEFAULT, JSAP.REQUIRED,
                            JSAP.GREEDY,
                            "The indices that the servlet will use. Indices are specified using their basename, optionally followed by a colon and a double representing the weight used to score results from that index. Indices without a specified weight are weighted 1."),

                    new Switch("noMplex", 'P', "noMplex", "Starts with multiplex disabled."),
                    new FlaggedOption("results", JSAP.INTEGER_PARSER, "1000", JSAP.NOT_REQUIRED, 'r', "results",
                            "The # of results to display"),
                    new FlaggedOption("mode", JSAP.STRING_PARSER, JSAP.NO_DEFAULT, JSAP.NOT_REQUIRED, 'M',
                            "time", "The results display mode"),
                    new FlaggedOption("divert", JSAP.STRING_PARSER, JSAP.NO_DEFAULT, JSAP.NOT_REQUIRED, 'd',
                            "divert", "output file"),
                    new FlaggedOption("dumpsize", JSAP.INTEGER_PARSER, JSAP.NO_DEFAULT, JSAP.NOT_REQUIRED, 'D',
                            "dumpsize", "number of queries before dumping")

            });

    final JSAPResult jsapResult = jsap.parse(arg);
    if (jsap.messagePrinted())
        return;

    final DocumentCollection documentCollection = (DocumentCollection) (jsapResult.userSpecified("collection")
            ? AbstractDocumentSequence.load(jsapResult.getString("collection"))
            : jsapResult.userSpecified("objectCollection") ? jsapResult.getObject("objectCollection") : null);
    final BigList<? extends CharSequence> titleList = (BigList<? extends CharSequence>) (jsapResult
            .userSpecified("titleList")
                    ? BinIO.loadObject(jsapResult.getString("titleList"))
                    : jsapResult.userSpecified("titleFile")
                            ? new FileLinesBigList(jsapResult.getString("titleFile"), "UTF-8")
                            : null);
    final String[] basenameWeight = jsapResult.getStringArray("basenameWeight");
    final Object2ReferenceLinkedOpenHashMap<String, Index> indexMap = new Object2ReferenceLinkedOpenHashMap<String, Index>(
            Hash.DEFAULT_INITIAL_SIZE, .5f);
    final Reference2DoubleOpenHashMap<Index> index2Weight = new Reference2DoubleOpenHashMap<Index>();
    final boolean verbose = jsapResult.getBoolean("verbose");
    final boolean loadSizes = !jsapResult.getBoolean("noSizes");
    BinnedRawHits.loadIndicesFromSpec(basenameWeight, loadSizes, documentCollection, indexMap, index2Weight);

    final long numberOfDocuments = indexMap.values().iterator().next().numberOfDocuments;
    if (titleList != null && titleList.size64() != numberOfDocuments)
        throw new IllegalArgumentException("The number of titles (" + titleList.size64()
                + ") and the number of documents (" + numberOfDocuments + ") do not match");

    final Object2ObjectOpenHashMap<String, TermProcessor> termProcessors = new Object2ObjectOpenHashMap<String, TermProcessor>(
            indexMap.size());
    for (String alias : indexMap.keySet())
        termProcessors.put(alias, indexMap.get(alias).termProcessor);

    final SimpleParser simpleParser = new SimpleParser(indexMap.keySet(), indexMap.firstKey(), termProcessors);

    final Reference2ReferenceMap<Index, Object> index2Parser = new Reference2ReferenceOpenHashMap<Index, Object>();
    /*
    // Fetch parsers for payload-based fields.
    for( Index index: indexMap.values() ) if ( index.hasPayloads ) {
     if ( index.payload.getClass() == DatePayload.class ) index2Parser.put( index, DateFormat.getDateInstance( DateFormat.SHORT, Locale.UK ) );
    }
    */

    final HitsQueryEngine queryEngine = new HitsQueryEngine(simpleParser, new DocumentIteratorBuilderVisitor(
            indexMap, index2Parser, indexMap.get(indexMap.firstKey()), MAX_STEMMING), indexMap);
    queryEngine.setWeights(index2Weight);
    queryEngine.score(new Scorer[] { new BM25Scorer(), new VignaScorer() }, new double[] { 1, 1 });

    // The switch is registered as "noMplex", so key off that flag: multiplex stays on unless it is given.
    queryEngine.multiplex = !jsapResult.getBoolean("noMplex");
    queryEngine.intervalSelector = null;
    queryEngine.equalize(1000);

    BinnedRawHits query = new BinnedRawHits(queryEngine);

    // start docHits with at least 10K results
    query.interpretCommand("$score BM25Scorer");
    query.interpretCommand("$mode time");

    if (jsapResult.userSpecified("divert"))
        query.interpretCommand("$divert " + jsapResult.getObject("divert"));

    query.displayMode = OutputType.DOCHHITS;
    query.maxOutput = jsapResult.getInt("results", 10000);

    String q;
    int n = 0;

    int dumpsize = jsapResult.userSpecified("dumpsize") ? jsapResult.getInt("dumpsize", 10000) : 10000;
    buildBins(query.maxOutput, (int) numberOfDocuments);
    String lastQ = "";

    try {
        final BufferedReader br = new BufferedReader(
                new InputStreamReader(new FileInputStream(jsapResult.getString("input"))));

        final ObjectArrayList<DocumentScoreInfo<ObjectArrayList<Byte>>> results = new ObjectArrayList<DocumentScoreInfo<ObjectArrayList<Byte>>>();

        for (;;) {
            q = br.readLine();
            if (q == null) {
                System.err.println();
                break; // CTRL-D
            }
            if (q.length() == 0)
                continue;
            if (q.charAt(0) == '$') {
                if (!query.interpretCommand(q))
                    break;
                continue;
            }

            queryCount++;
            long time = -System.nanoTime();
            if (q.compareTo(lastQ) != 0) {
                try {
                    n = queryEngine.process(q, 0, query.maxOutput, results);
                } catch (QueryParserException e) {
                    if (verbose)
                        e.getCause().printStackTrace(System.err);
                    else
                        System.err.println(e.getCause());
                    continue;
                } catch (Exception e) {
                    if (verbose)
                        e.printStackTrace(System.err);
                    else
                        System.err.println(e);
                    continue;
                }
                lastQ = q;
            }
            time += System.nanoTime();
            query.output(results, documentCollection, titleList, TextMarker.TEXT_BOLDFACE);

            // dump batch
            if (queryCount % dumpsize == 0) {
                dumpBatch(query, numberOfDocuments, false);
            }
            // check postHits
            if (query.postHits.size() > 100000000)
                dumpPosthits(query, numberOfDocuments, false);
        }

    } finally {
        dumpBatch(query, numberOfDocuments, true);
        dumpPosthits(query, numberOfDocuments, true);
        if (query.outputDH != System.out)
            query.outputDH.close();
    }
}
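
In the query loop above, length() does the screening: blank lines read from the input file are skipped with q.length() == 0 before any command or query parsing happens. A minimal sketch of that pattern (the file name is a placeholder, not from the example):

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;

public class SkipBlankLines {
    public static void main(String[] args) throws IOException {
        // "queries.txt" is a hypothetical input file for this sketch
        try (BufferedReader br = new BufferedReader(new FileReader("queries.txt"))) {
            String line;
            while ((line = br.readLine()) != null) {
                if (line.length() == 0)   // same check as in the example; line.isEmpty() is equivalent
                    continue;
                System.out.println("processing: " + line);
            }
        }
    }
}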

From source file:GetDBInfo.java

public static void main(String[] args) {
    Connection c = null; // The JDBC connection to the database server
    try {
        // Look for the properties file DB.props in the same directory as
        // this program. It will contain default values for the various
        // parameters needed to connect to a database
        Properties p = new Properties();
        try {
            p.load(GetDBInfo.class.getResourceAsStream("DB.props"));
        } catch (Exception e) {
        }

        // Get default values from the properties file
        String driver = p.getProperty("driver"); // Driver class name
        String server = p.getProperty("server", ""); // JDBC URL for server
        String user = p.getProperty("user", ""); // db user name
        String password = p.getProperty("password", ""); // db password

        // These variables don't have defaults
        String database = null; // The db name (appended to server URL)
        String table = null; // The optional name of a table in the db

        // Parse the command-line args to override the default values above
        for (int i = 0; i < args.length; i++) {
            if (args[i].equals("-d"))
                driver = args[++i]; //-d <driver>
            else if (args[i].equals("-s"))
                server = args[++i];//-s <server>
            else if (args[i].equals("-u"))
                user = args[++i]; //-u <user>
            else if (args[i].equals("-p"))
                password = args[++i];
            else if (database == null)
                database = args[i]; // <dbname>
            else if (table == null)
                table = args[i]; // <table>
            else
                throw new IllegalArgumentException("Unknown argument: " + args[i]);
        }

        // Make sure that at least a server or a database were specified.
        // If not, we have no idea what to connect to, and cannot continue.
        if (server.length() == 0 && (database == null || database.length() == 0))
            throw new IllegalArgumentException("No database specified.");

        // Load the db driver, if any was specified.
        if (driver != null)
            Class.forName(driver);

        // Now attempt to open a connection to the specified database on
        // the specified server, using the specified name and password
        c = DriverManager.getConnection(server + database, user, password);

        // Get the DatabaseMetaData object for the connection. This is the
        // object that will return us all the data we're interested in here
        DatabaseMetaData md = c.getMetaData();

        // Display information about the server, the driver, etc.
        System.out.println("DBMS: " + md.getDatabaseProductName() + " " + md.getDatabaseProductVersion());
        System.out.println("JDBC Driver: " + md.getDriverName() + " " + md.getDriverVersion());
        System.out.println("Database: " + md.getURL());
        System.out.println("User: " + md.getUserName());

        // Now, if the user did not specify a table, then display a list of
        // all tables defined in the named database. Note that tables are
        // returned in a ResultSet, just like query results are.
        if (table == null) {
            System.out.println("Tables:");
            ResultSet r = md.getTables("", "", "%", null);
            while (r.next())
                System.out.println("\t" + r.getString(3));
        }

        // Otherwise, list all columns of the specified table.
        // Again, information about the columns is returned in a ResultSet
        else {
            System.out.println("Columns of " + table + ": ");
            ResultSet r = md.getColumns("", "", table, "%");
            while (r.next())
                System.out.println("\t" + r.getString(4) + " : " + r.getString(6));
        }
    }
    // Print an error message if anything goes wrong.
    catch (Exception e) {
        System.err.println(e);
        if (e instanceof SQLException)
            System.err.println(((SQLException) e).getSQLState());
        System.err.println("Usage: java GetDBInfo [-d <driver>] " + "[-s <dbserver>]\n"
                + "\t[-u <username>] [-p <password>] <dbname>");
    }
    // Always remember to close the Connection object when we're done!
    finally {
        try {
            c.close();
        } catch (Exception e) {
        }
    }
}
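
A detail worth calling out from this example: length() throws a NullPointerException on a null reference, so defensive code usually pairs the null check with the emptiness check. A minimal sketch of that null-safe idiom (the helper name is ours, not from the example):

public class EmptyCheck {
    // Hypothetical helper: true when the string is null or has zero length
    static boolean isNullOrEmpty(String s) {
        return s == null || s.length() == 0;   // s.isEmpty() works equally well for the second test
    }

    public static void main(String[] args) {
        System.out.println(isNullOrEmpty(null));   // true
        System.out.println(isNullOrEmpty(""));     // true
        System.out.println(isNullOrEmpty("db"));   // false
    }
}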

From source file:edu.nyu.tandon.tool.RawDocHits_deprecated.java

@SuppressWarnings("unchecked")
public static void main(final String[] arg) throws Exception {

    SimpleJSAP jsap = new SimpleJSAP(RawDocHits_deprecated.class.getName(),
            "Loads indices relative to a collection, possibly loads the collection, and answers to queries.",
            new Parameter[] {
                    new FlaggedOption("collection", JSAP.STRING_PARSER, JSAP.NO_DEFAULT, JSAP.NOT_REQUIRED, 'c',
                            "collection", "The collection of documents indexed by the given indices."),
                    new FlaggedOption("objectCollection",
                            new ObjectParser(DocumentCollection.class, MG4JClassParser.PACKAGE),
                            JSAP.NO_DEFAULT, JSAP.NOT_REQUIRED, 'o', "object-collection",
                            "An object specification describing a document collection."),
                    new FlaggedOption("titleList", JSAP.STRING_PARSER, JSAP.NO_DEFAULT, JSAP.NOT_REQUIRED, 't',
                            "title-list",
                            "A serialized big list of titles (will override collection titles if specified)."),
                    new FlaggedOption("titleFile", JSAP.STRING_PARSER, JSAP.NO_DEFAULT, JSAP.NOT_REQUIRED, 'T',
                            "title-file",
                            "A file of newline-separated, UTF-8 titles (will override collection titles if specified)."),
                    new FlaggedOption("input", JSAP.STRING_PARSER, JSAP.NO_DEFAULT, JSAP.REQUIRED, 'I', "input",
                            "A file containing the input."),
                    new Switch("noSizes", 'n', "no-sizes",
                            "Disable loading document sizes (they are necessary for BM25 scoring)."),
                    new Switch("http", 'h', "http", "Starts an HTTP query server."),
                    new Switch("verbose", 'v', "verbose", "Print full exception stack traces."),
                    new FlaggedOption("itemClass", MG4JClassParser.getParser(), JSAP.NO_DEFAULT,
                            JSAP.NOT_REQUIRED, 'i', "item-class",
                            "The class that will handle item display in the HTTP server."),
                    new FlaggedOption("itemMimeType", JSAP.STRING_PARSER, "text/html", JSAP.NOT_REQUIRED, 'm',
                            "item-mime-type",
                            "A MIME type suggested to the class handling item display in the HTTP server."),
                    new FlaggedOption("port", JSAP.INTEGER_PARSER, "4242", JSAP.NOT_REQUIRED, 'p', "port",
                            "The port on localhost where the server will appear."),
                    new UnflaggedOption("basenameWeight", JSAP.STRING_PARSER, JSAP.NO_DEFAULT, JSAP.REQUIRED,
                            JSAP.GREEDY,
                            "The indices that the servlet will use. Indices are specified using their basename, optionally followed by a colon and a double representing the weight used to score results from that index. Indices without a specified weight are weighted 1."),

                    new Switch("noMplex", 'P', "noMplex", "Starts with multiplex disabled."),
                    new FlaggedOption("results", JSAP.INTEGER_PARSER, "1000", JSAP.NOT_REQUIRED, 'r', "results",
                            "The # of results to display"),
                    new FlaggedOption("mode", JSAP.STRING_PARSER, JSAP.NO_DEFAULT, JSAP.NOT_REQUIRED, 'M',
                            "time", "The results display mode"),
                    new FlaggedOption("divert", JSAP.STRING_PARSER, JSAP.NO_DEFAULT, JSAP.NOT_REQUIRED, 'd',
                            "divert", "output file"),
                    new FlaggedOption("dumpsize", JSAP.INTEGER_PARSER, JSAP.NO_DEFAULT, JSAP.NOT_REQUIRED, 'D',
                            "dumpsize", "number of queries before dumping")

            });

    final JSAPResult jsapResult = jsap.parse(arg);
    if (jsap.messagePrinted())
        return;

    final DocumentCollection documentCollection = (DocumentCollection) (jsapResult.userSpecified("collection")
            ? AbstractDocumentSequence.load(jsapResult.getString("collection"))
            : jsapResult.userSpecified("objectCollection") ? jsapResult.getObject("objectCollection") : null);
    final BigList<? extends CharSequence> titleList = (BigList<? extends CharSequence>) (jsapResult
            .userSpecified("titleList")
                    ? BinIO.loadObject(jsapResult.getString("titleList"))
                    : jsapResult.userSpecified("titleFile")
                            ? new FileLinesBigList(jsapResult.getString("titleFile"), "UTF-8")
                            : null);
    final String[] basenameWeight = jsapResult.getStringArray("basenameWeight");
    final Object2ReferenceLinkedOpenHashMap<String, Index> indexMap = new Object2ReferenceLinkedOpenHashMap<String, Index>(
            Hash.DEFAULT_INITIAL_SIZE, .5f);
    final Reference2DoubleOpenHashMap<Index> index2Weight = new Reference2DoubleOpenHashMap<Index>();
    final boolean verbose = jsapResult.getBoolean("verbose");
    final boolean loadSizes = !jsapResult.getBoolean("noSizes");
    RawDocHits_deprecated.loadIndicesFromSpec(basenameWeight, loadSizes, documentCollection, indexMap,
            index2Weight);

    final long numberOfDocuments = indexMap.values().iterator().next().numberOfDocuments;
    if (titleList != null && titleList.size64() != numberOfDocuments)
        throw new IllegalArgumentException("The number of titles (" + titleList.size64()
                + ") and the number of documents (" + numberOfDocuments + ") do not match");

    final Object2ObjectOpenHashMap<String, TermProcessor> termProcessors = new Object2ObjectOpenHashMap<String, TermProcessor>(
            indexMap.size());
    for (String alias : indexMap.keySet())
        termProcessors.put(alias, indexMap.get(alias).termProcessor);

    final SimpleParser simpleParser = new SimpleParser(indexMap.keySet(), indexMap.firstKey(), termProcessors);

    final Reference2ReferenceMap<Index, Object> index2Parser = new Reference2ReferenceOpenHashMap<Index, Object>();
    /*
    // Fetch parsers for payload-based fields.
    for( Index index: indexMap.values() ) if ( index.hasPayloads ) {
     if ( index.payload.getClass() == DatePayload.class ) index2Parser.put( index, DateFormat.getDateInstance( DateFormat.SHORT, Locale.UK ) );
    }
    */

    final QueryEngine queryEngine = new QueryEngine(simpleParser, new DocumentIteratorBuilderVisitor(indexMap,
            index2Parser, indexMap.get(indexMap.firstKey()), MAX_STEMMING), indexMap);
    queryEngine.setWeights(index2Weight);
    queryEngine.score(new Scorer[] { new BM25Scorer(), new VignaScorer() }, new double[] { 1, 1 });

    // We set up an interval selector only if there is a collection for snippeting
    queryEngine.intervalSelector = documentCollection != null ? new IntervalSelector(4, 40)
            : new IntervalSelector();

    // The switch is registered as "noMplex", so key off that flag: multiplex stays on unless it is given.
    queryEngine.multiplex = !jsapResult.getBoolean("noMplex");

    queryEngine.equalize(1000);

    RawDocHits_deprecated query = new RawDocHits_deprecated(queryEngine);

    // start docHits with at least 10K results
    query.interpretCommand("$score BM25Scorer");
    query.interpretCommand("$mode time");
    query.interpretCommand("$select");

    if (jsapResult.userSpecified("divert"))
        query.interpretCommand("$divert " + jsapResult.getObject("divert"));

    query.displayMode = OutputType.DOCHHITS;
    query.maxOutput = jsapResult.getInt("results", 10000);

    String q;
    int n = 0;

    int dumpsize = jsapResult.userSpecified("dumpsize") ? jsapResult.getInt("dumpsize", 10000) : 1000;
    buildBins(query.maxOutput, (int) numberOfDocuments);
    String lastQ = "";

    try {
        final BufferedReader br = new BufferedReader(
                new InputStreamReader(new FileInputStream(jsapResult.getString("input"))));
        final ObjectArrayList<DocumentScoreInfo<Reference2ObjectMap<Index, SelectedInterval[]>>> results = new ObjectArrayList<DocumentScoreInfo<Reference2ObjectMap<Index, SelectedInterval[]>>>();

        for (;;) {
            q = br.readLine();
            if (q == null) {
                System.err.println();
                break; // CTRL-D
            }
            if (q.length() == 0)
                continue;
            if (q.charAt(0) == '$') {
                if (!query.interpretCommand(q))
                    break;
                continue;
            }

            queryCount++;
            long time = -System.nanoTime();
            if (q.compareTo(lastQ) != 0) {
                try {
                    n = queryEngine.process(q, 0, query.maxOutput, results);
                } catch (QueryParserException e) {
                    if (verbose)
                        e.getCause().printStackTrace(System.err);
                    else
                        System.err.println(e.getCause());
                    continue;
                } catch (Exception e) {
                    if (verbose)
                        e.printStackTrace(System.err);
                    else
                        System.err.println(e);
                    continue;
                }
                lastQ = q;
                time += System.nanoTime();
                query.output(results, documentCollection, titleList, TextMarker.TEXT_BOLDFACE);
            } else {
                // repeat last query results
                time += System.nanoTime();
                for (int j = 0; j < results.size(); j++)
                    docHits[lastResults[j]]++;
            }

            // dump batch
            if (queryCount % dumpsize == 0) {
                dumpBatch(query, numberOfDocuments);
            }
        }

    } finally {
        dumpBatch(query, numberOfDocuments);
        if (query.output != System.out)
            query.output.close();
    }
}

From source file:com.cloud.test.utils.SubmitCert.java

public static void main(String[] args) {
    // Parameters
    List<String> argsList = Arrays.asList(args);
    Iterator<String> iter = argsList.iterator();
    while (iter.hasNext()) {
        String arg = iter.next();

        if (arg.equals("-c")) {
            certFileName = iter.next();
        }

        if (arg.equals("-s")) {
            secretKey = iter.next();
        }

        if (arg.equals("-a")) {
            apiKey = iter.next();
        }

        if (arg.equals("-action")) {
            url = "Action=" + iter.next();
        }
    }

    Properties prop = new Properties();
    try {
        prop.load(new FileInputStream("conf/tool.properties"));
    } catch (IOException ex) {
        s_logger.error("Error reading from conf/tool.properties", ex);
        System.exit(2);
    }

    host = prop.getProperty("host");
    port = prop.getProperty("port");

    if (url.equals("Action=SetCertificate") && certFileName == null) {
        s_logger.error("Please set path to certificate (including file name) with -c option");
        System.exit(1);
    }

    if (secretKey == null) {
        s_logger.error("Please set secretkey with -s option");
        System.exit(1);
    }

    if (apiKey == null) {
        s_logger.error("Please set apikey with -a option");
        System.exit(1);
    }

    if (host == null) {
        s_logger.error("Please set host in tool.properties file");
        System.exit(1);
    }

    if (port == null) {
        s_logger.error("Please set port in tool.properties file");
        System.exit(1);
    }

    TreeMap<String, String> param = new TreeMap<String, String>();

    String req = "GET\n" + host + ":" + prop.getProperty("port") + "\n/" + prop.getProperty("accesspoint")
            + "\n";
    String temp = "";

    if (certFileName != null) {
        cert = readCert(certFileName);
        param.put("cert", cert);
    }

    param.put("AWSAccessKeyId", apiKey);
    param.put("Expires", prop.getProperty("expires"));
    param.put("SignatureMethod", prop.getProperty("signaturemethod"));
    param.put("SignatureVersion", "2");
    param.put("Version", prop.getProperty("version"));

    StringTokenizer str1 = new StringTokenizer(url, "&");
    while (str1.hasMoreTokens()) {
        String newEl = str1.nextToken();
        StringTokenizer str2 = new StringTokenizer(newEl, "=");
        String name = str2.nextToken();
        String value = str2.nextToken();
        param.put(name, value);
    }

    //sort url hash map by key
    Set c = param.entrySet();
    Iterator it = c.iterator();
    while (it.hasNext()) {
        Map.Entry me = (Map.Entry) it.next();
        String key = (String) me.getKey();
        String value = (String) me.getValue();
        try {
            temp = temp + key + "=" + URLEncoder.encode(value, "UTF-8") + "&";
        } catch (Exception ex) {
            s_logger.error("Unable to set parameter " + value + " for the command " + param.get("command"), ex);
        }

    }
    temp = temp.substring(0, temp.length() - 1);
    String requestToSign = req + temp;
    String signature = UtilsForTest.signRequest(requestToSign, secretKey);
    String encodedSignature = "";
    try {
        encodedSignature = URLEncoder.encode(signature, "UTF-8");
    } catch (Exception ex) {
        ex.printStackTrace();
    }

    String url = "http://" + host + ":" + prop.getProperty("port") + "/" + prop.getProperty("accesspoint") + "?"
            + temp + "&Signature=" + encodedSignature;
    s_logger.info("Sending request with url:  " + url + "\n");
    sendRequest(url);
}
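
SubmitCert uses length() to drop the trailing '&' left behind by the parameter loop: temp.substring(0, temp.length() - 1). A minimal sketch of that trim, with a guard so the call is also safe on an empty string (variable names are ours):

public class TrimTrailingDelimiter {
    public static void main(String[] args) {
        StringBuilder sb = new StringBuilder();
        for (String param : new String[] { "a=1", "b=2", "c=3" }) {
            sb.append(param).append('&');
        }
        String query = sb.toString();
        // Drop the final '&' only if there is something to drop
        if (query.length() > 0) {
            query = query.substring(0, query.length() - 1);
        }
        System.out.println(query);   // a=1&b=2&c=3
    }
}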

From source file:SIFT_Volume_Stitching.java

public static void main(String[] args) {
    // set the plugins.dir property to make the plugin appear in the Plugins menu
    Class<?> clazz = SIFT_Volume_Stitching.class;
    String url = clazz.getResource("/" + clazz.getName().replace('.', '/') + ".class").toString();
    String pluginsDir = url.substring("file:".length(),
            url.length() - clazz.getName().length() - ".class".length());
    System.setProperty("plugins.dir", pluginsDir);

    // start ImageJ
    new ImageJ();

    // open the Clown sample
    ImagePlus image1 = IJ.openImage("https://www.creatis.insa-lyon.fr/~frindel/BackStack115.tif");
    ImagePlus image2 = IJ.openImage("https://www.creatis.insa-lyon.fr/~frindel/FrontStack.tif");
    image1.show();
    image2.show();

    // run the plugin
    IJ.runPlugIn(clazz.getName(), "");
}
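
Here length() supplies the arithmetic for the substring bounds: the plugins directory is carved out of the class URL by removing the "file:" prefix and the "<ClassName>.class" suffix. A simplified sketch of that computation (the URL literal is made up for illustration):

public class SubstringBounds {
    public static void main(String[] args) {
        String className = "SIFT_Volume_Stitching";
        // Hypothetical class URL, shaped like the one returned by Class.getResource(...).toString()
        String url = "file:/opt/imagej/plugins/" + className + ".class";
        String pluginsDir = url.substring("file:".length(),
                url.length() - className.length() - ".class".length());
        System.out.println(pluginsDir);   // /opt/imagej/plugins/
    }
}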

From source file:imp.lstm.main.Driver.java

public static void main(String[] args)
        throws FileNotFoundException, IOException, ConfigurationException, InvalidParametersException {
    FileBasedConfigurationBuilder<PropertiesConfiguration> builder = new FileBasedConfigurationBuilder<>(
            PropertiesConfiguration.class).configure(
                    new Parameters().properties().setFileName(args[0]).setThrowExceptionOnMissing(true)
                            .setListDelimiterHandler(new DefaultListDelimiterHandler(';'))
                            .setIncludesAllowed(false));
    Configuration config = builder.getConfiguration();

    String inputSongPath = config.getString("input_song");
    String outputFolderPath = config.getString("output_folder");
    String autoEncoderParamsPath = config.getString("auto_encoder_params");
    String nameGeneratorParamsPath = config.getString("name_generator_params");
    String queueFolderPath = config.getString("queue_folder");
    String referenceQueuePath = config.getString("reference_queue", "nil");
    String inputCorpusFolder = config.getString("input_corpus_folder");
    boolean shouldWriteQueue = config.getBoolean("should_write_generated_queue");
    boolean frankensteinTest = config.getBoolean("queue_tests_frankenstein");
    boolean interpolateTest = config.getBoolean("queue_tests_interpolation");
    boolean iterateOverCorpus = config.getBoolean("iterate_over_corpus", false);
    boolean shouldGenerateSongTitle = config.getBoolean("generate_song_title");
    boolean shouldGenerateSong = config.getBoolean("generate_leadsheet");

    LogTimer.initStartTime(); //start our logging timer to keep track of our execution time
    LogTimer.log("Creating name generator...");

    //here is just silly code for generating name based on an LSTM lol $wag
    LSTM lstm = new LSTM();
    FullyConnectedLayer fullLayer = new FullyConnectedLayer(Operations.None);
    Loadable titleNetLoader = new Loadable() {
        @Override
        public boolean load(INDArray array, String path) {
            String car = pathCar(path);
            String cdr = pathCdr(path);
            switch (car) {
            case "full":
                return fullLayer.load(array, cdr);
            case "lstm":
                return lstm.load(array, cdr);
            default:
                return false;
            }
        }
    };

    LogTimer.log("Packing name generator from files...");
    (new NetworkConnectomeLoader()).load(nameGeneratorParamsPath, titleNetLoader);

    String characterString = " !\"'[],-.01245679:?ABCDEFGHIJKLMNOPQRSTUVWYZabcdefghijklmnopqrstuvwxyz";

    //Initialization
    LogTimer.log("Creating autoencoder...");
    int inputSize = 34;
    int outputSize = EncodingParameters.noteEncoder.getNoteLength();
    int featureVectorSize = 100;
    ProductCompressingAutoencoder autoencoder = new ProductCompressingAutoencoder(24, 48, 84 + 1, false); //create our network

    int numInterpolationDivisions = 5;

    //"pack" the network from weights and biases file directory
    LogTimer.log("Packing autoencoder from files");
    (new NetworkConnectomeLoader()).load(autoEncoderParamsPath, autoencoder);

    File[] songFiles;
    if (iterateOverCorpus) {
        songFiles = new File(inputCorpusFolder).listFiles();
    } else {
        songFiles = new File[] { new File(inputSongPath) };
    }
    for (File inputFile : songFiles) {
        (new NetworkConnectomeLoader()).refresh(autoEncoderParamsPath, autoencoder, "initialstate");
        String songTitle;
        if (shouldGenerateSong) {
            Random rand = new Random();
            AVector charOut = Vector.createLength(characterString.length());
            GroupedSoftMaxSampler sampler = new GroupedSoftMaxSampler(
                    new Group[] { new Group(0, characterString.length(), true) });
            songTitle = "";
            for (int i = 0; i < 50; i++) {
                charOut = fullLayer.forward(lstm.step(charOut));
                charOut = sampler.filter(charOut);
                int charIndex = 0;
                for (; charIndex < charOut.length(); charIndex++) {
                    if (charOut.get(charIndex) == 1.0) {
                        break;
                    }
                }
                songTitle += characterString.substring(charIndex, charIndex + 1);
            }
            songTitle = songTitle.trim();

            LogTimer.log("Generated song name: " + songTitle);
        } else {
            songTitle = "The Song We Never Name";
        }
        LogTimer.log("Reading file...");
        LeadSheetDataSequence inputSequence = LeadSheetIO.readLeadSheet(inputFile); //read our leadsheet to get a data vessel as retrieved in rbm-provisor
        LeadSheetDataSequence outputSequence = inputSequence.copy();

        outputSequence.clearMelody();
        if (interpolateTest) {
            LeadSheetDataSequence additionalOutput = outputSequence.copy();
            for (int i = 0; i < numInterpolationDivisions; i++) {
                outputSequence.concat(additionalOutput.copy());
            }
        }
        LeadSheetDataSequence decoderInputSequence = outputSequence.copy();

        LogTimer.startLog("Encoding data...");
        //TradingTimer.initStart(); //start our trading timer to keep track our our generation versus realtime play
        while (inputSequence.hasNext()) { //iterate through time steps in input data
            //TradingTimer.waitForNextTimedInput();
            autoencoder.encodeStep(inputSequence.retrieve()); //feed the resultant input vector into the network
            if (advanceDecoding) { //if we are using advance decoding (we start decoding as soon as we can)
                if (autoencoder.canDecode()) { //if queue has enough data to decode from
                    outputSequence.pushStep(null, null,
                            autoencoder.decodeStep(decoderInputSequence.retrieve())); //take sampled data for a timestep from autoencoder
                    //TradingTimer.logTimestep(); //log our time to TradingTimer so we can know how far ahead of realtime we are
                }
            }
        }
        LogTimer.endLog();

        if (shouldWriteQueue) {
            String queueFilePath = queueFolderPath + java.io.File.separator
                    + inputFile.getName().replace(".ls", ".q");
            FragmentedNeuralQueue currQueue = autoencoder.getQueue();
            currQueue.writeToFile(queueFilePath);
            LogTimer.log("Wrote queue " + inputFile.getName().replace(".ls", ".q") + " to file...");
        }
        if (shouldGenerateSong) {
            if (interpolateTest) {

                FragmentedNeuralQueue refQueue = new FragmentedNeuralQueue();
                refQueue.initFromFile(referenceQueuePath);

                FragmentedNeuralQueue currQueue = autoencoder.getQueue();
                //currQueue.writeToFile(queueFilePath);

                autoencoder.setQueue(currQueue.copy());
                while (autoencoder.hasDataStepsLeft()) { //we are done encoding all time steps, so just finish decoding!{
                    outputSequence.pushStep(null, null,
                            autoencoder.decodeStep(decoderInputSequence.retrieve())); //take sampled data for a timestep from autoencoder
                    //TradingTimer.logTimestep(); //log our time to TradingTimer so we can know how far ahead of realtime we are       
                }

                for (int i = 1; i <= numInterpolationDivisions; i++) {
                    System.out.println("Starting interpolation " + ((1.0 / numInterpolationDivisions) * (i)));
                    (new NetworkConnectomeLoader()).refresh(autoEncoderParamsPath, autoencoder, "initialstate");
                    FragmentedNeuralQueue currCopy = currQueue.copy();
                    currCopy.basicInterpolate(refQueue, (1.0 / numInterpolationDivisions) * (i));
                    autoencoder.setQueue(currCopy);
                    int timeStep = 0;
                    while (autoencoder.hasDataStepsLeft()) { //we are done encoding all time steps, so just finish decoding!{
                        System.out.println("interpolation " + i + " step " + ++timeStep);
                        outputSequence.pushStep(null, null,
                                autoencoder.decodeStep(decoderInputSequence.retrieve())); //take sampled data for a timestep from autoencoder
                        //TradingTimer.logTimestep(); //log our time to TradingTimer so we can know how far ahead of realtime we are       
                    }
                }

            }
            if (frankensteinTest) {
                LogTimer.startLog("Loading queues");
                File queueFolder = new File(queueFolderPath);
                int numComponents = config.getInt("frankenstein_num_components", 5);
                int numCombinations = config.getInt("frankenstein_num_combinations", 6);
                double interpolationMagnitude = config.getDouble("frankenstein_magnitude", 2.0);
                if (queueFolder.isDirectory()) {
                    File[] queueFiles = queueFolder.listFiles(new FilenameFilter() {
                        @Override
                        public boolean accept(File dir, String name) {
                            return name.contains(".q");
                        }
                    });

                    List<File> fileList = new ArrayList<>();
                    for (File file : queueFiles) {
                        fileList.add(file);
                    }
                    Collections.shuffle(fileList);
                    int numSelectedFiles = (numComponents > queueFiles.length) ? queueFiles.length
                            : numComponents;

                    for (int i = 0; i < queueFiles.length - numSelectedFiles; i++) {
                        fileList.remove(fileList.size() - 1);
                    }
                    List<FragmentedNeuralQueue> queuePopulation = new ArrayList<>(fileList.size());
                    songTitle += " - a mix of ";
                    for (File file : fileList) {
                        FragmentedNeuralQueue newQueue = new FragmentedNeuralQueue();
                        newQueue.initFromFile(file.getPath());
                        queuePopulation.add(newQueue);
                        songTitle += file.getName().replaceAll(".ls", "") + ", ";
                    }
                    LogTimer.endLog();

                    LeadSheetDataSequence additionalOutput = outputSequence.copy();
                    for (int i = 1; i < numCombinations; i++) {
                        outputSequence.concat(additionalOutput.copy());
                    }
                    decoderInputSequence = outputSequence.copy();

                    FragmentedNeuralQueue origQueue = autoencoder.getQueue();

                    for (int i = 0; i < numCombinations; i++) {

                        LogTimer.startLog("Performing queue interpolation...");
                        AVector combinationStrengths = Vector.createLength(queuePopulation.size());
                        Random vectorRand = new Random(i);
                        for (int j = 0; j < combinationStrengths.length(); j++) {
                            combinationStrengths.set(j, vectorRand.nextDouble());
                        }
                        combinationStrengths.divide(combinationStrengths.elementSum());
                        FragmentedNeuralQueue currQueue = origQueue.copy();
                        for (int k = 0; k < combinationStrengths.length(); k++) {
                            currQueue.basicInterpolate(queuePopulation.get(k),
                                    combinationStrengths.get(k) * interpolationMagnitude);
                        }
                        LogTimer.endLog();
                        autoencoder.setQueue(currQueue);
                        LogTimer.startLog("Refreshing autoencoder state...");
                        (new NetworkConnectomeLoader()).refresh(autoEncoderParamsPath, autoencoder,
                                "initialstate");
                        LogTimer.endLog();
                        LogTimer.startLog("Decoding segment...");
                        while (autoencoder.hasDataStepsLeft()) { //we are done encoding all time steps, so just finish decoding!{
                            outputSequence.pushStep(null, null,
                                    autoencoder.decodeStep(decoderInputSequence.retrieve())); //take sampled data for a timestep from autoencoder
                            //TradingTimer.logTimestep(); //log our time to TradingTimer so we can know how far ahead of realtime we are       
                        }
                        LogTimer.endLog();
                    }

                }
            }

            while (autoencoder.hasDataStepsLeft()) { //we are done encoding all time steps, so just finish decoding!{
                outputSequence.pushStep(null, null, autoencoder.decodeStep(decoderInputSequence.retrieve())); //take sampled data for a timestep from autoencoder
                //TradingTimer.logTimestep(); //log our time to TradingTimer so we can know how far ahead of realtime we are       
            }
            LogTimer.log("Writing file...");

            String outputFilename = outputFolderPath + java.io.File.separator
                    + inputFile.getName().replace(".ls", "_Output"); //we'll write our generated file with the same name plus "_Output"
            LeadSheetIO.writeLeadSheet(outputSequence, outputFilename, songTitle);
            System.out.println(outputFilename);
        } else {
            autoencoder.setQueue(new FragmentedNeuralQueue());
        }
    }
    LogTimer.log("Process finished"); //Done!

}
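
In the name-generation loop above, length() sizes the one-hot vector over the character alphabet and bounds the scan for the sampled index; the chosen character is then pulled out with substring(charIndex, charIndex + 1). A stripped-down sketch of that indexing (the alphabet is the one from the example; the "sampled" index is faked):

public class AlphabetIndexing {
    public static void main(String[] args) {
        String alphabet = " !\"'[],-.01245679:?ABCDEFGHIJKLMNOPQRSTUVWYZabcdefghijklmnopqrstuvwxyz";
        double[] oneHot = new double[alphabet.length()];   // one slot per character
        oneHot[alphabet.indexOf('S')] = 1.0;               // pretend the sampler picked 'S'

        int charIndex = 0;
        for (; charIndex < alphabet.length(); charIndex++) {
            if (oneHot[charIndex] == 1.0) break;
        }
        // substring(i, i + 1) yields a one-character String; charAt(i) would return the char itself
        String picked = alphabet.substring(charIndex, charIndex + 1);
        System.out.println(picked);   // S
    }
}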

From source file:TwoFourTree.java

/**
 * The main method which the program executes from and which handles all
 * testing and input/output.
 * 
 * @param args String[]
 */
public static void main(String[] args) throws IOException {
    Comparator myComp = new IntegerComparator();
    TwoFourTree myTree = new TwoFourTree(myComp);
    TwoFourTree firstTree = new TwoFourTree(myComp);
    TwoFourTree secondTree = new TwoFourTree(myComp);
    BufferedReader myReader = new BufferedReader(new InputStreamReader(System.in));
    String input;
    boolean go = true;

    while (go == true) {
        System.out.print(
                "Format like this:  -a 37 to add a 37 to the tree. -r 37 to remove a 37 from the tree: ");
        input = myReader.readLine();

        if (input.substring(0, 2).equals("-a")) {
            Integer myInt = new Integer(input.substring(3, input.length()));
            myTree.insertElement(myInt, myInt);
            myTree.printAllElements();
        } else if (input.substring(0, 2).equals("-r")) {
            Integer myInt = new Integer(input.substring(3, input.length()));
            myTree.removeElement(myInt);
            myTree.printAllElements();
        } else
            System.out.print("\nYou're bad!");

        System.out.print("\n");

    }

    /*System.out.println("Loading firstTree with values ...");
            
    Integer firstInt1 = new Integer(20);
    firstTree.insertElement(firstInt1, firstInt1);
    Integer firstInt2 = new Integer(50);
    firstTree.insertElement(firstInt2, firstInt2);
    Integer firstInt3 = new Integer(30);
    firstTree.insertElement(firstInt3, firstInt3);
    Integer firstInt4 = new Integer(40);
    firstTree.insertElement(firstInt4, firstInt4);
    Integer firstInt5 = new Integer(15);
    firstTree.insertElement(firstInt5, firstInt5);
    Integer firstInt6 = new Integer(35);
    firstTree.insertElement(firstInt6, firstInt6);
    Integer firstInt7 = new Integer(55);
    firstTree.insertElement(firstInt7, firstInt7);
    Integer firstInt8 = new Integer(38);
    firstTree.insertElement(firstInt8, firstInt8);
    Integer firstInt9 = new Integer(17);
    firstTree.insertElement(firstInt9, firstInt9);
    Integer firstInt10 = new Integer(37);
    firstTree.insertElement(firstInt10, firstInt10);
    Integer firstInt11 = new Integer(16);
    firstTree.insertElement(firstInt11, firstInt11);
    Integer firstInt12 = new Integer(36);
    firstTree.insertElement(firstInt12, firstInt12);
    Integer firstInt13 = new Integer(15);
    firstTree.insertElement(firstInt13, firstInt13);
    Integer firstInt14 = new Integer(36);
    firstTree.insertElement(firstInt14, firstInt14);
    Integer firstInt15 = new Integer(30);
    firstTree.insertElement(firstInt15, firstInt15);
    Integer firstInt16 = new Integer(34);
    firstTree.insertElement(firstInt16, firstInt16);
    Integer firstInt17 = new Integer(34);
    firstTree.insertElement(firstInt17, firstInt17);
    Integer firstInt18 = new Integer(30);
    firstTree.insertElement(firstInt18, firstInt18);
    Integer firstInt19 = new Integer(30);
    firstTree.insertElement(firstInt18, firstInt18);
            
    System.out.println("firstTree is fully loaded as follows:");
    firstTree.printAllElements();
    System.out.println("Checking parent/child connections ...");
    firstTree.checkTree(firstTree.root());
    System.out.println("All parent/child connections valid.");
            
    System.out.println("\nRemove 30");
    firstTree.removeElement(30);
    System.out.println("Removed successfully. Print new tree:");
    firstTree.printAllElements();
    System.out.println("");
    System.out.println("Remove 30");
    firstTree.removeElement(30);
    System.out.println("Removed successfully. Print new tree:");
    firstTree.printAllElements();
    System.out.println("");
    System.out.println("Remove 30");
    firstTree.removeElement(30);
    System.out.println("Removed successfully. Print new tree:");
    firstTree.printAllElements();
    System.out.println("");
    System.out.println("Remove 30");
    firstTree.removeElement(30);
    System.out.println("Removed successfully. Print new tree:");
    firstTree.printAllElements();
    System.out.println("");
    System.out.println("Remove 15");
    firstTree.removeElement(15);
    System.out.println("Removed successfully. Print new tree:");
    firstTree.printAllElements();
    System.out.println("");
    System.out.println("Remove 55");
    firstTree.removeElement(55);
    System.out.println("Removed successfully. Print new tree:");
    firstTree.printAllElements();
    System.out.println("");
    System.out.println("Remove 35");
    firstTree.removeElement(35);
    System.out.println("Removed successfully. Print new tree:");
    firstTree.printAllElements();
    System.out.println("");
    System.out.println("Remove 38");
    firstTree.removeElement(38);
    System.out.println("Removed successfully. Print new tree:");
    firstTree.printAllElements();
    System.out.println("");
    System.out.println("Remove 40");
    firstTree.removeElement(40);
    System.out.println("Removed successfully. Print new tree:");
    firstTree.printAllElements();
    System.out.println("");
    System.out.println("Remove 36");
    firstTree.removeElement(36);
    System.out.println("Removed successfully. Print new tree:");
    firstTree.printAllElements();
    System.out.println("");
    System.out.println("Remove 17");
    firstTree.removeElement(17);
    System.out.println("Removed successfully. Print new tree:");
    firstTree.printAllElements();
    System.out.println("");
    System.out.println("Remove 34");
    firstTree.removeElement(34);
    System.out.println("Removed successfully. Print new tree:");
    firstTree.printAllElements();
    System.out.println("");
    System.out.println("Remove 34");
    firstTree.removeElement(34);
    System.out.println("Removed successfully. Print new tree:");
    firstTree.printAllElements();
    System.out.println("");
    System.out.println("Remove 36");
    firstTree.removeElement(36);
    System.out.println("Removed successfully. Print new tree:");
    firstTree.printAllElements();
    System.out.println("");
    System.out.println("Remove 20");
    firstTree.removeElement(20);
    System.out.println("Removed successfully. Print new tree:");
    firstTree.printAllElements();
    System.out.println("");
    System.out.println("Remove 37");
    firstTree.removeElement(37);
    System.out.println("Removed successfully. Print new tree:");
    firstTree.printAllElements();
    System.out.println("");
    System.out.println("Remove 16");
    firstTree.removeElement(16);
    System.out.println("Removed successfully. Print new tree:");
    firstTree.printAllElements();
    System.out.println("");
    System.out.println("Remove 50");
    firstTree.removeElement(50);
    System.out.println("Removed successfully. Print new tree:");
    firstTree.printAllElements();
    System.out.println("");
    System.out.println("Remove 15");
    firstTree.removeElement(15);
    System.out.println("Removed successfully. Print new tree:");
    firstTree.printAllElements();
    System.out.println("done");
            
    System.out.println("\n\nLoading secondTree with values ...");
            
    Integer secondInt1 = new Integer(47);
    secondTree.insertElement(secondInt1, secondInt1);
    Integer secondInt2 = new Integer(83);
    secondTree.insertElement(secondInt2, secondInt2);
    Integer secondInt3 = new Integer(22);
    secondTree.insertElement(secondInt3, secondInt3);
    Integer secondInt4 = new Integer(16);
    secondTree.insertElement(secondInt4, secondInt4);
    Integer secondInt5 = new Integer(49);
    secondTree.insertElement(secondInt5, secondInt5);
    Integer secondInt6 = new Integer(100);
    secondTree.insertElement(secondInt6, secondInt6);
    Integer secondInt7 = new Integer(38);
    secondTree.insertElement(secondInt7, secondInt7);
    Integer secondInt8 = new Integer(3);
    secondTree.insertElement(secondInt8, secondInt8);
    Integer secondInt9 = new Integer(53);
    secondTree.insertElement(secondInt9, secondInt9);
    Integer secondInt10 = new Integer(66);
    secondTree.insertElement(secondInt10, secondInt10);
    Integer secondInt11 = new Integer(19);
    secondTree.insertElement(secondInt11, secondInt11);
    Integer secondInt12 = new Integer(23);
    secondTree.insertElement(secondInt12, secondInt12);
    Integer secondInt13 = new Integer(24);
    secondTree.insertElement(secondInt13, secondInt13);
    Integer secondInt14 = new Integer(88);
    secondTree.insertElement(secondInt14, secondInt14);
    Integer secondInt15 = new Integer(1);
    secondTree.insertElement(secondInt15, secondInt15);
    Integer secondInt16 = new Integer(97);
    secondTree.insertElement(secondInt16, secondInt16);
    Integer secondInt17 = new Integer(94);
    secondTree.insertElement(secondInt17, secondInt17);
    Integer secondInt18 = new Integer(35);
    secondTree.insertElement(secondInt18, secondInt18);
    Integer secondInt19 = new Integer(51);
    secondTree.insertElement(secondInt19, secondInt19);
            
    System.out.println("firstTree is fully loaded as follows:");
    secondTree.printAllElements();
    System.out.println("Checking parent/child connections ...");
    secondTree.checkTree(secondTree.root());
    System.out.println("All parent/child connections valid.");
            
    System.out.println("\nRemove 19");
    secondTree.removeElement(19);
    System.out.println("Removed successfully. Print new tree:");
    secondTree.printAllElements();
    System.out.println("");
    System.out.println("Remove 3");
    secondTree.removeElement(3);
    System.out.println("Removed successfully. Print new tree:");
    secondTree.printAllElements();
    System.out.println("");
    System.out.println("Remove 66");
    secondTree.removeElement(66);
    System.out.println("Removed successfully. Print new tree:");
    secondTree.printAllElements();
    System.out.println("");
    System.out.println("Remove 88");
    secondTree.removeElement(88);
    System.out.println("Removed successfully. Print new tree:");
    secondTree.printAllElements();
    System.out.println("");
    System.out.println("Remove 94");
    secondTree.removeElement(94);
    System.out.println("Removed successfully. Print new tree:");
    secondTree.printAllElements();
    System.out.println("");
    System.out.println("Remove 100");
    secondTree.removeElement(100);
    System.out.println("Removed successfully. Print new tree:");
    secondTree.printAllElements();
    System.out.println("");
    System.out.println("Remove 97");
    secondTree.removeElement(97);
    System.out.println("Removed successfully. Print new tree:");
    secondTree.printAllElements();
    System.out.println("");
    System.out.println("Remove 47");
    secondTree.removeElement(47);
    System.out.println("Removed successfully. Print new tree:");
    secondTree.printAllElements();
    System.out.println("");
    System.out.println("Remove 53");
    secondTree.removeElement(53);
    System.out.println("Removed successfully. Print new tree:");
    secondTree.printAllElements();
    System.out.println("");
    System.out.println("Remove 38");
    secondTree.removeElement(38);
    System.out.println("Removed successfully. Print new tree:");
    secondTree.printAllElements();
    System.out.println("");
    System.out.println("Remove 24");
    secondTree.removeElement(24);
    System.out.println("Removed successfully. Print new tree:");
    secondTree.printAllElements();
    System.out.println("");
    System.out.println("Remove 83");
    secondTree.removeElement(83);
    System.out.println("Removed successfully. Print new tree:");
    secondTree.printAllElements();
    System.out.println("");
    System.out.println("Remove 49");
    secondTree.removeElement(49);
    System.out.println("Removed successfully. Print new tree:");
    secondTree.printAllElements();
    System.out.println("");
    System.out.println("Remove 23");
    secondTree.removeElement(23);
    System.out.println("Removed successfully. Print new tree:");
    secondTree.printAllElements();
    System.out.println("");
    System.out.println("Remove 51");
    secondTree.removeElement(51);
    System.out.println("Removed successfully. Print new tree:");
    secondTree.printAllElements();
    System.out.println("");
    System.out.println("Remove 1");
    secondTree.removeElement(1);
    System.out.println("Removed successfully. Print new tree:");
    secondTree.printAllElements();
    System.out.println("");
    System.out.println("Remove 22");
    secondTree.removeElement(22);
    System.out.println("Removed successfully. Print new tree:");
    secondTree.printAllElements();
    System.out.println("");
    System.out.println("Remove 16");
    secondTree.removeElement(16);
    System.out.println("Removed successfully. Print new tree:");
    secondTree.printAllElements();
    System.out.println("");
    System.out.println("Remove 35");
    secondTree.removeElement(35);
    System.out.println("Removed successfully. Print new tree:");
    secondTree.printAllElements();
    System.out.println("done");*/
}
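
The command parsing above extracts the number with input.substring(3, input.length()); passing length() as the end index is equivalent to the single-argument substring(3), which reads to the end of the string. A minimal sketch (the input literal is ours):

public class ParseCommand {
    public static void main(String[] args) {
        String input = "-a 37";
        String flag = input.substring(0, 2);                    // "-a"
        String numberText = input.substring(3, input.length()); // "37"
        // Equivalent, since length() marks the end of the string:
        String sameText = input.substring(3);
        System.out.println(flag + " -> " + Integer.parseInt(numberText) + " / " + sameText);
    }
}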

From source file:com.joliciel.frenchTreebank.FrenchTreebank.java

/**
 * @param args
 */
public static void main(String[] args) throws Exception {
    String command = args[0];

    String outFilePath = "";
    String outDirPath = "";
    String treebankPath = "";
    String ftbFileName = "";
    String rawTextDir = "";
    String queryPath = "";
    String sentenceNumber = null;
    boolean firstArg = true;
    for (String arg : args) {
        if (firstArg) {
            firstArg = false;
            continue;
        }
        int equalsPos = arg.indexOf('=');
        String argName = arg.substring(0, equalsPos);
        String argValue = arg.substring(equalsPos + 1);
        if (argName.equals("outfile"))
            outFilePath = argValue;
        else if (argName.equals("outdir"))
            outDirPath = argValue;
        else if (argName.equals("ftbFileName"))
            ftbFileName = argValue;
        else if (argName.equals("treebank"))
            treebankPath = argValue;
        else if (argName.equals("sentence"))
            sentenceNumber = argValue;
        else if (argName.equals("query"))
            queryPath = argValue;
        else if (argName.equals("rawTextDir"))
            rawTextDir = argValue;
        else
            throw new RuntimeException("Unknown argument: " + argName);
    }

    TalismaneServiceLocator talismaneServiceLocator = TalismaneServiceLocator.getInstance();

    TreebankServiceLocator locator = TreebankServiceLocator.getInstance(talismaneServiceLocator);

    if (treebankPath.length() == 0)
        locator.setDataSourcePropertiesFile("jdbc-live.properties");

    if (command.equals("search")) {
        final SearchService searchService = locator.getSearchService();
        final XmlPatternSearch search = searchService.newXmlPatternSearch();
        search.setXmlPatternFile(queryPath);
        List<SearchResult> searchResults = search.perform();

        FileWriter fileWriter = new FileWriter(outFilePath);
        for (SearchResult searchResult : searchResults) {
            String lineToWrite = "";
            Sentence sentence = searchResult.getSentence();
            Phrase phrase = searchResult.getPhrase();
            lineToWrite += sentence.getFile().getFileName() + "|";
            lineToWrite += sentence.getSentenceNumber() + "|";
            List<PhraseUnit> phraseUnits = searchResult.getPhraseUnits();
            LOG.debug("Phrase: " + phrase.getId());
            for (PhraseUnit phraseUnit : phraseUnits)
                lineToWrite += phraseUnit.getLemma().getText() + "|";
            lineToWrite += phrase.getText();
            fileWriter.write(lineToWrite + "\n");
        }
        fileWriter.flush();
        fileWriter.close();
    } else if (command.equals("load")) {
        final TreebankService treebankService = locator.getTreebankService();
        final TreebankSAXParser parser = new TreebankSAXParser();
        parser.setTreebankService(treebankService);
        parser.parseDocument(treebankPath);
    } else if (command.equals("loadAll")) {
        final TreebankService treebankService = locator.getTreebankService();

        File dir = new File(treebankPath);

        String firstFile = null;
        if (args.length > 2)
            firstFile = args[2];
        String[] files = dir.list();
        if (files == null) {
            throw new RuntimeException("Not a directory or no children: " + treebankPath);
        } else {
            boolean startProcessing = true;
            if (firstFile != null)
                startProcessing = false;
            for (int i = 0; i < files.length; i++) {
                if (!startProcessing && files[i].equals(firstFile))
                    startProcessing = true;
                if (startProcessing) {
                    String filePath = treebankPath + "/" + files[i];
                    LOG.debug(filePath);
                    final TreebankSAXParser parser = new TreebankSAXParser();
                    parser.setTreebankService(treebankService);
                    parser.parseDocument(filePath);
                }
            }
        }
    } else if (command.equals("loadRawText")) {
        final TreebankService treebankService = locator.getTreebankService();
        final TreebankRawTextAssigner assigner = new TreebankRawTextAssigner();
        assigner.setTreebankService(treebankService);
        assigner.setRawTextDirectory(rawTextDir);
        assigner.loadRawText();
    } else if (command.equals("tokenize")) {
        Writer csvFileWriter = null;
        if (outFilePath != null && outFilePath.length() > 0) {
            if (outFilePath.lastIndexOf("/") > 0) {
                String outputDirPath = outFilePath.substring(0, outFilePath.lastIndexOf("/"));
                File outputDir = new File(outputDirPath);
                outputDir.mkdirs();
            }

            File csvFile = new File(outFilePath);
            csvFile.delete();
            csvFile.createNewFile();
            csvFileWriter = new BufferedWriter(
                    new OutputStreamWriter(new FileOutputStream(csvFile, false), "UTF8"));
        }
        try {

            final TreebankService treebankService = locator.getTreebankService();
            TreebankExportService treebankExportService = locator.getTreebankExportServiceLocator()
                    .getTreebankExportService();
            TreebankUploadService treebankUploadService = locator.getTreebankUploadServiceLocator()
                    .getTreebankUploadService();
            TreebankReader treebankReader = null;

            if (treebankPath.length() > 0) {
                File treebankFile = new File(treebankPath);
                if (sentenceNumber != null)
                    treebankReader = treebankUploadService.getXmlReader(treebankFile, sentenceNumber);
                else
                    treebankReader = treebankUploadService.getXmlReader(treebankFile);

            } else {
                treebankReader = treebankService.getDatabaseReader(TreebankSubSet.ALL, 0);
            }

            TokeniserAnnotatedCorpusReader reader = treebankExportService
                    .getTokeniserAnnotatedCorpusReader(treebankReader, csvFileWriter);

            while (reader.hasNextTokenSequence()) {
                TokenSequence tokenSequence = reader.nextTokenSequence();
                List<Integer> tokenSplits = tokenSequence.getTokenSplits();
                String sentence = tokenSequence.getText();
                LOG.debug(sentence);
                int currentPos = 0;
                StringBuilder sb = new StringBuilder();
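                // Rebuild the tokenised sentence: each split is an end offset into the sentence
                // string, so substring(currentPos, split) yields one token at a time.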
                for (int split : tokenSplits) {
                    if (split == 0)
                        continue;
                    String token = sentence.substring(currentPos, split);
                    sb.append('|');
                    sb.append(token);
                    currentPos = split;
                }
                LOG.debug(sb.toString());
            }
        } finally {
            if (csvFileWriter != null) {
                csvFileWriter.flush();
                csvFileWriter.close();
            }
        }
    } else if (command.equals("export")) {
        if (outDirPath.length() == 0)
            throw new RuntimeException("Parameter required: outdir");
        File outDir = new File(outDirPath);
        outDir.mkdirs();

        final TreebankService treebankService = locator.getTreebankService();
        FrenchTreebankXmlWriter xmlWriter = new FrenchTreebankXmlWriter();
        xmlWriter.setTreebankService(treebankService);

        if (ftbFileName.length() == 0) {
            xmlWriter.write(outDir);
        } else {
            TreebankFile ftbFile = treebankService.loadTreebankFile(ftbFileName);
            String fileName = ftbFileName.substring(ftbFileName.lastIndexOf('/') + 1);
            File xmlFile = new File(outDir, fileName);
            xmlFile.delete();
            xmlFile.createNewFile();

            Writer xmlFileWriter = new BufferedWriter(
                    new OutputStreamWriter(new FileOutputStream(xmlFile, false), "UTF8"));
            xmlWriter.write(xmlFileWriter, ftbFile);
            xmlFileWriter.flush();
            xmlFileWriter.close();
        }
    } else {
        throw new RuntimeException("Unknown command: " + command);
    }
    LOG.debug("========== END ============");
}
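A note on the pattern above: each argument after the command is split into a name and a value with indexOf('=') and substring, and an option left at its "" default is detected with String.length() == 0 (for example, an empty treebankPath switches the tool to its database configuration). The following is a minimal, self-contained sketch of that idiom; the option names are illustrative only and not part of FrenchTreebank.

import java.util.HashMap;
import java.util.Map;

public class KeyValueArgsSketch {
    public static void main(String[] args) {
        Map<String, String> options = new HashMap<>();
        for (String arg : args) {
            int equalsPos = arg.indexOf('=');
            if (equalsPos < 0)
                throw new IllegalArgumentException("Expected name=value, got: " + arg);
            options.put(arg.substring(0, equalsPos), arg.substring(equalsPos + 1));
        }
        // Unset options default to the empty string, so length() == 0 means "not provided".
        String treebankPath = options.getOrDefault("treebank", "");
        if (treebankPath.length() == 0)
            System.out.println("No treebank path given, using the default data source");
        else
            System.out.println("Treebank path: " + treebankPath);
    }
}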

From source file:edu.nyu.vida.data_polygamy.standard_techniques.CorrelationTechniques.java

/**
 * @param args
 * @throws IOException
 * @throws InterruptedException
 * @throws ClassNotFoundException
 */
@SuppressWarnings({ "deprecation" })
public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {

    Options options = new Options();

    Option forceOption = new Option("f", "force", false,
            "force the computation of the relationship " + "even if files already exist");
    forceOption.setRequired(false);
    options.addOption(forceOption);

    Option g1Option = new Option("g1", "first-group", true, "set first group of datasets");
    g1Option.setRequired(true);
    g1Option.setArgName("FIRST GROUP");
    g1Option.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(g1Option);

    Option g2Option = new Option("g2", "second-group", true, "set second group of datasets");
    g2Option.setRequired(false);
    g2Option.setArgName("SECOND GROUP");
    g2Option.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(g2Option);

    Option machineOption = new Option("m", "machine", true, "machine identifier");
    machineOption.setRequired(true);
    machineOption.setArgName("MACHINE");
    machineOption.setArgs(1);
    options.addOption(machineOption);

    Option nodesOption = new Option("n", "nodes", true, "number of nodes");
    nodesOption.setRequired(true);
    nodesOption.setArgName("NODES");
    nodesOption.setArgs(1);
    options.addOption(nodesOption);

    Option s3Option = new Option("s3", "s3", false, "data on Amazon S3");
    s3Option.setRequired(false);
    options.addOption(s3Option);

    Option awsAccessKeyIdOption = new Option("aws_id", "aws-id", true,
            "aws access key id; " + "this is required if the execution is on aws");
    awsAccessKeyIdOption.setRequired(false);
    awsAccessKeyIdOption.setArgName("AWS-ACCESS-KEY-ID");
    awsAccessKeyIdOption.setArgs(1);
    options.addOption(awsAccessKeyIdOption);

    Option awsSecretAccessKeyOption = new Option("aws_key", "aws-key", true,
            "aws secret access key; " + "this is required if the execution is on aws");
    awsSecretAccessKeyOption.setRequired(false);
    awsSecretAccessKeyOption.setArgName("AWS-SECRET-ACCESS-KEY");
    awsSecretAccessKeyOption.setArgs(1);
    options.addOption(awsSecretAccessKeyOption);

    Option bucketOption = new Option("b", "s3-bucket", true,
            "bucket on s3; " + "this is required if the execution is on aws");
    bucketOption.setRequired(false);
    bucketOption.setArgName("S3-BUCKET");
    bucketOption.setArgs(1);
    options.addOption(bucketOption);

    Option helpOption = new Option("h", "help", false, "display this message");
    helpOption.setRequired(false);
    options.addOption(helpOption);

    HelpFormatter formatter = new HelpFormatter();
    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;

    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e) {
        formatter.printHelp(
                "hadoop jar data-polygamy.jar "
                        + "edu.nyu.vida.data_polygamy.standard_techniques.CorrelationTechniques",
                options, true);
        System.exit(0);
    }

    if (cmd.hasOption("h")) {
        formatter.printHelp(
                "hadoop jar data-polygamy.jar "
                        + "edu.nyu.vida.data_polygamy.standard_techniques.CorrelationTechniques",
                options, true);
        System.exit(0);
    }

    boolean s3 = cmd.hasOption("s3");
    String s3bucket = "";
    String awsAccessKeyId = "";
    String awsSecretAccessKey = "";

    if (s3) {
        if ((!cmd.hasOption("aws_id")) || (!cmd.hasOption("aws_key")) || (!cmd.hasOption("b"))) {
            System.out.println(
                    "Arguments 'aws_id', 'aws_key', and 'b'" + " are mandatory if execution is on AWS.");
            formatter.printHelp(
                    "hadoop jar data-polygamy.jar "
                            + "edu.nyu.vida.data_polygamy.standard_techniques.CorrelationTechniques",
                    options, true);
            System.exit(0);
        }
        s3bucket = cmd.getOptionValue("b");
        awsAccessKeyId = cmd.getOptionValue("aws_id");
        awsSecretAccessKey = cmd.getOptionValue("aws_key");
    }

    boolean snappyCompression = false;
    boolean bzip2Compression = false;
    String machine = cmd.getOptionValue("m");
    int nbNodes = Integer.parseInt(cmd.getOptionValue("n"));

    Configuration s3conf = new Configuration();
    if (s3) {
        s3conf.set("fs.s3.awsAccessKeyId", awsAccessKeyId);
        s3conf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey);
        s3conf.set("bucket", s3bucket);
    }

    Path path = null;
    FileSystem fs = FileSystem.get(new Configuration());

    ArrayList<String> shortDataset = new ArrayList<String>();
    ArrayList<String> firstGroup = new ArrayList<String>();
    ArrayList<String> secondGroup = new ArrayList<String>();
    HashMap<String, String> datasetAgg = new HashMap<String, String>();

    boolean removeExistingFiles = cmd.hasOption("f");

    String[] firstGroupCmd = cmd.getOptionValues("g1");
    String[] secondGroupCmd = cmd.hasOption("g2") ? cmd.getOptionValues("g2") : new String[0];
    addDatasets(firstGroupCmd, firstGroup, shortDataset, datasetAgg, path, fs, s3conf, s3, s3bucket);
    addDatasets(secondGroupCmd, secondGroup, shortDataset, datasetAgg, path, fs, s3conf, s3, s3bucket);

    if (shortDataset.size() == 0) {
        System.out.println("No datasets to process.");
        System.exit(0);
    }

    if (firstGroup.isEmpty()) {
        System.out.println("First group of datasets (G1) is empty. " + "Doing G1 = G2.");
        firstGroup.addAll(secondGroup);
    }

    if (secondGroup.isEmpty()) {
        System.out.println("Second group of datasets (G2) is empty. " + "Doing G2 = G1.");
        secondGroup.addAll(firstGroup);
    }

    // getting dataset ids

    String datasetNames = "";
    String datasetIds = "";
    HashMap<String, String> datasetId = new HashMap<String, String>();
    Iterator<String> it = shortDataset.iterator();
    while (it.hasNext()) {
        datasetId.put(it.next(), null);
    }

    if (s3) {
        path = new Path(s3bucket + FrameworkUtils.datasetsIndexDir);
        fs = FileSystem.get(path.toUri(), s3conf);
    } else {
        path = new Path(fs.getHomeDirectory() + "/" + FrameworkUtils.datasetsIndexDir);
    }
    BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(path)));
    String line = br.readLine();
    while (line != null) {
        String[] dt = line.split("\t");
        if (datasetId.containsKey(dt[0])) {
            datasetId.put(dt[0], dt[1]);
            datasetNames += dt[0] + ",";
            datasetIds += dt[1] + ",";
        }
        line = br.readLine();
    }
    br.close();
    if (s3)
        fs.close();

    datasetNames = datasetNames.substring(0, datasetNames.length() - 1);
    datasetIds = datasetIds.substring(0, datasetIds.length() - 1);
    it = shortDataset.iterator();
    while (it.hasNext()) {
        String dataset = it.next();
        if (datasetId.get(dataset) == null) {
            System.out.println("No dataset id for " + dataset);
            System.exit(0);
        }
    }

    String firstGroupStr = "";
    String secondGroupStr = "";
    for (String dataset : firstGroup) {
        firstGroupStr += datasetId.get(dataset) + ",";
    }
    for (String dataset : secondGroup) {
        secondGroupStr += datasetId.get(dataset) + ",";
    }
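    // Drop the trailing comma appended by the loops above (substring up to length() - 1).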
    firstGroupStr = firstGroupStr.substring(0, firstGroupStr.length() - 1);
    secondGroupStr = secondGroupStr.substring(0, secondGroupStr.length() - 1);

    FrameworkUtils.createDir(s3bucket + FrameworkUtils.correlationTechniquesDir, s3conf, s3);

    String dataAttributesInputDirs = "";
    String noRelationship = "";

    HashSet<String> dirs = new HashSet<String>();

    String dataset1;
    String dataset2;
    String datasetId1;
    String datasetId2;
    for (int i = 0; i < firstGroup.size(); i++) {
        for (int j = 0; j < secondGroup.size(); j++) {

            if (Integer.parseInt(datasetId.get(firstGroup.get(i))) < Integer
                    .parseInt(datasetId.get(secondGroup.get(j)))) {
                dataset1 = firstGroup.get(i);
                dataset2 = secondGroup.get(j);
            } else {
                dataset1 = secondGroup.get(j);
                dataset2 = firstGroup.get(i);
            }

            datasetId1 = datasetId.get(dataset1);
            datasetId2 = datasetId.get(dataset2);

            if (dataset1.equals(dataset2))
                continue;
            String correlationOutputFileName = s3bucket + FrameworkUtils.correlationTechniquesDir + "/"
                    + dataset1 + "-" + dataset2 + "/";

            if (removeExistingFiles) {
                FrameworkUtils.removeFile(correlationOutputFileName, s3conf, s3);
            }
            if (!FrameworkUtils.fileExists(correlationOutputFileName, s3conf, s3)) {
                dirs.add(s3bucket + FrameworkUtils.aggregatesDir + "/" + dataset1);
                dirs.add(s3bucket + FrameworkUtils.aggregatesDir + "/" + dataset2);
            } else {
                noRelationship += datasetId1 + "-" + datasetId2 + ",";
            }
        }
    }

    if (dirs.isEmpty()) {
        System.out.println("All the relationships were already computed.");
        System.out.println("Use -f in the beginning of the command line to force the computation.");
        System.exit(0);
    }

    for (String dir : dirs) {
        dataAttributesInputDirs += dir + ",";
    }

    Configuration conf = new Configuration();
    Machine machineConf = new Machine(machine, nbNodes);

    String jobName = "correlation";
    String correlationOutputDir = s3bucket + FrameworkUtils.correlationTechniquesDir + "/tmp/";

    FrameworkUtils.removeFile(correlationOutputDir, s3conf, s3);

    for (int i = 0; i < shortDataset.size(); i++) {
        conf.set("dataset-" + datasetId.get(shortDataset.get(i)) + "-agg", datasetAgg.get(shortDataset.get(i)));
    }
    for (int i = 0; i < shortDataset.size(); i++) {
        conf.set("dataset-" + datasetId.get(shortDataset.get(i)) + "-agg-size",
                Integer.toString(datasetAgg.get(shortDataset.get(i)).split(",").length));
    }
    conf.set("dataset-keys", datasetIds);
    conf.set("dataset-names", datasetNames);
    conf.set("first-group", firstGroupStr);
    conf.set("second-group", secondGroupStr);
    conf.set("main-dataset-id", datasetId.get(shortDataset.get(0)));
    if (noRelationship.length() > 0) {
        conf.set("no-relationship", noRelationship.substring(0, noRelationship.length() - 1));
    }

    conf.set("mapreduce.tasktracker.map.tasks.maximum", String.valueOf(machineConf.getMaximumTasks()));
    conf.set("mapreduce.tasktracker.reduce.tasks.maximum", String.valueOf(machineConf.getMaximumTasks()));
    conf.set("mapreduce.jobtracker.maxtasks.perjob", "-1");
    conf.set("mapreduce.reduce.shuffle.parallelcopies", "20");
    conf.set("mapreduce.input.fileinputformat.split.minsize", "0");
    conf.set("mapreduce.task.io.sort.mb", "200");
    conf.set("mapreduce.task.io.sort.factor", "100");
    conf.set("mapreduce.task.timeout", "2400000");

    if (s3) {
        machineConf.setMachineConfiguration(conf);
        conf.set("fs.s3.awsAccessKeyId", awsAccessKeyId);
        conf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey);
        conf.set("bucket", s3bucket);
    }

    if (snappyCompression) {
        conf.set("mapreduce.map.output.compress", "true");
        conf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec");
        //conf.set("mapreduce.output.fileoutputformat.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec");
    }
    if (bzip2Compression) {
        conf.set("mapreduce.map.output.compress", "true");
        conf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.BZip2Codec");
        //conf.set("mapreduce.output.fileoutputformat.compress.codec", "org.apache.hadoop.io.compress.BZip2Codec");
    }

    Job job = new Job(conf);
    job.setJobName(jobName);

    job.setMapOutputKeyClass(PairAttributeWritable.class);
    job.setMapOutputValueClass(SpatioTemporalValueWritable.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);

    job.setMapperClass(CorrelationTechniquesMapper.class);
    job.setReducerClass(CorrelationTechniquesReducer.class);
    job.setNumReduceTasks(machineConf.getNumberReduces());

    job.setInputFormatClass(SequenceFileInputFormat.class);
    LazyOutputFormat.setOutputFormatClass(job, TextOutputFormat.class);

    FileInputFormat.setInputDirRecursive(job, true);
    FileInputFormat.setInputPaths(job,
            dataAttributesInputDirs.substring(0, dataAttributesInputDirs.length() - 1));
    FileOutputFormat.setOutputPath(job, new Path(correlationOutputDir));

    job.setJarByClass(CorrelationTechniques.class);

    long start = System.currentTimeMillis();
    job.submit();
    job.waitForCompletion(true);
    System.out.println(jobName + "\t" + (System.currentTimeMillis() - start));

    // moving files to right place
    for (int i = 0; i < firstGroup.size(); i++) {
        for (int j = 0; j < secondGroup.size(); j++) {

            if (Integer.parseInt(datasetId.get(firstGroup.get(i))) < Integer
                    .parseInt(datasetId.get(secondGroup.get(j)))) {
                dataset1 = firstGroup.get(i);
                dataset2 = secondGroup.get(j);
            } else {
                dataset1 = secondGroup.get(j);
                dataset2 = firstGroup.get(i);
            }

            if (dataset1.equals(dataset2))
                continue;

            String from = s3bucket + FrameworkUtils.correlationTechniquesDir + "/tmp/" + dataset1 + "-"
                    + dataset2 + "/";
            String to = s3bucket + FrameworkUtils.correlationTechniquesDir + "/" + dataset1 + "-" + dataset2
                    + "/";
            FrameworkUtils.renameFile(from, to, s3conf, s3);
        }
    }
}
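Throughout the job set-up above, comma-separated lists (dataset names and ids, group strings, input directories) are built by appending value + "," in a loop and then trimmed with substring(0, s.length() - 1). Below is a short sketch of that idiom, with String.join shown as the usual alternative; the values are invented for illustration.

import java.util.Arrays;
import java.util.List;

public class TrailingCommaSketch {
    public static void main(String[] args) {
        List<String> datasetIds = Arrays.asList("12", "7", "31"); // illustrative values only

        // Pattern used above: append a delimiter after every element, then cut off the last one.
        String ids = "";
        for (String id : datasetIds) {
            ids += id + ",";
        }
        if (ids.length() > 0) {
            ids = ids.substring(0, ids.length() - 1); // drop the trailing ','
        }
        System.out.println(ids); // 12,7,31

        // Equivalent without the manual trim (Java 8+):
        System.out.println(String.join(",", datasetIds));
    }
}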

From source file:com.aerospike.load.AerospikeLoad.java

public static void main(String[] args) throws IOException {

    Thread statPrinter = new Thread(new PrintStat(counters));
    try {
        log.info("Aerospike loader started");
        Options options = new Options();
        options.addOption("h", "host", true, "Server hostname (default: localhost)");
        options.addOption("p", "port", true, "Server port (default: 3000)");
        options.addOption("n", "namespace", true, "Namespace (default: test)");
        options.addOption("s", "set", true, "Set name. (default: null)");
        options.addOption("c", "config", true, "Column definition file name");
        options.addOption("wt", "write-threads", true,
                "Number of writer threads (default: Number of cores * 5)");
        options.addOption("rt", "read-threads", true,
                "Number of reader threads (default: Number of cores * 1)");
        options.addOption("l", "rw-throttle", true, "Throttling of reader to writer (default: 10k)");
        options.addOption("tt", "transaction-timeout", true,
                "Write transaction timeout in milliseconds (default: no timeout)");
        options.addOption("et", "expiration-time", true,
                "Expiration time of records in seconds (default: never expire)");
        options.addOption("T", "timezone", true,
                "Timezone of source where data dump is taken (default: local timezone)");
        options.addOption("ec", "abort-error-count", true, "Error count to abort (default: 0)");
        options.addOption("wa", "write-action", true, "Write action if key already exists (default: update)");
        options.addOption("v", "verbose", false, "Logging all");
        options.addOption("u", "usage", false, "Print usage.");

        CommandLineParser parser = new PosixParser();
        CommandLine cl = parser.parse(options, args, false);

        if (args.length == 0 || cl.hasOption("u")) {
            logUsage(options);
            return;
        }

        if (cl.hasOption("l")) {
            rwThrottle = Integer.parseInt(cl.getOptionValue("l"));
        } else {
            rwThrottle = Constants.READLOAD;
        }
        // Get all command line options
        params = Utils.parseParameters(cl);

        //Get client instance
        AerospikeClient client = new AerospikeClient(params.host, params.port);
        if (!client.isConnected()) {
            log.error("Client is not able to connect:" + params.host + ":" + params.port);
            return;
        }

        if (params.verbose) {
            log.setLevel(Level.DEBUG);
        }

        // Get available processors to calculate default number of threads
        int cpus = Runtime.getRuntime().availableProcessors();
        nWriterThreads = cpus * scaleFactor;
        nReaderThreads = cpus;

        // Get writer thread count
        if (cl.hasOption("wt")) {
            nWriterThreads = Integer.parseInt(cl.getOptionValue("wt"));
            nWriterThreads = (nWriterThreads > 0
                    ? (nWriterThreads > Constants.MAX_THREADS ? Constants.MAX_THREADS : nWriterThreads)
                    : 1);
            log.debug("Using writer Threads: " + nWriterThreads);
        }
        writerPool = Executors.newFixedThreadPool(nWriterThreads);

        // Get reader thread count
        if (cl.hasOption("rt")) {
            nReaderThreads = Integer.parseInt(cl.getOptionValue("rt"));
            nReaderThreads = (nReaderThreads > 0
                    ? (nReaderThreads > Constants.MAX_THREADS ? Constants.MAX_THREADS : nReaderThreads)
                    : 1);
            log.debug("Using reader Threads: " + nReaderThreads);
        }

        String columnDefinitionFileName = cl.getOptionValue("c", null);

        log.debug("Column definition files/directory: " + columnDefinitionFileName);
        if (columnDefinitionFileName == null) {
            log.error("Column definition files/directory not specified. use -c <file name>");
            return;
        }

        File columnDefinitionFile = new File(columnDefinitionFileName);
        if (!columnDefinitionFile.exists()) {
            log.error("Column definition files/directory does not exist: "
                    + Utils.getFileName(columnDefinitionFileName));
            return;
        }

        // Get data file list
        String[] files = cl.getArgs();
        if (files.length == 0) {
            log.error("No data file specified: add <file/dir name> to the end of the command");
            return;
        }
        List<String> allFileNames = new ArrayList<String>();
        allFileNames = Utils.getFileNames(files);
        if (allFileNames.size() == 0) {
            log.error("Given datafiles/directory does not exist");
            return;
        }
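        // File.length() below returns the file size in bytes; that byte count is what
        // gets added to counters.write.recordTotal for each data file.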
        for (int i = 0; i < allFileNames.size(); i++) {
            log.debug("File names:" + Utils.getFileName(allFileNames.get(i)));
            File file = new File(allFileNames.get(i));
            counters.write.recordTotal = counters.write.recordTotal + file.length();
        }

        //remove column definition file from list
        allFileNames.remove(columnDefinitionFileName);

        log.info("Number of data files:" + allFileNames.size());

        /**
         * Process column definition file to get meta data and bin mapping.
         */
        metadataColumnDefs = new ArrayList<ColumnDefinition>();
        binColumnDefs = new ArrayList<ColumnDefinition>();
        metadataConfigs = new HashMap<String, String>();

        if (Parser.processJSONColumnDefinitions(columnDefinitionFile, metadataConfigs, metadataColumnDefs,
                binColumnDefs, params)) {
            log.info("Config file processed.");
        } else {
            throw new Exception("Config file parsing Error");
        }

        // Add metadata of config to parameters
        String metadata;
        if ((metadata = metadataConfigs.get(Constants.INPUT_TYPE)) != null) {
            params.fileType = metadata;
            if (params.fileType.equals(Constants.CSV_FILE)) {

                // Version check
                metadata = metadataConfigs.get(Constants.VERSION);
                String[] vNumber = metadata.split("\\.");
                int v1 = Integer.parseInt(vNumber[0]);
                int v2 = Integer.parseInt(vNumber[1]);
                if ((v1 <= Constants.MajorV) && (v2 <= Constants.MinorV)) {
                    log.debug("Config version used:" + metadata);
                } else
                    throw new Exception("\"" + Constants.VERSION + ":" + metadata + "\" is not Supported");

                // Set delimiter (must be exactly one character)
                if ((metadata = metadataConfigs.get(Constants.DELIMITER)) != null && metadata.length() == 1) {
                    params.delimiter = metadata.charAt(0);
                } else {
                    log.warn("\"" + Constants.DELIMITER + ":" + metadata
                            + "\" is not properly specified in config file. Default is ','");
                }

                if ((metadata = metadataConfigs.get(Constants.IGNORE_FIRST_LINE)) != null) {
                    params.ignoreFirstLine = metadata.equals("true");
                } else {
                    log.warn("\"" + Constants.IGNORE_FIRST_LINE + ":" + metadata
                            + "\" is not properly specified in config file. Default is false");
                }

                if ((metadata = metadataConfigs.get(Constants.COLUMNS)) != null) {
                    counters.write.colTotal = Integer.parseInt(metadata);
                } else {
                    throw new Exception("\"" + Constants.COLUMNS + ":" + metadata
                            + "\" is not properly specified in config file");
                }
            } else {
                throw new Exception("\"" + params.fileType + "\" is not supported in config file");
            }
        } else {
            throw new Exception("\"" + Constants.INPUT_TYPE + "\" is not specified in config file");
        }

        // add config input to column definitions
        if (params.fileType.equals(Constants.CSV_FILE)) {
            List<String> binName = null;
            if (params.ignoreFirstLine) {
                String line;
                BufferedReader br = new BufferedReader(
                        new InputStreamReader(new FileInputStream(allFileNames.get(0)), "UTF8"));
                if ((line = br.readLine()) != null) {
                    binName = Parser.getCSVRawColumns(line, params.delimiter);
                    br.close();
                    if (binName.size() != counters.write.colTotal) {
                        throw new Exception("Number of columns in config file and data file do not match."
                                + " Datafile: " + Utils.getFileName(allFileNames.get(0)) + " Configfile: "
                                + Utils.getFileName(columnDefinitionFileName));
                    }
                }
            }

            //update columndefs for metadata
            for (int i = 0; i < metadataColumnDefs.size(); i++) {
                if (metadataColumnDefs.get(i).staticValue) {

                } else {
                    if (metadataColumnDefs.get(i).binValuePos < 0) {
                        if (metadataColumnDefs.get(i).columnName == null) {
                            if (metadataColumnDefs.get(i).jsonPath == null) {
                                log.error("dynamic metadata having improper info"
                                        + metadataColumnDefs.toString()); //TODO
                            } else {
                                //TODO check for json_path   
                            }
                        } else {
                            if (params.ignoreFirstLine) {
                                if (binName.indexOf(metadataColumnDefs.get(i).binValueHeader) != -1) {
                                    metadataColumnDefs.get(i).binValuePos = binName
                                            .indexOf(metadataColumnDefs.get(i).binValueHeader);
                                } else {
                                    throw new Exception("binName missing in data file:"
                                            + metadataColumnDefs.get(i).binValueHeader);
                                }
                            }
                        }
                    } else {
                        if (params.ignoreFirstLine)
                            metadataColumnDefs.get(i).binValueHeader = binName
                                    .get(metadataColumnDefs.get(i).binValuePos);
                    }
                }
                if ((!metadataColumnDefs.get(i).staticValue) && (metadataColumnDefs.get(i).binValuePos < 0)) {
                    throw new Exception("Information for bin mapping is missing in config file:"
                            + metadataColumnDefs.get(i));
                }

                if (metadataColumnDefs.get(i).srcType == null) {
                    throw new Exception(
                            "Source data type is not properly mentioned:" + metadataColumnDefs.get(i));
                }

                if (Constants.SET.equalsIgnoreCase(metadataColumnDefs.get(i).binNameHeader)
                        && !metadataColumnDefs.get(i).srcType.equals(SrcColumnType.STRING)) {
                    throw new Exception("Set name should be string type:" + metadataColumnDefs.get(i));
                }

                if (metadataColumnDefs.get(i).binNameHeader.equalsIgnoreCase(Constants.SET)
                        && params.set != null) {
                    throw new Exception(
                            "Set name is given both in config file and commandline. Provide only once.");
                }
            }

            //update columndefs for bins
            for (int i = 0; i < binColumnDefs.size(); i++) {
                if (binColumnDefs.get(i).staticName) {

                } else {
                    if (binColumnDefs.get(i).binNamePos < 0) {
                        if (binColumnDefs.get(i).columnName == null) {
                            if (binColumnDefs.get(i).jsonPath == null) {
                                log.error("dynamic bin having improper info"); //TODO
                            } else {
                                //TODO check for json_path
                            }
                        } else {
                            if (params.ignoreFirstLine) {
                                if (binName.indexOf(binColumnDefs.get(i).binNameHeader) != -1) {
                                    binColumnDefs.get(i).binNamePos = binName
                                            .indexOf(binColumnDefs.get(i).binNameHeader);
                                } else {
                                    throw new Exception("binName missing in data file:"
                                            + binColumnDefs.get(i).binNameHeader);
                                }
                            }
                        }
                    } else {
                        if (params.ignoreFirstLine)
                            binColumnDefs.get(i).binNameHeader = binName.get(binColumnDefs.get(i).binNamePos);
                    }
                }

                if (binColumnDefs.get(i).staticValue) {

                } else {
                    if (binColumnDefs.get(i).binValuePos < 0) {
                        if (binColumnDefs.get(i).columnName == null) {
                            if (binColumnDefs.get(i).jsonPath == null) {
                                log.error("dynamic bin having improper info"); //TODO
                            } else {
                                //TODO check for json_path
                            }
                        } else {
                            if (params.ignoreFirstLine) {
                                if (binName.contains(binColumnDefs.get(i).binValueHeader)) {
                                    binColumnDefs.get(i).binValuePos = binName
                                            .indexOf(binColumnDefs.get(i).binValueHeader);
                                } else if (!binColumnDefs.get(i).binValueHeader.toLowerCase()
                                        .equals(Constants.SYSTEM_TIME)) {
                                    throw new Exception("Wrong column name mentioned in config file:"
                                            + binColumnDefs.get(i).binValueHeader);
                                }
                            }
                        }
                    } else {
                        if (params.ignoreFirstLine)
                            binColumnDefs.get(i).binValueHeader = binName.get(binColumnDefs.get(i).binValuePos);
                    }

                    //check for missing entries in config file
                    if (binColumnDefs.get(i).binValuePos < 0 && binColumnDefs.get(i).binValueHeader == null) {
                        throw new Exception("Information missing(Value header or bin mapping) in config file:"
                                + binColumnDefs.get(i));
                    }

                    //check for proper data type in config file.
                    if (binColumnDefs.get(i).srcType == null) {
                        throw new Exception(
                                "Source data type is not properly mentioned:" + binColumnDefs.get(i));
                    }

                    //check for valid destination type
                    if ((binColumnDefs.get(i).srcType.equals(SrcColumnType.TIMESTAMP)
                            || binColumnDefs.get(i).srcType.equals(SrcColumnType.BLOB))
                            && binColumnDefs.get(i).dstType == null) {
                        throw new Exception("Destination type is not mentioned: " + binColumnDefs.get(i));
                    }

                    //check for encoding
                    if (binColumnDefs.get(i).dstType != null && binColumnDefs.get(i).encoding == null) {
                        throw new Exception(
                                "Encoding is not given for src-dst type conversion:" + binColumnDefs.get(i));
                    }

                    //check for valid encoding
                    if (binColumnDefs.get(i).srcType.equals(SrcColumnType.BLOB)
                            && !binColumnDefs.get(i).encoding.equals(Constants.HEX_ENCODING)) {
                        throw new Exception("Wrong encoding for blob data:" + binColumnDefs.get(i));
                    }
                }

                //Check static bin name mapped to dynamic bin value
                if ((binColumnDefs.get(i).binNamePos == binColumnDefs.get(i).binValuePos)
                        && (binColumnDefs.get(i).binNamePos != -1)) {
                    throw new Exception("Static bin name mapped to dynamic bin value:" + binColumnDefs.get(i));
                }

                //check for missing or over-long bin name in config file
                if (binColumnDefs.get(i).binNameHeader == null
                        || binColumnDefs.get(i).binNameHeader.length() > Constants.BIN_NAME_LENGTH) {
                    throw new Exception("Missing bin name or bin name exceeding the allowed length in config file:"
                            + binColumnDefs.get(i));
                }
            }
        }

        log.info(params.toString());
        log.debug("MetadataConfig:" + metadataColumnDefs);
        log.debug("BinColumnDefs:" + binColumnDefs);

        // Start PrintStat thread
        statPrinter.start();

        // Reader pool size
        int readerPoolSize = nReaderThreads > allFileNames.size() ? allFileNames.size() : nReaderThreads;
        ExecutorService readerPool = Executors.newFixedThreadPool(readerPoolSize);
        log.info("Reader pool size : " + readerPoolSize);

        // Submit a reader task for each data file to the reader thread pool.
        for (String aFile : allFileNames) {
            log.debug("Submitting task for: " + aFile);
            readerPool.submit(new AerospikeLoad(aFile, client, params));
        }

        // Wait for reader pool to complete
        readerPool.shutdown();
        log.info("Shutting down reader thread pool");

        while (!readerPool.isTerminated())
            ;
        //readerPool.awaitTermination(20, TimeUnit.MINUTES);
        log.info("Reader thread pool terminated");

        // Wait for writer pool to complete after getting all tasks from reader pool
        writerPool.shutdown();
        log.info("Shutting down writer thread pool");

        while (!writerPool.isTerminated())
            ;
        log.info("Writer thread pool terminated");

        // Print final statistic of aerospike-loader.
        log.info("Final statistics of importer: (Successful Writes = " + counters.write.writeCount.get() + ", "
                + "Errors = "
                + (counters.write.writeErrors.get() + counters.write.readErrors.get()
                        + counters.write.processingErrors.get())
                + " (" + counters.write.writeErrors.get() + "-Write, " + counters.write.readErrors.get()
                + "-Read, " + counters.write.processingErrors.get() + "-Processing))");
    } catch (Exception e) {
        log.error(e);
        if (log.isDebugEnabled()) {
            e.printStackTrace();
        }
    } finally {
        // Stop statistic printer thread.
        statPrinter.interrupt();
        log.info("Aerospike loader completed");
    }
}
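Two of the String.length() checks above are worth isolating: the CSV delimiter read from the config file is only accepted if it is exactly one character long, and a bin name must be present and no longer than Constants.BIN_NAME_LENGTH. The sketch below reproduces both checks in isolation; MAX_BIN_NAME_LENGTH is an assumed stand-in for the real constant and is not taken from AerospikeLoad.

public class ConfigCheckSketch {
    // Assumed stand-in for Constants.BIN_NAME_LENGTH; the real limit comes from Aerospike.
    private static final int MAX_BIN_NAME_LENGTH = 14;

    static char parseDelimiter(String value) {
        // A usable delimiter is exactly one character; anything else falls back to ','.
        if (value != null && value.length() == 1) {
            return value.charAt(0);
        }
        System.err.println("Delimiter '" + value + "' is not a single character, defaulting to ','");
        return ',';
    }

    static void checkBinName(String binName) {
        if (binName == null || binName.length() > MAX_BIN_NAME_LENGTH) {
            throw new IllegalArgumentException("Missing or over-long bin name: " + binName);
        }
    }

    public static void main(String[] args) {
        System.out.println(parseDelimiter(";"));  // ';'
        System.out.println(parseDelimiter("||")); // warns and returns ','
        checkBinName("age");                      // accepted
        try {
            checkBinName("a-much-too-long-bin-name");
        } catch (IllegalArgumentException e) {
            System.out.println("Rejected: " + e.getMessage());
        }
    }
}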