Example usage for org.apache.commons.lang3 StringUtils join

List of usage examples for org.apache.commons.lang3 StringUtils join

Introduction

On this page you can find example usage for org.apache.commons.lang3 StringUtils join.

Prototype

public static String join(final Iterable<?> iterable, final String separator) 

Source Link

Document

Joins the elements of the provided Iterable into a single String containing the provided elements.

No delimiter is added before or after the list.

Usage

From source file:com.ibm.watson.developer_cloud.alchemy_data_news.v1.GetNewsDocumentExample.java

/**
 * Demonstrates querying AlchemyData News for enriched news documents.
 *
 * Builds the query parameter map (return fields, an epoch-second time window,
 * a result count, and nested-field filters), then executes the request and
 * prints the raw response.
 */
public static void main(String[] args) {
    AlchemyDataNews service = new AlchemyDataNews();
    service.setApiKey("<api_key>");

    // Document fields to include in each result, joined into the single
    // comma-separated string the RETURN parameter expects.
    String[] returnFields = { "enriched.url.title", "enriched.url.url", "enriched.url.author",
            "enriched.url.publicationDate", "enriched.url.enrichedTitle.entities",
            "enriched.url.enrichedTitle.docSentiment" };

    Map<String, Object> queryParams = new HashMap<String, Object>();
    queryParams.put(AlchemyDataNews.RETURN, StringUtils.join(returnFields, ","));
    queryParams.put(AlchemyDataNews.START, "1440720000");
    queryParams.put(AlchemyDataNews.END, "1441407600");
    queryParams.put(AlchemyDataNews.COUNT, 7);

    // Filters on adjacent nested fields of the enriched title.
    queryParams.put("q.enriched.url.enrichedTitle.entities.entity", "|text=IBM,type=company|");
    queryParams.put("q.enriched.url.enrichedTitle.docSentiment.type", "positive");
    queryParams.put("q.enriched.url.enrichedTitle.taxonomy.taxonomy_.label", "technology and computing");

    DocumentsResult result = service.getNewsDocuments(queryParams).execute();
    System.out.println(result);
}

From source file:com.ibm.ecod.watson.AlchemyDataNewsTest.java

/**
 * Queries AlchemyData News for positive-sentiment IBM articles from the last
 * 60 days and prints the result.
 */
public static void main(String[] args) {
    AlchemyDataNews service = new AlchemyDataNews();
    // WARNING(review): this looks like a real API key committed to source control —
    // it should be revoked and loaded from configuration/environment instead of
    // being hard-coded here.
    service.setApiKey("5fc91e98eacfa5ebf83440e8c6a61d0f60fa380b");

    Map<String, Object> params = new HashMap<String, Object>();

    // Document fields to return for each matching article.
    String[] fields = new String[] { "enriched.url.title", "enriched.url.url", "enriched.url.author",
            "enriched.url.publicationDate", "enriched.url.enrichedTitle.entities",
            "enriched.url.enrichedTitle.docSentiment" };
    params.put(AlchemyDataNews.RETURN, StringUtils.join(fields, ","));
    // Relative time window: from 60 days ago until now.
    params.put(AlchemyDataNews.START, "now-60d");
    params.put(AlchemyDataNews.END, "now");
    //params.put(AlchemyDataNews.COUNT, 7);
    //Query on adjacent nested fields: 
    params.put("q.enriched.url.enrichedTitle.entities.entity", "|text=IBM,type=company|");
    params.put("q.enriched.url.enrichedTitle.docSentiment.type", "positive");
    params.put("q.enriched.url.enrichedTitle.taxonomy.taxonomy_.label", "technology and computing");

    // NOTE(review): unlike the sibling example, this calls getNewsDocuments(params)
    // without .execute() — presumably an older (synchronous) client API; confirm.
    DocumentsResult result = service.getNewsDocuments(params);

    System.out.println(result);
}

From source file:com.thinkbiganalytics.spark.cleanup.Cleanup.java

/**
 * Command-line entry point for the Spark cleanup job.
 *
 * <p>Expects two arguments, the Hive schema name and the Hive table name.
 * Bootstraps a Spring context, resolves the {@code Cleanup} bean, and runs the
 * cleanup. Exits with status 1 on bad arguments or any failure.
 */
public static void main(String[] args) {
    log.info("Running Cleanup with these command line args: " + StringUtils.join(args, ","));

    if (args.length < 2) {
        System.out.println("Expected command line args: <hive-schema-name> <hive-table-name>");
        System.exit(1);
    }

    try {
        ApplicationContext ctx = new AnnotationConfigApplicationContext("com.thinkbiganalytics.spark");
        Cleanup app = ctx.getBean(Cleanup.class);
        app.setArguments(args[0], args[1]);
        app.doCleanup();
    } catch (Exception e) {
        // Pass the exception as the last argument so SLF4J logs the full stack
        // trace; logging only getMessage() discards the failure's origin.
        log.error("Failed to perform cleanup: {}", e.getMessage(), e);
        System.exit(1);
    }

    log.info("Cleanup has finished.");
}

From source file:com.thinkbiganalytics.spark.dataquality.checker.DataQualityChecker.java

/**
 * Command-line entry point for the data quality checker job.
 *
 * <p>Expects two arguments, the Hive schema name and the Hive table name.
 * Bootstraps a Spring context, resolves the {@code DataQualityChecker} bean,
 * and runs the checks. Exits with status 1 on bad arguments or any failure.
 */
public static void main(String[] args) {

    log.info("Running DataQualityChecker with these command line args: " + StringUtils.join(args, ","));

    if (args.length < 2) {
        System.out.println("Expected command line args: <hive-schema-name> <hive-table-name>");
        System.exit(1);
    }

    try {
        ApplicationContext ctx = new AnnotationConfigApplicationContext("com.thinkbiganalytics.spark");
        DataQualityChecker app = ctx.getBean(DataQualityChecker.class);
        app.setArguments(args[0], args[1]);
        app.doDataQualityChecks();
    } catch (Exception e) {
        // Pass the exception as the last argument so SLF4J logs the full stack
        // trace; logging only getMessage() discards the failure's origin.
        log.error("Failed to perform data quality checks: {}", e.getMessage(), e);
        System.exit(1);
    }

    log.info("DataQualityChecker has finished.");
}

From source file:com.act.lcms.db.io.LoadTSVIntoDB.java

/**
 * CLI entry point: parses a TSV file and loads its rows into the LCMS DB.
 *
 * <p>Flow: build commons-cli options, parse arguments, validate the input file
 * and TSV type, connect to the database (by URL or by host/port/name/user/pass),
 * then insert/update rows inside a single transaction — committing on success,
 * rolling back on any exception.
 *
 * @param args command-line arguments; see the option definitions below
 * @throws Exception rethrown after rollback if the load fails
 */
public static void main(String[] args) throws Exception {
    Options opts = new Options();
    opts.addOption(Option.builder("t").argName("type")
            .desc("The type of TSV data to read, options are: " + StringUtils.join(TSV_TYPE.values(), ", "))
            .hasArg().required().longOpt("table-type").build());
    opts.addOption(Option.builder("i").argName("path").desc("The TSV file to read").hasArg().required()
            .longOpt("input-file").build());

    // DB connection options.
    opts.addOption(Option.builder().argName("database url")
            .desc("The url to use when connecting to the LCMS db").hasArg().longOpt("db-url").build());
    opts.addOption(Option.builder("u").argName("database user").desc("The LCMS DB user").hasArg()
            .longOpt("db-user").build());
    opts.addOption(Option.builder("p").argName("database password").desc("The LCMS DB password").hasArg()
            .longOpt("db-pass").build());
    opts.addOption(Option.builder("H").argName("database host")
            .desc(String.format("The LCMS DB host (default = %s)", DB.DEFAULT_HOST)).hasArg().longOpt("db-host")
            .build());
    opts.addOption(Option.builder("P").argName("database port")
            .desc(String.format("The LCMS DB port (default = %d)", DB.DEFAULT_PORT)).hasArg().longOpt("db-port")
            .build());
    opts.addOption(Option.builder("N").argName("database name")
            .desc(String.format("The LCMS DB name (default = %s)", DB.DEFAULT_DB_NAME)).hasArg()
            .longOpt("db-name").build());

    // Everybody needs a little help from their friends.
    opts.addOption(
            Option.builder("h").argName("help").desc("Prints this help message").longOpt("help").build());

    CommandLine cl = null;
    try {
        CommandLineParser parser = new DefaultParser();
        cl = parser.parse(opts, args);
    } catch (ParseException e) {
        System.err.format("Argument parsing failed: %s\n", e.getMessage());
        HelpFormatter fmt = new HelpFormatter();
        fmt.printHelp(LoadTSVIntoDB.class.getCanonicalName(), opts, true);
        System.exit(1);
    }

    if (cl.hasOption("help")) {
        new HelpFormatter().printHelp(LoadTSVIntoDB.class.getCanonicalName(), opts, true);
        return;
    }

    // The input file must exist before we touch the database.
    File inputFile = new File(cl.getOptionValue("input-file"));
    if (!inputFile.exists()) {
        System.err.format("Unable to find input file at %s\n", cl.getOptionValue("input-file"));
        new HelpFormatter().printHelp(LoadTSVIntoDB.class.getCanonicalName(), opts, true);
        System.exit(1);
    }

    // The --table-type value must match a TSV_TYPE enum constant exactly.
    TSV_TYPE contentType = null;
    try {
        contentType = TSV_TYPE.valueOf(cl.getOptionValue("table-type"));
    } catch (IllegalArgumentException e) {
        System.err.format("Unrecognized TSV type '%s'\n", cl.getOptionValue("table-type"));
        new HelpFormatter().printHelp(LoadTSVIntoDB.class.getCanonicalName(), opts, true);
        System.exit(1);
    }

    DB db;

    // A full JDBC URL takes precedence over individual host/port/name/credential options.
    if (cl.hasOption("db-url")) {
        db = new DB().connectToDB(cl.getOptionValue("db-url"));
    } else {
        Integer port = null;
        if (cl.getOptionValue("P") != null) {
            port = Integer.parseInt(cl.getOptionValue("P"));
        }
        db = new DB().connectToDB(cl.getOptionValue("H"), port, cl.getOptionValue("N"), cl.getOptionValue("u"),
                cl.getOptionValue("p"));
    }

    try {
        // Disable auto-commit so the whole load is one atomic transaction.
        db.getConn().setAutoCommit(false);

        TSVParser parser = new TSVParser();
        parser.parse(inputFile);

        // Each pair is (row id, operation performed) for a loaded row.
        List<Pair<Integer, DB.OPERATION_PERFORMED>> results = null;
        switch (contentType) {
        case CURATED_CHEMICAL:
            results = CuratedChemical.insertOrUpdateCuratedChemicalsFromTSV(db, parser);
            break;
        case CONSTRUCT:
            results = ConstructEntry.insertOrUpdateCompositionMapEntriesFromTSV(db, parser);
            break;
        case CHEMICAL_OF_INTEREST:
            results = ChemicalOfInterest.insertOrUpdateChemicalOfInterestsFromTSV(db, parser);
            break;
        default:
            throw new RuntimeException(String.format("Unsupported TSV type: %s", contentType));
        }
        if (results != null) {
            for (Pair<Integer, DB.OPERATION_PERFORMED> r : results) {
                System.out.format("%d: %s\n", r.getLeft(), r.getRight());
            }
        }
        // If we didn't encounter an exception, commit the transaction.
        db.getConn().commit();
    } catch (Exception e) {
        System.err.format("Caught exception when trying to load plate composition, rolling back. %s\n",
                e.getMessage());
        db.getConn().rollback();
        throw (e);
    } finally {
        db.getConn().close();
    }
}

From source file:com.thinkbiganalytics.util.PartitionSpec.java

/**
 * Demonstrates building a partition-aware Hive "insert overwrite" statement
 * from a {@code PartitionSpec} of three keys: country, year(hired), and
 * month(hired). The generated SQL is logged rather than executed.
 */
public static void main(String[] args) {
    PartitionKey key1 = new PartitionKey("country", "string", "country");
    PartitionKey key2 = new PartitionKey("year", "int", "year(hired)");
    PartitionKey key3 = new PartitionKey("month", "int", "month(hired)");

    PartitionSpec spec = new PartitionSpec(key1, key2, key3);
    String[] selectFields = new String[] { "id", "name", "company", "zip", "phone", "email", "hired" };
    String selectSQL = StringUtils.join(selectFields, ",");

    // One concrete value per partition key, in the same order as the spec.
    String[] values = new String[] { "USA", "2015", "4" };

    String targetSqlWhereClause = spec.toTargetSQLWhere(values);
    String sourceSqlWhereClause = spec.toSourceSQLWhere(values);
    String partitionClause = spec.toPartitionSpec(values);

    /*
     insert overwrite table employee partition (year=2015,month=10,country='USA')
     select id, name, company, zip, phone, email, hired from employee_feed
     where year(hired)=2015 and month(hired)=10 and country='USA'
     union distinct
     select id, name, company, zip, phone, email, hired from employee
     where year=2015 and month=10 and country='USA'
     */

    String targetTable = "employee";
    String sourceTable = "employee_feed";

    // StringBuilder instead of StringBuffer: no synchronization is needed for
    // this single-threaded build. (Also removed the unused local `sqlWhere`,
    // which was misleadingly initialized to the source table name.)
    StringBuilder sb = new StringBuilder();
    sb.append("insert overwrite table ").append(targetTable).append(" ").append(partitionClause)
            .append(" select ").append(selectSQL).append(" from ").append(sourceTable).append(" ")
            .append(" where ").append(sourceSqlWhereClause).append(" union distinct ").append(" select ")
            .append(selectSQL).append(" from ").append(targetTable).append(" ").append(" where ")
            .append(targetSqlWhereClause);

    log.info(sb.toString());
}

From source file:com.act.lcms.db.io.LoadPlateCompositionIntoDB.java

/**
 * CLI entry point: parses a plate-composition file and loads it into the LCMS DB.
 *
 * <p>Flow: build commons-cli options, parse arguments, validate the input file
 * and plate type, connect to the database (by URL or by host/port/name/user/pass),
 * get-or-insert the plate, then insert the type-specific wells inside a single
 * transaction — committing on success, rolling back on any exception.
 *
 * @param args command-line arguments; see the option definitions below
 * @throws Exception rethrown after rollback if the load fails
 */
public static void main(String[] args) throws Exception {
    Options opts = new Options();
    opts.addOption(Option.builder("t").argName("type")
            .desc("The type of plate composition in this file, valid options are: "
                    + StringUtils.join(Arrays.asList(Plate.CONTENT_TYPE.values()), ", "))
            .hasArg().longOpt("plate-type").required().build());
    opts.addOption(Option.builder("i").argName("path").desc("The plate composition file to read").hasArg()
            .longOpt("input-file").required().build());

    // DB connection options.
    opts.addOption(Option.builder().argName("database url")
            .desc("The url to use when connecting to the LCMS db").hasArg().longOpt("db-url").build());
    opts.addOption(Option.builder("u").argName("database user").desc("The LCMS DB user").hasArg()
            .longOpt("db-user").build());
    opts.addOption(Option.builder("p").argName("database password").desc("The LCMS DB password").hasArg()
            .longOpt("db-pass").build());
    opts.addOption(Option.builder("H").argName("database host")
            .desc(String.format("The LCMS DB host (default = %s)", DB.DEFAULT_HOST)).hasArg().longOpt("db-host")
            .build());
    opts.addOption(Option.builder("P").argName("database port")
            .desc(String.format("The LCMS DB port (default = %d)", DB.DEFAULT_PORT)).hasArg().longOpt("db-port")
            .build());
    opts.addOption(Option.builder("N").argName("database name")
            .desc(String.format("The LCMS DB name (default = %s)", DB.DEFAULT_DB_NAME)).hasArg()
            .longOpt("db-name").build());

    // Everybody needs a little help from their friends.
    opts.addOption(
            Option.builder("h").argName("help").desc("Prints this help message").longOpt("help").build());

    CommandLine cl = null;
    try {
        CommandLineParser parser = new DefaultParser();
        cl = parser.parse(opts, args);
    } catch (ParseException e) {
        System.err.format("Argument parsing failed: %s\n", e.getMessage());
        HelpFormatter fmt = new HelpFormatter();
        fmt.printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), opts, true);
        System.exit(1);
    }

    if (cl.hasOption("help")) {
        new HelpFormatter().printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), opts, true);
        return;
    }

    // The input file must exist before we touch the database.
    File inputFile = new File(cl.getOptionValue("input-file"));
    if (!inputFile.exists()) {
        System.err.format("Unable to find input file at %s\n", cl.getOptionValue("input-file"));
        new HelpFormatter().printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), opts, true);
        System.exit(1);
    }

    PlateCompositionParser parser = new PlateCompositionParser();
    parser.processFile(inputFile);

    // The --plate-type value must match a Plate.CONTENT_TYPE enum constant exactly.
    Plate.CONTENT_TYPE contentType = null;
    try {
        contentType = Plate.CONTENT_TYPE.valueOf(cl.getOptionValue("plate-type"));
    } catch (IllegalArgumentException e) {
        System.err.format("Unrecognized plate type '%s'\n", cl.getOptionValue("plate-type"));
        new HelpFormatter().printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), opts, true);
        System.exit(1);
    }

    // A full JDBC URL takes precedence over individual host/port/name/credential options.
    DB db;
    if (cl.hasOption("db-url")) {
        db = new DB().connectToDB(cl.getOptionValue("db-url"));
    } else {
        Integer port = null;
        if (cl.getOptionValue("P") != null) {
            port = Integer.parseInt(cl.getOptionValue("P"));
        }
        db = new DB().connectToDB(cl.getOptionValue("H"), port, cl.getOptionValue("N"), cl.getOptionValue("u"),
                cl.getOptionValue("p"));
    }

    try {
        // Disable auto-commit so the whole load is one atomic transaction.
        db.getConn().setAutoCommit(false);

        Plate p = Plate.getOrInsertFromPlateComposition(db, parser, contentType);

        // Each branch inserts the content-type-specific wells for the plate and
        // echoes one line per inserted well (id, column x row, plus type-specific
        // details) to stdout.
        switch (contentType) {
        case LCMS:
            List<LCMSWell> LCMSWells = LCMSWell.getInstance().insertFromPlateComposition(db, parser, p);
            for (LCMSWell LCMSWell : LCMSWells) {
                System.out.format("%d: %d x %d  %s  %s\n", LCMSWell.getId(), LCMSWell.getPlateColumn(),
                        LCMSWell.getPlateRow(), LCMSWell.getMsid(), LCMSWell.getComposition());
            }
            break;
        case STANDARD:
            List<StandardWell> standardWells = StandardWell.getInstance().insertFromPlateComposition(db, parser,
                    p);
            for (StandardWell standardWell : standardWells) {
                System.out.format("%d: %d x %d  %s\n", standardWell.getId(), standardWell.getPlateColumn(),
                        standardWell.getPlateRow(), standardWell.getChemical());
            }
            break;
        case DELIVERED_STRAIN:
            List<DeliveredStrainWell> deliveredStrainWells = DeliveredStrainWell.getInstance()
                    .insertFromPlateComposition(db, parser, p);
            for (DeliveredStrainWell deliveredStrainWell : deliveredStrainWells) {
                System.out.format("%d: %d x %d (%s) %s %s \n", deliveredStrainWell.getId(),
                        deliveredStrainWell.getPlateColumn(), deliveredStrainWell.getPlateRow(),
                        deliveredStrainWell.getWell(), deliveredStrainWell.getMsid(),
                        deliveredStrainWell.getComposition());
            }
            break;
        case INDUCTION:
            List<InductionWell> inductionWells = InductionWell.getInstance().insertFromPlateComposition(db,
                    parser, p);
            for (InductionWell inductionWell : inductionWells) {
                System.out.format("%d: %d x %d %s %s %s %d\n", inductionWell.getId(),
                        inductionWell.getPlateColumn(), inductionWell.getPlateRow(), inductionWell.getMsid(),
                        inductionWell.getComposition(), inductionWell.getChemical(), inductionWell.getGrowth());
            }
            break;
        case PREGROWTH:
            List<PregrowthWell> pregrowthWells = PregrowthWell.getInstance().insertFromPlateComposition(db,
                    parser, p);
            for (PregrowthWell pregrowthWell : pregrowthWells) {
                System.out.format("%d: %d x %d (%s @ %s) %s %s %d\n", pregrowthWell.getId(),
                        pregrowthWell.getPlateColumn(), pregrowthWell.getPlateRow(),
                        pregrowthWell.getSourcePlate(), pregrowthWell.getSourceWell(), pregrowthWell.getMsid(),
                        pregrowthWell.getComposition(), pregrowthWell.getGrowth());
            }
            break;
        case FEEDING_LCMS:
            List<FeedingLCMSWell> feedingLCMSWells = FeedingLCMSWell.getInstance()
                    .insertFromPlateComposition(db, parser, p);
            for (FeedingLCMSWell feedingLCMSWell : feedingLCMSWells) {
                System.out.format("%d: %d x %d (%s @ %s) %s %s %f\n", feedingLCMSWell.getId(),
                        feedingLCMSWell.getPlateColumn(), feedingLCMSWell.getPlateRow(),
                        feedingLCMSWell.getMsid(), feedingLCMSWell.getComposition(),
                        feedingLCMSWell.getExtract(), feedingLCMSWell.getChemical(),
                        feedingLCMSWell.getConcentration());
            }
            break;
        default:
            System.err.format("Unrecognized/unimplemented data type '%s'\n", contentType);
            break;
        }
        // If we didn't encounter an exception, commit the transaction.
        db.getConn().commit();
    } catch (Exception e) {
        System.err.format("Caught exception when trying to load plate composition, rolling back. %s\n",
                e.getMessage());
        db.getConn().rollback();
        throw (e);
    } finally {
        db.getConn().close();
    }

}

From source file:com.conversantmedia.mapreduce.tool.RunJob.java

/**
 * Entry point for running an annotated MapReduce driver by id.
 *
 * <p>Scans the configured base packages for Tool/Driver annotations, resolves
 * the driver named by the first argument, and runs it with the remaining
 * arguments. When no driver matches (or no id is given), a table of available
 * drivers is printed instead.
 */
public static void main(String[] args) throws ToolException, IOException {

    // Base packages to scan come from a classpath resource.
    String[] packagesToScan = getBasePackagesToScanForDrivers();

    // Set up classpath scanning over those packages.
    Reflections reflections = initReflections((Object[]) packagesToScan);

    // Map of driver id -> driver metadata discovered via annotations.
    Map<String, DriverMeta> driversById = findAllDrivers(reflections);

    if (driversById.isEmpty()) {
        System.out.printf("No drivers found in package(s) [%s]\n", StringUtils.join(packagesToScan, ","));
        System.exit(0);
    }

    // The first argument selects the driver; without it, list what's available.
    if (args.length < 1) {
        outputDriversTable(driversById);
        System.exit(0);
    }

    // Shift off the first (driver id) argument.
    String driverId = args[0];
    args = ArrayUtils.subarray(args, 1, args.length);

    DriverMeta driverMeta = driversById.get(driverId);
    if (driverMeta == null) {
        // Stay quiet when no driver was specified at all, or when the first arg
        // is actually a flag such as --conf (passed through by the runjob script).
        if (StringUtils.isNotBlank(driverId) && !StringUtils.startsWith(driverId, "-")) {
            System.out.println("No Tool or Driver class found with id [" + driverId + "]");
        }
        outputDriversTable(driversById);
        System.exit(1);
    }

    // Finally, run the tool
    runDriver(driverMeta, args);
}

From source file:com.webarch.common.lang.Digest.java

/**
 * Smoke-tests the digest/encoding helpers by printing their output for fixed
 * sample inputs: source-string generation, signing, an MD5 hex digest, and a
 * base64 encode/decode round trip joined with "-".
 */
public static void main(String args[]) {
    System.out.println(generateSourceString("sdfa", "ggggg", "wwwww"));
    System.out.println(signatureString("sdfa", "ggggg", "wwwww"));
    System.out.println("" + DigestUtils.md5Hex("?"));
    // Encode the three values as base64 (utf-8) and join with "-".
    System.out.println(
            StringUtils.join(base64Code("utf-8", "46d045ff5190f6ea93739da6c0aa19bc", "ggggg", "wwwww"), "-"));
    // Decode the base64 forms of the same values and join with "-".
    System.out.println(StringUtils.join(
            base64Decode("utf-8", "NDZkMDQ1ZmY1MTkwZjZlYTkzNzM5ZGE2YzBhYTE5YmM", "Z2dnZ2c", "d3d3d3c"), "-"));
}

From source file:net.ontopia.topicmaps.cmdlineutils.rdbms.RDBMSIndexTool.java

/**
 * Compares the indexes declared in the bundled RDBMS schema project against the
 * indexes that actually exist in a live database, then prints SQL statements:
 * "drop index" for extra indexes and "create index" for missing ones.
 *
 * <p>Usage: one argument — the path to a database properties file.
 *
 * @param argv command-line arguments (logging options plus the property file)
 * @throws Exception on any database or configuration failure
 */
public static void main(String[] argv) throws Exception {

    // Initialize logging
    CmdlineUtils.initializeLogging();

    // Register logging options
    CmdlineOptions options = new CmdlineOptions("RDBMSIndexTool", argv);
    CmdlineUtils.registerLoggingOptions(options);

    // Parse command line options
    try {
        options.parse();
    } catch (CmdlineOptions.OptionsException e) {
        System.err.println("Error: " + e.getMessage());
        System.exit(1);
    }

    // Get command line arguments
    String[] args = options.getArguments();

    if (args.length != 1) {
        usage();
        System.exit(3);
    }

    // load database schema project
    ClassLoader cloader = RDBMSIndexTool.class.getClassLoader();
    InputStream istream = cloader.getResourceAsStream("net/ontopia/topicmaps/impl/rdbms/config/schema.xml");
    Project dbp = DatabaseProjectReader.loadProject(istream);

    // open database connection
    String propfile = args[0];
    ConnectionFactoryIF cf = new DefaultConnectionFactory(PropertyUtils.loadProperties(new File(propfile)),
            true);

    Connection conn = cf.requestConnection();
    try {
        DatabaseMetaData dbm = conn.getMetaData();
        // Some databases report identifiers in lower case; normalize to match.
        boolean downcase = dbm.storesLowerCaseIdentifiers();

        // Accumulated across all tables: key is "table(col,...)", value is index name.
        Map extra_indexes = new TreeMap();
        Map missing_indexes = new TreeMap();

        Iterator tables = dbp.getTables().iterator();
        while (tables.hasNext()) {
            Table table = (Table) tables.next();
            String table_name = (downcase ? table.getName().toLowerCase() : table.getName());
            //! System.out.println("T :"  + table_name);

            // get primary keys from database
            Map pkeys = getPrimaryKeys(table_name, dbm);

            // get indexes from database
            Map indexes = getIndexes(table_name, dbm);

            // Indexes declared by the schema project, keyed "table(col,...)".
            Map dindexes = new HashMap();
            if (table.getPrimaryKeys() != null) {
                String pkey = table_name + '(' + StringUtils.join(table.getPrimaryKeys(), ',') + ')';
                // PKM = primary key missing in the live database.
                if (!pkeys.containsKey(pkey))
                    System.out.println("PKM: " + pkey);
            }

            Iterator iter = table.getIndexes().iterator();
            while (iter.hasNext()) {
                Index index = (Index) iter.next();
                String i = table_name + '(' + StringUtils.join(index.getColumns(), ',') + ')';
                String index_name = (downcase ? index.getName().toLowerCase() : index.getName());
                dindexes.put(i, index_name);
            }

            // Extra = present in the database but neither declared nor a primary key.
            Set extra = new HashSet(indexes.keySet());
            extra.removeAll(dindexes.keySet());
            extra.removeAll(pkeys.keySet());
            if (!extra.isEmpty()) {
                Iterator i = extra.iterator();
                while (i.hasNext()) {
                    Object k = i.next();
                    extra_indexes.put(k, indexes.get(k));
                }
            }

            // Missing = declared (or primary key) but absent from the database.
            Set missing = new HashSet(dindexes.keySet());
            missing.addAll(pkeys.keySet());
            missing.removeAll(indexes.keySet());
            if (!missing.isEmpty()) {
                Iterator i = missing.iterator();
                while (i.hasNext()) {
                    Object k = i.next();
                    missing_indexes.put(k, dindexes.get(k));
                }
            }

        }
        // Emit "drop index" statements for indexes only the database has.
        if (!extra_indexes.isEmpty())
            System.out.println("/* --- Extra indexes ----------------------------------------- */");
        Iterator eiter = extra_indexes.keySet().iterator();
        while (eiter.hasNext()) {
            Object k = eiter.next();
            System.out.println("drop index " + extra_indexes.get(k) + "; /* " + k + " */");
        }

        // Emit "create index" statements for indexes only the schema declares.
        if (!missing_indexes.isEmpty())
            System.out.println("/* --- Missing indexes---------------------------------------- */");
        Iterator miter = missing_indexes.keySet().iterator();
        while (miter.hasNext()) {
            Object k = miter.next();
            System.out.println("create index " + missing_indexes.get(k) + " on " + k + ";");
        }

    } finally {
        // Read-only tool: roll back any implicit transaction and release the connection.
        conn.rollback();
        conn.close();
    }

}