Example usage for org.apache.commons.lang StringUtils split

Introduction

This page collects example usages of org.apache.commons.lang.StringUtils.split from open-source projects.

Prototype

public static String[] split(String str, String separatorChars) 

Document

Splits the provided text into an array, separators specified.
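
For reference, here is a minimal runnable sketch (class name is illustrative) of the two-argument form: every character in the second argument is a separator, adjacent separators are collapsed so no empty tokens are returned, and null or empty input is handled without exceptions.

import java.util.Arrays;

import org.apache.commons.lang.StringUtils;

public class SplitDemo {
    public static void main(String[] args) {
        // Both '.' and ':' act as separators; adjacent separators collapse.
        String[] parts = StringUtils.split("a..b:c", ".:");
        System.out.println(Arrays.toString(parts)); // [a, b, c]

        // A null input returns null; an empty input returns an empty array.
        System.out.println(StringUtils.split(null, "."));      // null
        System.out.println(StringUtils.split("", ".").length); // 0
    }
}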

Usage

From source file:com.adobe.acs.commons.util.PathInfoUtil.java

/**
 * Gets the suffixes as an array; each segment is the text between the /'s.
 *
 * /segment-0/segment-1/segment-2
 *
 * @param request
 * @return an array of the suffix segments, or an empty array
 */
public static String[] getSuffixSegments(final SlingHttpServletRequest request) {
    RequestPathInfo pathInfo = request.getRequestPathInfo();
    if (pathInfo == null || pathInfo.getSuffix() == null) {
        return new String[] {};
    }

    return StringUtils.split(pathInfo.getSuffix(), '/');
}
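
For illustration, a small sketch (the suffix value is hypothetical) of what the char-based split used above returns; the leading '/' does not produce an empty first segment.

import java.util.Arrays;

import org.apache.commons.lang.StringUtils;

public class SuffixSplitDemo {
    public static void main(String[] args) {
        String suffix = "/segment-0/segment-1/segment-2"; // hypothetical suffix
        String[] segments = StringUtils.split(suffix, '/');
        System.out.println(Arrays.toString(segments)); // [segment-0, segment-1, segment-2]
    }
}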

From source file:com.adobe.acs.tools.test_page_generator.impl.Parameters.java

public Parameters(SlingHttpServletRequest request) throws JSONException {

    final String data = request.getParameter("json");

    JSONObject json = new JSONObject(data);

    rootPath = json.optString("rootPath", "");
    template = json.optString("template", "");
    total = json.optInt("total", 0);
    bucketSize = json.optInt("bucketSize", DEFAULT_BUCKET_SIZE);
    bucketType = json.optString("bucketType", DEFAULT_BUCKET_TYPE);
    saveThreshold = json.optInt("saveThreshold", DEFAULT_SAVE_THRESHOLD);

    properties = new HashMap<String, Object>();

    JSONArray jsonArray = json.getJSONArray("properties");

    if (jsonArray != null) {
        for (int i = 0; i < jsonArray.length(); i++) {
            final JSONObject item = jsonArray.getJSONObject(i);

            boolean isMulti = item.optBoolean("multi", false);
            String name = item.optString("name", "");

            if (StringUtils.isNotBlank(name)) {
                if (isMulti) {
                    final List<String> values = new ArrayList<String>();
                    for (String value : StringUtils.split(item.optString("value", ""), ",")) {
                        final String tmp = StringUtils.stripToNull(value);
                        if (tmp != null) {
                            values.add(value);
                        }
                    }

                    properties.put(name, values.toArray(new String[values.size()]));
                } else {
                    String value = item.optString("value", "");
                    properties.put(name, value);
                }
            }
        }
    }
}
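
A minimal sketch of the comma splitting used for multi-valued properties above, with a hypothetical raw value string. This sketch stores the trimmed token, a small variation on the code above, which stores the original untrimmed value.

import java.util.ArrayList;
import java.util.List;

import org.apache.commons.lang.StringUtils;

public class MultiValueSplitDemo {
    public static void main(String[] args) {
        String raw = "red, green, , blue"; // hypothetical "value" field
        List<String> values = new ArrayList<String>();
        for (String value : StringUtils.split(raw, ",")) {
            String tmp = StringUtils.stripToNull(value); // trims; blank tokens become null
            if (tmp != null) {
                values.add(tmp);
            }
        }
        System.out.println(values); // [red, green, blue]
    }
}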

From source file:com.wibidata.shopping.bulkimport.ProductBulkImporter.java

/**
 * Kiji will call this method once for each line in the input text file.
 * Each line in the input text file is a JSON object that has fields describing the product.
 *
 * This method should parse the JSON text line to extract relevant
 * information about the product. These facts about the product should be
 * written to the columns of a row in the kiji_shopping_product table.
 *
 * @param line The input text line of JSON.
 * @param context A helper object used to write to the KijiTable.
 * @throws IOException
 */
@Override
public void produce(LongWritable offset, Text line, KijiTableContext context) throws IOException {
    final JsonNode json = parseJson(line.toString());

    // Parse the ID of the product.
    if (null == json.get("Id")) {
        return;
    }
    final String productId = json.get("Id").getTextValue();

    // Use the ID of the product as the entity ID of the table row.
    final EntityId entityId = context.getEntityId(productId);

    context.put(entityId, "info", "id", productId);

    if (null == json.get("Name")) {
        return;
    }
    context.put(entityId, "info", "name", json.get("Name").getTextValue());

    if (null != json.get("DescriptionHtmlComplete")) {
        context.put(entityId, "info", "description", json.get("DescriptionHtmlComplete").getTextValue());
    }

    if (null != json.get("DescriptionHtmlSimple")) {
        String simpleDesc = json.get("DescriptionHtmlSimple").getTextValue();
        String shortDesc = StringUtils.split(simpleDesc, '\n')[0];
        context.put(entityId, "info", "description_short", shortDesc);
    }

    if (null != json.get("Category")) {
        String category = json.get("Category").getTextValue().toLowerCase();
        context.put(entityId, "info", "category", StringUtils.capitalize(category));
    }

    if (null != json.get("Images").get("PrimaryMedium")) {
        context.put(entityId, "info", "thumbnail", json.get("Images").get("PrimaryMedium").getTextValue());
    }

    if (null != json.get("Images").get("PrimaryExtraLarge")) {
        context.put(entityId, "info", "thumbnail_xl",
                json.get("Images").get("PrimaryExtraLarge").getTextValue());
    }

    if (null != json.get("ListPrice")) {
        context.put(entityId, "info", "price", json.get("ListPrice").getDoubleValue());
    }

    if (null != json.get("Skus").get(0).get("QuantityAvailable")) {
        context.put(entityId, "info", "inventory",
                json.get("Skus").get(0).get("QuantityAvailable").getLongValue());
    }

    List<CharSequence> words = new ArrayList<CharSequence>();
    for (JsonNode word : json.get("DescriptionWords")) {
        words.add(word.getTextValue().toLowerCase());
    }
    DescriptionWords prodWords = DescriptionWords.newBuilder().setWords(words).build();
    context.put(entityId, "info", "description_words", prodWords);
}
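
A small sketch of the first-line extraction used for description_short above, with a hypothetical description string. Note that split returns a zero-length array for empty input, so the [0] access assumes non-empty text.

import org.apache.commons.lang.StringUtils;

public class FirstLineDemo {
    public static void main(String[] args) {
        String simpleDesc = "First paragraph of the description.\nSecond paragraph."; // hypothetical
        // Everything before the first '\n'. An empty simpleDesc would make
        // split return an empty array and the [0] access would fail.
        String shortDesc = StringUtils.split(simpleDesc, '\n')[0];
        System.out.println(shortDesc); // First paragraph of the description.
    }
}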

From source file:com.hangum.tadpole.sql.util.executer.procedure.MSSQLProcedureExecuter.java

@Override
public boolean exec(List<InOutParameterDAO> parameterList) throws Exception {
    initResult();

    java.sql.Connection javaConn = null;
    java.sql.CallableStatement cstmt = null;

    try {
        if (listOutParamValues == null)
            getOutParameters();

        SqlMapClient client = TadpoleSQLManager.getInstance(userDB);
        javaConn = client.getDataSource().getConnection();

        // make the script
        String[] arrProcedureName = StringUtils.split(procedureDAO.getName(), ".");
        String strProcName = "[" + arrProcedureName[0] + "].[" + arrProcedureName[1] + "]";

        StringBuffer sbQuery = new StringBuffer("{call " + strProcName + "(");
        // in script
        int intParamSize = this.getParametersCount();
        for (int i = 0; i < intParamSize; i++) {
            if (i == 0)
                sbQuery.append("?");
            else
                sbQuery.append(",?");
        }
        sbQuery.append(")}");
        if (logger.isDebugEnabled())
            logger.debug("Execute Procedure query is\t  " + sbQuery.toString());

        // set prepare call
        cstmt = javaConn.prepareCall(sbQuery.toString());

        // Set input value
        for (InOutParameterDAO inOutParameterDAO : parameterList) {
            //            if(logger.isDebugEnabled()) logger.debug("Parameter " + inOutParameterDAO.getOrder() + " Value is " + inOutParameterDAO.getValue());
            //            if (null==inOutParameterDAO.getValue() || "".equals(inOutParameterDAO.getValue())){
            //               MessageDialog.openError(null, "Error", inOutParameterDAO.getName() + " parameters are required.");
            //               return false;
            //            }
            cstmt.setObject(inOutParameterDAO.getOrder(), inOutParameterDAO.getValue());
        }

        // Set the OUT Parameter
        for (int i = 0; i < listOutParamValues.size(); i++) {
            InOutParameterDAO dao = listOutParamValues.get(i);

            if (logger.isDebugEnabled())
                logger.debug("Out Parameter " + dao.getOrder() + " JavaType is "
                        + RDBTypeToJavaTypeUtils.getJavaType(dao.getRdbType()));

            cstmt.registerOutParameter(dao.getOrder(), RDBTypeToJavaTypeUtils.getJavaType(dao.getRdbType()));
        }
        cstmt.execute();

        //
        // Collect the results: if a cursor (ResultSet) was returned, read it;
        // otherwise read the values of the registered OUT parameters.
        //

        // Tracks whether the procedure returned a cursor.
        boolean isCursor = false;
        ResultSet rs = cstmt.getResultSet();
        if (rs != null) {
            setResultCursor(rs);
            isCursor = true;

            // MSSQL may return multiple result sets, so read them all.
            while (cstmt.getMoreResults()) {
                setResultCursor(cstmt.getResultSet());
            }
        } else {
            for (int i = 0; i < listOutParamValues.size(); i++) {
                InOutParameterDAO dao = listOutParamValues.get(i);

                Object obj = cstmt.getObject(dao.getOrder());
                // Store the OUT parameter value as a String.
                if (obj != null) {
                    dao.setValue(obj.toString());
                }

            }
        }

        if (!isCursor) {
            List<Map<Integer, Object>> sourceDataList = new ArrayList<Map<Integer, Object>>();
            Map<Integer, Object> tmpRow = null;

            for (int i = 0; i < listOutParamValues.size(); i++) {
                InOutParameterDAO dao = listOutParamValues.get(i);
                tmpRow = new HashMap<Integer, Object>();

                tmpRow.put(0, "" + dao.getOrder());
                tmpRow.put(1, "" + dao.getName());
                tmpRow.put(2, "" + dao.getType());
                tmpRow.put(3, "" + dao.getRdbType());
                tmpRow.put(4, "" + dao.getLength());
                tmpRow.put(5, "" + dao.getValue());

                sourceDataList.add(tmpRow);
            }

            setResultNoCursor(new TadpoleResultSet(sourceDataList));
        }

        return true;
    } catch (Exception e) {
        logger.error("ProcedureExecutor executing error", e);
        throw e;
    } finally {
        try {
            if (cstmt != null)
                cstmt.close();
        } catch (Exception e) {
        }
        try {
            if (javaConn != null)
                javaConn.close();
        } catch (Exception e) {
        }
    }
}
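
A minimal sketch of the procedure-name handling above, with a hypothetical "schema.procedure" name. The "." is a plain separator character (not a regex); a name without a schema part would leave a single-element array, so the [1] access assumes a qualified name.

import org.apache.commons.lang.StringUtils;

public class ProcNameSplitDemo {
    public static void main(String[] args) {
        String name = "dbo.usp_GetOrders"; // hypothetical procedure name
        String[] arrProcedureName = StringUtils.split(name, ".");
        String strProcName = "[" + arrProcedureName[0] + "].[" + arrProcedureName[1] + "]";
        System.out.println(strProcName); // [dbo].[usp_GetOrders]
    }
}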

From source file:com.bstek.dorado.web.resolver.WebFileResolver.java

protected Resource[] getResourcesByFileName(DoradoContext context, String resourcePrefix, String fileName,
        String resourceSuffix) throws Exception {
    String path;
    Resource[] resources = null;
    if ("debug".equals(Configure.getString("core.runMode")) && resourcePrefix != null
            && StringUtils.indexOfAny(resourcePrefix, RESOURCE_PREFIX_DELIM) >= 0) {
        String[] prefixs = StringUtils.split(resourcePrefix, RESOURCE_PREFIX_DELIM);
        for (String prefix : prefixs) {
            boolean allExists = true;
            path = PathUtils.concatPath(prefix, fileName);
            if (resourceSuffix != null) {
                path = path + resourceSuffix;
            }
            resources = context.getResources(path);
            if (resources != null && resources.length > 0) {
                for (int i = 0; i < resources.length; i++) {
                    Resource resource = resources[i];
                    if (!resource.exists()) {
                        allExists = false;
                        break;
                    }
                }
            }
            if (allExists) {
                break;
            }
        }
    } else {
        path = PathUtils.concatPath(resourcePrefix, fileName);
        if (resourceSuffix != null) {
            path = path + resourceSuffix;
        }
        resources = context.getResources(path);
    }
    return resources;
}
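
Because the second argument is a set of separator characters, a single delimiter constant can allow several alternative delimiters at once. A sketch assuming a hypothetical RESOURCE_PREFIX_DELIM of ",;" and hypothetical prefix values:

import org.apache.commons.lang.StringUtils;

public class PrefixSplitDemo {
    private static final String RESOURCE_PREFIX_DELIM = ",;"; // hypothetical value

    public static void main(String[] args) {
        String resourcePrefix = "classpath:dorado/resources;/WEB-INF/resources"; // hypothetical
        // Both ',' and ';' act as separators.
        for (String prefix : StringUtils.split(resourcePrefix, RESOURCE_PREFIX_DELIM)) {
            System.out.println(prefix);
        }
    }
}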

From source file:com.backelite.sonarqube.swift.coverage.CoberturaReportParser.java

private static void collectFileData(SMInputCursor clazz, CoverageMeasuresBuilder builder)
        throws XMLStreamException {
    SMInputCursor line = clazz.childElementCursor("lines").advance().childElementCursor("line");
    while (line.getNext() != null) {
        int lineId = Integer.parseInt(line.getAttrValue("number"));
        try {
            builder.setHits(lineId, (int) ParsingUtils.parseNumber(line.getAttrValue("hits"), Locale.ENGLISH));
        } catch (ParseException e) {
            throw new XmlParserException(e);
        }

        String isBranch = line.getAttrValue("branch");
        String text = line.getAttrValue("condition-coverage");
        if (StringUtils.equals(isBranch, "true") && StringUtils.isNotBlank(text)) {
            String[] conditions = StringUtils.split(StringUtils.substringBetween(text, "(", ")"), "/");
            builder.setConditions(lineId, Integer.parseInt(conditions[1]), Integer.parseInt(conditions[0]));
        }
    }
}
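
A small sketch of the condition-coverage parsing above, using a hypothetical Cobertura attribute value such as "50% (2/4)":

import org.apache.commons.lang.StringUtils;

public class ConditionCoverageDemo {
    public static void main(String[] args) {
        String text = "50% (2/4)"; // hypothetical condition-coverage attribute
        String[] conditions = StringUtils.split(StringUtils.substringBetween(text, "(", ")"), "/");
        // conditions[0] is the number of covered conditions, conditions[1] the total.
        System.out.println("covered=" + conditions[0] + ", total=" + conditions[1]);
    }
}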

From source file:ml.shifu.shifu.udf.DynamicBinningUDF.java

private void initSmallBinMap() throws IOException {
    long start = System.currentTimeMillis();
    Configuration jobConf = UDFContext.getUDFContext().getJobConf();
    int partNum = Integer.parseInt(jobConf.get("mapreduce.task.partition"));
    String partition = String.format("%05d", partNum);
    HdfsPartFile partFile = new HdfsPartFile(smallBinsPath + File.separator + "part-*-*" + partition + "*",
            SourceType.HDFS);
    try {
        String line = null;
        int cnt = 0;
        while ((line = partFile.readLine()) != null) {
            String[] fields = StringUtils.split(line, '\u0007');
            if (fields.length == 2) {
                smallBinsMap.put(Integer.parseInt(fields[0]), fields[1]);
            }
            cnt++;
        }
        log.info(cnt + " lines are loaded in " + (System.currentTimeMillis() - start) + " milli-seconds.");
    } catch (IOException e) {
        throw new IOException("Fail to load small bin map.", e);
    } finally {
        partFile.close();
    }
}
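
The part files above use the non-printing BEL character '\u0007' as a field delimiter. A minimal sketch with a hypothetical record line:

import org.apache.commons.lang.StringUtils;

public class ControlCharSplitDemo {
    public static void main(String[] args) {
        String line = "42" + '\u0007' + "[0.0, 1.5)"; // hypothetical bin record
        String[] fields = StringUtils.split(line, '\u0007');
        System.out.println(fields[0] + " -> " + fields[1]); // 42 -> [0.0, 1.5)
    }
}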

From source file:edu.ku.brc.af.tasks.subpane.formeditor.RowColDefPanel.java

/**
 * @param item the definition item whose comma-separated definition string is parsed
 * @param numInUse the number of definition entries currently in use
 * @param isRow whether this panel edits row definitions (false for column definitions)
 */
public RowColDefPanel(final FormViewDef.JGDefItem item, final int numInUse, final boolean isRow) {
    super(new BorderLayout());

    this.isRow = isRow;

    createUI(numInUse, isRow);

    DefaultListModel model = (DefaultListModel) itemList.getModel();
    int cnt = 0;
    for (String tok : StringUtils.split(item.getDefStr(), ",")) //$NON-NLS-1$
    {
        JGoodiesDefItem jgItem = new JGoodiesDefItem(tok, isRow);
        jgItem.setInUse(cnt < numInUse);
        items.add(jgItem);
        model.addElement(jgItem);
        cnt++;
    }
}

From source file:eionet.cr.dao.readers.PredicateLabelsReader.java

/**
 * Un-refines the given language code (e.g. "en-GB" becomes "en", "en_us" becomes "en", etc.).
 *
 * @param language the language code to un-refine
 * @return the primary language sub-tag, or an empty string if the input is null
 */
private String unrefineLanguage(String language) {

    return language == null ? "" : StringUtils.split(language, "-_")[0];
}
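
Because "-_" names two separator characters, a single call handles both dash- and underscore-separated language tags. A quick sketch:

import org.apache.commons.lang.StringUtils;

public class LanguageSplitDemo {
    public static void main(String[] args) {
        System.out.println(StringUtils.split("en-GB", "-_")[0]); // en
        System.out.println(StringUtils.split("en_us", "-_")[0]); // en
        System.out.println(StringUtils.split("en", "-_")[0]);    // en (no separator present)
    }
}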

From source file:azkaban.jobtype.hiveutils.RealHiveQueryExecutor.java

public RealHiveQueryExecutor(HiveConf hiveConf, CliSessionState ss, CliDriver cli) throws Exception {
    LOG.info("HiveConf = " + hiveConf);
    LOG.info("According to the conf, we're talking to the Hive hosted at: "
            + HiveConf.getVar(hiveConf, METASTORECONNECTURLKEY));

    // Expand out the hive aux jars since there was no shell script to do it
    // for us
    String orig = HiveConf.getVar(hiveConf, HIVEAUXJARS);
    String expanded = HiveUtils.expandHiveAuxJarsPath(orig);
    if (orig == null || orig.equals(expanded)) {
        LOG.info("Hive aux jars variable not expanded");
    } else {
        LOG.info("Expanded aux jars variable from [" + orig + "] to [" + expanded + "]");
        HiveConf.setVar(hiveConf, HIVEAUXJARS, expanded);
    }

    OptionsProcessor op = new OptionsProcessor();

    if (!op.process_stage1(new String[] {})) {
        throw new IllegalArgumentException("Can't process empty args?!?");
    }

    if (!ShimLoader.getHadoopShims().usesJobShell()) {
        // hadoop-20 and above - we need to augment classpath using hiveconf
        // components
        // see also: code in ExecDriver.java
        ClassLoader loader = hiveConf.getClassLoader();
        String auxJars = HiveConf.getVar(hiveConf, HiveConf.ConfVars.HIVEAUXJARS);
        LOG.info("Got auxJars = " + auxJars);

        if (StringUtils.isNotBlank(auxJars)) {
            loader = Utilities.addToClassPath(loader, StringUtils.split(auxJars, ","));
        }
        hiveConf.setClassLoader(loader);
        Thread.currentThread().setContextClassLoader(loader);
    }

    this.ss = ss;
    LOG.info("SessionState = " + ss);
    ss.out = System.out;
    ss.err = System.err;
    ss.in = System.in;

    if (!op.process_stage2(ss)) {
        throw new IllegalArgumentException("Can't process arguments from session state");
    }
    this.cli = cli;
    LOG.info("Cli = " + cli);
}
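
A minimal sketch of the aux-jars handling above, with a hypothetical comma-separated jar list; the resulting array is what would be appended to the classpath.

import org.apache.commons.lang.StringUtils;

public class AuxJarsSplitDemo {
    public static void main(String[] args) {
        String auxJars = "/opt/hive/aux/udfs.jar,/opt/hive/aux/serdes.jar"; // hypothetical
        for (String jar : StringUtils.split(auxJars, ",")) {
            System.out.println(jar);
        }
    }
}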