Example usage for org.apache.commons.lang StringUtils capitalize

List of usage examples for org.apache.commons.lang StringUtils capitalize

Introduction

On this page you can find example usages of org.apache.commons.lang StringUtils capitalize.

Prototype

public static String capitalize(String str) 

Document

Capitalizes a String, changing the first letter to title case as per Character#toTitleCase(char).
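
For reference, a minimal sketch of the method's behavior; the input strings here are illustrative and not drawn from the examples that follow.

import org.apache.commons.lang.StringUtils;

public class CapitalizeDemo {
    public static void main(String[] args) {
        // Only the first character is changed; the rest of the string is left untouched.
        System.out.println(StringUtils.capitalize("cat"));   // "Cat"
        System.out.println(StringUtils.capitalize("cAt"));   // "CAt"
        // null-safe and empty-safe
        System.out.println(StringUtils.capitalize(null));    // null
        System.out.println(StringUtils.capitalize(""));      // ""
        // the inverse operation used by several of the examples below
        System.out.println(StringUtils.uncapitalize("Cat")); // "cat"
    }
}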

Usage

From source file: org.apache.hadoop.metrics2.lib.MutableHistogram.java

public MutableHistogram(String name, String description) {
    this.name = StringUtils.capitalize(name);
    this.desc = StringUtils.uncapitalize(description);
    sample = new ExponentiallyDecayingSample(DEFAULT_SAMPLE_SIZE, DEFAULT_ALPHA);
    count = new AtomicLong();
    min = new AtomicLong(Long.MAX_VALUE);
    max = new AtomicLong(Long.MIN_VALUE);
    sum = new AtomicLong();
}

From source file: org.apache.hadoop.metrics2.lib.MutableMetricsFactory.java

protected String getName(Field field) {
    return StringUtils.capitalize(field.getName());
}

From source file: org.apache.hadoop.metrics2.lib.MutableMetricsFactory.java

protected String getName(Method method) {
    String methodName = method.getName();
    if (methodName.startsWith("get")) {
        return StringUtils.capitalize(methodName.substring(3));
    }
    return StringUtils.capitalize(methodName);
}
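
A small sketch of the naming rule these two helpers implement; the member names below are hypothetical, not taken from Hadoop's metrics sources.

import org.apache.commons.lang.StringUtils;

public class MetricNameDemo {
    // Mirrors getName(Method) above: strip a leading "get", then capitalize.
    static String metricName(String methodName) {
        if (methodName.startsWith("get")) {
            return StringUtils.capitalize(methodName.substring(3));
        }
        return StringUtils.capitalize(methodName);
    }

    public static void main(String[] args) {
        System.out.println(metricName("getCacheHitRatio")); // "CacheHitRatio"
        System.out.println(metricName("cacheMissCount"));   // "CacheMissCount"
        // getName(Field) simply capitalizes, e.g. a field named "numOps" becomes "NumOps".
    }
}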

From source file: org.apache.hadoop.metrics2.lib.MutableQuantiles.java

/**
 * Instantiates a new {@link MutableQuantiles} for a metric that rolls itself
 * over on the specified time interval.
 * 
 * @param name
 *          of the metric
 * @param description
 *          long-form textual description of the metric
 * @param sampleName
 *          type of items in the stream (e.g., "Ops")
 * @param valueName
 *          type of the values
 * @param interval
 *          rollover interval (in seconds) of the estimator
 */
public MutableQuantiles(String name, String description, String sampleName, String valueName, int interval) {
    String ucName = StringUtils.capitalize(name);
    String usName = StringUtils.capitalize(sampleName);
    String uvName = StringUtils.capitalize(valueName);
    String desc = StringUtils.uncapitalize(description);
    String lsName = StringUtils.uncapitalize(sampleName);
    String lvName = StringUtils.uncapitalize(valueName);

    numInfo = info(ucName + "Num" + usName,
            String.format("Number of %s for %s with %ds interval", lsName, desc, interval));
    // Construct the MetricsInfos for the quantiles, converting to percentiles
    quantileInfos = new MetricsInfo[quantiles.length];
    String nameTemplate = ucName + "%dthPercentile" + uvName;
    String descTemplate = "%d percentile " + lvName + " with " + interval + " second interval for " + desc;
    for (int i = 0; i < quantiles.length; i++) {
        int percentile = (int) (100 * quantiles[i].quantile);
        quantileInfos[i] = info(String.format(nameTemplate, percentile),
                String.format(descTemplate, percentile));
    }

    estimator = new SampleQuantiles(quantiles);

    this.interval = interval;
    scheduler.scheduleAtFixedRate(new RolloverSample(this), interval, interval, TimeUnit.SECONDS);
}
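
To make the derived metric names concrete, here is a minimal sketch using hypothetical constructor arguments (name "fooLatency", sampleName "Ops", valueName "Latency"):

import org.apache.commons.lang.StringUtils;

public class QuantileNamingDemo {
    public static void main(String[] args) {
        String ucName = StringUtils.capitalize("fooLatency"); // "FooLatency"
        String usName = StringUtils.capitalize("Ops");        // "Ops"
        String uvName = StringUtils.capitalize("Latency");    // "Latency"

        // Counter info name, as built above: "FooLatencyNumOps"
        System.out.println(ucName + "Num" + usName);
        // Per-quantile info name for the 75th percentile: "FooLatency75thPercentileLatency"
        System.out.println(String.format(ucName + "%dthPercentile" + uvName, 75));
    }
}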

From source file: org.apache.hadoop.metrics2.lib.MutableStat.java

/**
 * Construct a sample statistics metric
 * @param name        of the metric
 * @param description of the metric
 * @param sampleName  of the metric (e.g. "Ops")
 * @param valueName   of the metric (e.g. "Time", "Latency")
 * @param extended    create extended stats (stdev, min/max etc.) by default.
 */
public MutableStat(String name, String description, String sampleName, String valueName, boolean extended) {
    String ucName = StringUtils.capitalize(name);
    String usName = StringUtils.capitalize(sampleName);
    String uvName = StringUtils.capitalize(valueName);
    String desc = StringUtils.uncapitalize(description);
    String lsName = StringUtils.uncapitalize(sampleName);
    String lvName = StringUtils.uncapitalize(valueName);
    numInfo = info(ucName + "Num" + usName, "Number of " + lsName + " for " + desc);
    avgInfo = info(ucName + "Avg" + uvName, "Average " + lvName + " for " + desc);
    stdevInfo = info(ucName + "Stdev" + uvName, "Standard deviation of " + lvName + " for " + desc);
    iMinInfo = info(ucName + "IMin" + uvName, "Interval min " + lvName + " for " + desc);
    iMaxInfo = info(ucName + "IMax" + uvName, "Interval max " + lvName + " for " + desc);
    minInfo = info(ucName + "Min" + uvName, "Min " + lvName + " for " + desc);
    maxInfo = info(ucName + "Max" + uvName, "Max " + lvName + " for " + desc);
    this.extended = extended;
}

From source file: org.apache.hadoop.metrics2.lib.MutableStatShare.java

/**
 * Construct a sample statistics metric
 * @param name        of the metric
 * @param description of the metric
 * @param sampleName  of the metric (e.g. "Ops")
 * @param valueName   of the metric (e.g. "Time", "Latency")
 * @param extended    create extended stats (stdev, min/max etc.) by default.
 */
public MutableStatShare(String name, String description, String sampleName, String valueName, boolean extended,
        MutableCounterLong bytes) {
    String ucName = StringUtils.capitalize(name);
    String usName = StringUtils.capitalize(sampleName);
    String uvName = StringUtils.capitalize(valueName);
    String desc = StringUtils.uncapitalize(description);
    String lsName = StringUtils.uncapitalize(sampleName);
    String lvName = StringUtils.uncapitalize(valueName);
    numInfo = info(ucName + "Num" + usName, "Number of " + lsName + " for " + desc);
    avgInfo = info(ucName + "Avg" + uvName, "Average " + lvName + " for " + desc);
    stdevInfo = info(ucName + "Stdev" + uvName, "Standard deviation of " + lvName + " for " + desc);
    iMinInfo = info(ucName + "IMin" + uvName, "Interval min " + lvName + " for " + desc);
    iMaxInfo = info(ucName + "IMax" + uvName, "Interval max " + lvName + " for " + desc);
    minInfo = info(ucName + "Min" + uvName, "Min " + lvName + " for " + desc);
    maxInfo = info(ucName + "Max" + uvName, "Max " + lvName + " for " + desc);
    initInfo = info(ucName + "Init" + uvName, "Init bytes " + lvName + " for " + desc);
    endInfo = info(ucName + "End" + uvName, "Total bytes " + lvName + " for " + desc);
    intervalInfo = info(ucName + "Interval" + uvName, "Total bytes " + lvName + " for " + desc);
    bytesInfo = info(ucName + "Class" + uvName, "Total bytes " + lvName + " for " + desc);
    sharedInfo = info(ucName + "Shared" + uvName, "Total bytes " + lvName + " for " + desc);
    weightInfo = info(ucName + "Weight" + uvName, "Value " + lvName + " for " + desc);
    queuedInfo = info(ucName + "Queued" + "Requests", "num requests" + lvName + " for " + desc);
    this.queuedRequests = 0;
    this.totalBytesProcessed = bytes;
    this.initTotalBytesProcessed = bytes.value();
    this.extended = extended;
}

From source file: org.apache.hadoop.yarn.webapp.Router.java

private <T> Class<? extends T> find(Class<T> cls, String pkg, String cname) {
    String name = StringUtils.capitalize(cname);
    Class<? extends T> found = load(cls, djoin(pkg, name));
    if (found == null) {
        found = load(cls, djoin(pkg, "webapp", name));
    }
    if (found == null) {
        found = load(cls, join(hostClass.getName(), '$', name));
    }
    return found;
}
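
As a rough illustration (the route, package, and class names below are hypothetical), a cname of "cluster" would be capitalized and then resolved in this order:

// StringUtils.capitalize("cluster") -> "Cluster"; the lookup then tries, in order:
//   1. <pkg>.Cluster
//   2. <pkg>.webapp.Cluster
//   3. <hostClass name>$Cluster   (a nested class of the web app's host class)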

From source file: org.apache.hive.common.util.HiveStringUtils.java

/**
 * Convert SOME_STUFF to SomeStuff
 *
 * @param s input string
 * @return camelized string
 */
public static String camelize(String s) {
    StringBuilder sb = new StringBuilder();
    String[] words = split(s.toLowerCase(Locale.US), ESCAPE_CHAR, '_');

    for (String word : words) {
        sb.append(StringUtils.capitalize(word));
    }

    return sb.toString();
}
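
A runnable approximation of the helper above, using plain String.split in place of the class's escape-aware split helper; the inputs are illustrative.

import java.util.Locale;

import org.apache.commons.lang.StringUtils;

public class CamelizeDemo {
    // Lowercase, split on '_', capitalize each word, and concatenate.
    static String camelize(String s) {
        StringBuilder sb = new StringBuilder();
        for (String word : s.toLowerCase(Locale.US).split("_")) {
            sb.append(StringUtils.capitalize(word));
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        System.out.println(camelize("SOME_STUFF"));  // "SomeStuff"
        System.out.println(camelize("column_name")); // "ColumnName"
    }
}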

From source file: org.apache.hoya.yarn.appmaster.web.view.ContainerStatsBlock.java

@Override
protected void render(Block html) {
    // TODO Probably better to just get a copy of this list for us to avoid the repeated synchronization?
    // does this change if we have 50 node, 100node, 500 node hoya clusters?
    final Map<String, RoleInstance> containerInstances = getContainerInstances(
            hoya.getAppState().cloneActiveContainerList());

    for (Entry<String, RoleStatus> entry : hoya.getRoleStatusByName().entrySet()) {
        final String name = entry.getKey();
        final RoleStatus roleStatus = entry.getValue();

        DIV<Hamlet> div = html.div("role-info ui-widget-content ui-corner-all");

        List<ClusterNode> nodesInRole;
        try {
            nodesInRole = clusterOps.listClusterNodesInRole(name);
        } catch (Exception e) {
            log.error("Could not fetch containers for role: " + name, e);
            nodesInRole = Collections.emptyList();
        }

        div.h2(BOLD, StringUtils.capitalize(name));

        // Generate the details on this role
        Iterable<Entry<String, Integer>> stats = roleStatus.buildStatistics().entrySet();
        generateRoleDetails(div, "role-stats-wrap", "Specifications",
                Iterables.transform(stats, stringIntPairFunc));

        // Sort the ClusterNodes by their name (containerid)
        Collections.sort(nodesInRole, new ClusterNodeNameComparator());

        // Generate the containers running this role
        generateRoleDetails(div, "role-stats-containers", "Containers",
                Iterables.transform(nodesInRole, new Function<ClusterNode, Entry<TableContent, String>>() {

                    @Override
                    public Entry<TableContent, String> apply(ClusterNode input) {
                        final String containerId = input.name;

                        if (containerInstances.containsKey(containerId)) {
                            RoleInstance roleInst = containerInstances.get(containerId);
                            if (roleInst.container.getNodeHttpAddress() != null) {
                                return Maps.<TableContent, String>immutableEntry(
                                        new TableAnchorContent(containerId,
                                                buildNodeUrlForContainer(
                                                        roleInst.container.getNodeHttpAddress(), containerId)),
                                        null);
                            }
                        }
                        return Maps.immutableEntry(new TableContent(input.name), null);
                    }

                }));

        ClusterDescription desc = hoya.getAppState().getClusterStatus();
        Map<String, String> options = desc.getRole(name);
        Iterable<Entry<TableContent, String>> tableContent;

        // Generate the pairs of data in the expected form
        if (null != options) {
            tableContent = Iterables.transform(options.entrySet(), stringStringPairFunc);
        } else {
            // Or catch that we have no options and provide "empty"
            tableContent = Collections.<Entry<TableContent, String>>emptySet();
        }

        // Generate the options used by this role
        generateRoleDetails(div, "role-options-wrap", "Role Options", tableContent);

        // Close the div for this role
        div._();
    }
}

From source file: org.apache.hoya.yarn.appmaster.web.view.IndexBlock.java

private String getProviderName() {
    String providerServiceName = providerService.getName().toLowerCase();

    // Get HBase properly capitalized
    if (providerServiceName.contains("hbase")) {
        return HBASE;
    }

    return StringUtils.capitalize(providerServiceName);
}
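
For illustration (the provider names are hypothetical): a provider named "accumulo" comes back as "Accumulo" via StringUtils.capitalize, while any name containing "hbase" short-circuits to the HBASE constant so the intended mixed-case spelling is preserved.

// "accumulo"   -> StringUtils.capitalize("accumulo") -> "Accumulo"
// "hoya-hbase" -> contains "hbase"                   -> returns the HBASE constant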