Example usage for org.apache.commons.lang StringUtils lowerCase

List of usage examples for org.apache.commons.lang StringUtils lowerCase

Introduction

In this page you can find the example usage for org.apache.commons.lang StringUtils lowerCase.

Prototype

public static String lowerCase(String str) 

Source Link

Document

Converts a String to lower case, as per String#toLowerCase().

Usage

From source file:org.apache.ambari.server.api.services.UserPrivilegeService.java

/**
 * Builds the resource instance for a single privilege belonging to this user.
 *
 * @param privilegeId the identifier of the privilege resource
 * @return a {@code UserPrivilege} resource keyed by both the privilege id and
 *         the (lower-cased, null-safe) user name
 */
@Override
protected ResourceInstance createPrivilegeResource(String privilegeId) {
    final Map<Resource.Type, String> resourceIds = new HashMap<Resource.Type, String>();
    resourceIds.put(Resource.Type.UserPrivilege, privilegeId);
    // StringUtils.lowerCase is null-safe; presumably user lookups are
    // case-insensitive here — confirm against the user resource provider.
    resourceIds.put(Resource.Type.User, StringUtils.lowerCase(userName));
    return createResource(Resource.Type.UserPrivilege, resourceIds);
}

From source file:org.apache.ambari.server.api.services.UserService.java

/**
 * Creates a user resource instance.
 *
 * @param userName the user name; lower-cased (null-safely) before use as the
 *        resource key
 * @return a user resource instance
 */
private ResourceInstance createUserResource(String userName) {
    final String normalizedUserName = StringUtils.lowerCase(userName);
    return createResource(Resource.Type.User,
            Collections.singletonMap(Resource.Type.User, normalizedUserName));
}

From source file:org.apache.ambari.server.state.stack.upgrade.ConfigureTask.java

/**
 * Gets a map containing the following properties pertaining to the
 * configuration value to change:
 * <ul>
 * <li>{@link #PARAMETER_CONFIG_TYPE} - the configuration type (ie hdfs-site)</li>
 * <li>{@link #PARAMETER_KEY_VALUE_PAIRS} - key/value pairs for the
 * configurations</li>
 * <li>{@link #PARAMETER_TRANSFERS} - COPY/MOVE/DELETE changes</li>
 * <li>{@link #PARAMETER_REPLACEMENTS} - value replacements</li>
 * </ul>
 *
 * @param cluster
 *          the cluster to use when retrieving conditional properties to test
 *          against (not {@code null}).
 * @param configUpgradePack
 *          the upgrade pack holding the change definition referenced by this
 *          task's id; the task is skipped when {@code null}.
 * @return a map containing the changes to make. This could potentially be
 *         an empty map if no conditions are met. Callers should decide how to
 *         handle a configuration task that is unable to set any configuration
 *         values.
 */
public Map<String, String> getConfigurationChanges(Cluster cluster, ConfigUpgradePack configUpgradePack) {
    Map<String, String> configParameters = new HashMap<>();

    // guard: without a task id there is nothing to look up
    if (id == null || id.isEmpty()) {
        LOG.warn("Config task id is not defined, skipping config change");
        return configParameters;
    }

    if (configUpgradePack == null) {
        LOG.warn("Config upgrade pack is not defined, skipping config change");
        return configParameters;
    }

    // extract config change definition, referenced by current ConfigureTask
    ConfigUpgradeChangeDefinition definition = configUpgradePack.enumerateConfigChangesByID().get(id);
    if (definition == null) {
        LOG.warn(String.format("Can not resolve config change definition by id %s, " + "skipping config change",
                id));
        return configParameters;
    }

    // the first matched condition will win; conditions make configuration tasks singular in
    // the properties that can be set - when there is a condition the task will only contain
    // conditions
    List<Condition> conditions = definition.getConditions();
    if (null != conditions && !conditions.isEmpty()) {
        for (Condition condition : conditions) {
            String conditionConfigType = condition.getConditionConfigType();
            String conditionKey = condition.getConditionKey();
            String conditionValue = condition.getConditionValue();

            // always add the condition's target type just so that we have one to
            // return even if none of the conditions match
            configParameters.put(PARAMETER_CONFIG_TYPE, condition.getConfigType());

            // check the condition; if it passes, set the configuration properties
            // and break
            String checkValue = getDesiredConfigurationValue(cluster, conditionConfigType, conditionKey);

            if (conditionValue.equals(checkValue)) {
                List<ConfigurationKeyValue> configurations = new ArrayList<>(1);
                ConfigurationKeyValue keyValue = new ConfigurationKeyValue();
                keyValue.key = condition.getKey();
                keyValue.value = condition.getValue();
                configurations.add(keyValue);

                configParameters.put(ConfigureTask.PARAMETER_KEY_VALUE_PAIRS, m_gson.toJson(configurations));

                // first match wins: stop evaluating the remaining conditions
                return configParameters;
            }
        }
    }

    // this task is not a condition task, so process the other elements normally
    if (null != definition.getConfigType()) {
        configParameters.put(PARAMETER_CONFIG_TYPE, definition.getConfigType());
    }

    // for every <set key=foo value=bar/> add it to this list
    if (null != definition.getKeyValuePairs() && !definition.getKeyValuePairs().isEmpty()) {
        configParameters.put(ConfigureTask.PARAMETER_KEY_VALUE_PAIRS,
                m_gson.toJson(definition.getKeyValuePairs()));
    }

    // transfers: DELETE operations are filtered against their optional
    // if-key/if-type/if-value guards; everything else passes through unchanged
    List<Transfer> transfers = definition.getTransfers();
    if (null != transfers && !transfers.isEmpty()) {

        List<Transfer> allowedTransfers = new ArrayList<>();
        for (Transfer transfer : transfers) {
            if (transfer.operation == TransferOperation.DELETE) {
                boolean ifKeyIsNotBlank = StringUtils.isNotBlank(transfer.ifKey);
                boolean ifTypeIsNotBlank = StringUtils.isNotBlank(transfer.ifType);

                // a value is not required for a key-presence check;
                // skip the delete when the key must be ABSENT but is actually present
                if (ifKeyIsNotBlank && ifTypeIsNotBlank && transfer.ifKeyState == PropertyKeyState.ABSENT) {
                    boolean keyPresent = getDesiredConfigurationKeyPresence(cluster, transfer.ifType,
                            transfer.ifKey);
                    if (keyPresent) {
                        LOG.info("Skipping property delete for {}/{} as the key {} for {} is present",
                                definition.getConfigType(), transfer.deleteKey, transfer.ifKey,
                                transfer.ifType);
                        continue;
                    }
                }

                // skip the delete when the key must be PRESENT (no value expected) but is missing
                if (ifKeyIsNotBlank && ifTypeIsNotBlank && transfer.ifValue == null
                        && transfer.ifKeyState == PropertyKeyState.PRESENT) {
                    boolean keyPresent = getDesiredConfigurationKeyPresence(cluster, transfer.ifType,
                            transfer.ifKey);
                    if (!keyPresent) {
                        LOG.info("Skipping property delete for {}/{} as the key {} for {} is not present",
                                definition.getConfigType(), transfer.deleteKey, transfer.ifKey,
                                transfer.ifType);
                        continue;
                    }
                }

                // skip the delete when a specific value is required but does not match;
                // comparison lower-cases both sides (StringUtils.lowerCase is null-safe,
                // so a missing desired value simply fails the match)
                if (ifKeyIsNotBlank && ifTypeIsNotBlank && transfer.ifValue != null) {

                    String ifConfigType = transfer.ifType;
                    String ifKey = transfer.ifKey;
                    String ifValue = transfer.ifValue;

                    String checkValue = getDesiredConfigurationValue(cluster, ifConfigType, ifKey);
                    if (!ifValue.toLowerCase().equals(StringUtils.lowerCase(checkValue))) {
                        // skip adding
                        LOG.info(
                                "Skipping property delete for {}/{} as the value {} for {}/{} is not equal to {}",
                                definition.getConfigType(), transfer.deleteKey, checkValue, ifConfigType, ifKey,
                                ifValue);
                        continue;
                    }
                }
            }
            allowedTransfers.add(transfer);
        }
        configParameters.put(ConfigureTask.PARAMETER_TRANSFERS, m_gson.toJson(allowedTransfers));
    }

    // replacements are passed through as-is (no conditional filtering)
    List<Replace> replacements = definition.getReplacements();
    if (null != replacements && !replacements.isEmpty()) {
        configParameters.put(ConfigureTask.PARAMETER_REPLACEMENTS, m_gson.toJson(replacements));
    }

    return configParameters;
}

From source file:org.apache.archiva.redback.rest.services.DefaultUtilServices.java

/**
 * Returns the redback i18n resources for the given locale as a
 * newline-separated {@code key=value} listing, cached per locale.
 *
 * <p>For each bundle the default (no-locale) resource is loaded first so the
 * requested locale's entries override it. Missing bundles are logged and
 * skipped rather than failing the request.</p>
 *
 * @param locale the requested locale; a null/empty value is treated as "en"
 * @return the flattened properties for the locale
 * @throws RedbackServiceException declared by the service interface
 */
public String getI18nResources(String locale) throws RedbackServiceException {
    // Compute the cache key once (it was previously rebuilt for both the
    // get and the put): null/empty locale falls back to English.
    final String cacheKey = StringUtils.isEmpty(locale) ? "en" : StringUtils.lowerCase(locale);

    String cachedi18n = cachei18n.get(cacheKey);
    if (cachedi18n != null) {
        return cachedi18n;
    }

    Properties properties = new Properties();

    // load redback user api messages
    try {
        // load default first then requested locale
        loadResource(properties, "org/apache/archiva/redback/users/messages", null);
        loadResource(properties, "org/apache/archiva/redback/users/messages", locale);
    } catch (IOException e) {
        log.warn("skip error loading properties {}", "org/apache/archiva/redback/users/messages");
    }

    try {
        // load default first then requested locale
        loadResource(properties, "org/apache/archiva/redback/i18n/default", null);
        loadResource(properties, "org/apache/archiva/redback/i18n/default", locale);
    } catch (IOException e) {
        log.warn("skip error loading properties {}", "org/apache/archiva/redback/i18n/default");
    }

    StringBuilder output = new StringBuilder();

    for (Map.Entry<Object, Object> entry : properties.entrySet()) {
        output.append((String) entry.getKey()).append('=').append((String) entry.getValue());
        output.append('\n');
    }

    // Build the result string once, then cache and return the same instance
    // (previously toString() was called twice).
    final String i18n = output.toString();
    cachei18n.put(cacheKey, i18n);

    return i18n;
}

From source file:org.apache.archiva.rest.services.DefaultCommonServices.java

/**
 * Returns all i18n resources (redback's plus this application's own bundle)
 * for the given locale, cached per locale.
 *
 * @param locale the requested locale; a null/empty value is treated as "en"
 * @return the flattened properties for the locale
 * @throws ArchivaRestServiceException if the resources cannot be loaded
 */
@Override
public String getAllI18nResources(String locale) throws ArchivaRestServiceException {
    // Compute the cache key once (it was previously rebuilt for both the
    // get and the put): null/empty locale falls back to English.
    final String cacheKey = StringUtils.isEmpty(locale) ? "en" : StringUtils.lowerCase(locale);

    String cachedi18n = cachei18n.get(cacheKey);
    if (cachedi18n != null) {
        return cachedi18n;
    }

    try {
        // start from the redback properties, then overlay this app's bundle
        Properties all = utilServices.getI18nProperties(locale);
        StringBuilder resourceName = new StringBuilder(RESOURCE_NAME);
        loadResource(all, resourceName, locale);

        String i18n = fromProperties(all);
        cachei18n.put(cacheKey, i18n);
        return i18n;
    } catch (IOException e) {
        throw new ArchivaRestServiceException(e.getMessage(),
                Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), e);
    } catch (RedbackServiceException e) {
        throw new ArchivaRestServiceException(e.getMessage(), e.getHttpErrorCode(), e);
    }
}

From source file:org.apache.bookkeeper.common.testing.util.TimedOutTestsListener.java

/**
 * Builds a jstack-like dump of every live thread: a header line per thread
 * (name, daemon flag, priority, id, state) followed by its stack frames.
 *
 * @return the formatted thread dump
 */
static String buildThreadDump() {
    StringBuilder dump = new StringBuilder();
    Map<Thread, StackTraceElement[]> stackTraces = Thread.getAllStackTraces();
    for (Map.Entry<Thread, StackTraceElement[]> e : stackTraces.entrySet()) {
        Thread thread = e.getKey();
        // Snapshot the state once: it was previously read four times and can
        // change between reads, producing a self-contradictory header line.
        Thread.State state = thread.getState();
        // Locale.ROOT keeps the lower-casing locale-independent; state names
        // contain 'I' (WAITING, TIMED_WAITING), which the previous
        // default-locale lowerCase mangles under e.g. a Turkish locale.
        dump.append(String.format("\"%s\" %s prio=%d tid=%d %s\njava.lang.Thread.State: %s", thread.getName(),
                (thread.isDaemon() ? "daemon" : ""), thread.getPriority(), thread.getId(),
                Thread.State.WAITING.equals(state) ? "in Object.wait()"
                        : state.name().toLowerCase(java.util.Locale.ROOT),
                Thread.State.WAITING.equals(state) ? "WAITING (on object monitor)" : state));
        for (StackTraceElement stackTraceElement : e.getValue()) {
            dump.append("\n        at ");
            dump.append(stackTraceElement);
        }
        dump.append("\n");
    }
    return dump.toString();
}

From source file:org.apache.fineract.accounting.closure.storeglaccountbalance.data.GLClosureJournalEntryBalanceValidator.java

/**
 * Validates the request to generate a closure journal entry account balance report.
 *
 * @param officeId the office the report is scoped to; validated as mandatory
 * @param startClosure the GL closure marking the start of the reporting period (may be {@code null})
 * @param endClosure the GL closure marking the end of the reporting period; validated as mandatory
 */
public void validateGenerateReportRequest(final Long officeId, final GLClosureData startClosure,
        final GLClosureData endClosure) {

    final Long endClosureId = (endClosure != null) ? endClosure.getId() : null;

    // lower-cased resource name is embedded in the "error.msg.<resource>..." codes below
    final String resourceNameToLowerCase = StringUtils
            .lowerCase(StoreGLAccountBalanceResource.CLOSURE_ACCOUNT_BALANCE_REPORT_ENTITY_NAME);
    final List<ApiParameterError> dataValidationErrors = new ArrayList<>();
    final DataValidatorBuilder dataValidatorBuilder = new DataValidatorBuilder(dataValidationErrors)
            .resource(resourceNameToLowerCase);

    // office id and end closure id are mandatory request parameters
    dataValidatorBuilder.reset().parameter(UriQueryParameterHelper.OFFICE_ID_PARAMETER_NAME).value(officeId)
            .notBlank();
    dataValidatorBuilder.reset().parameter(UriQueryParameterHelper.END_CLOSURE_ID_PARAMETER_NAME)
            .value(endClosureId).notBlank();

    // a soft-deleted closure is treated as if it does not exist at all
    if (startClosure != null && startClosure.isDeleted()) {
        throw new GLClosureNotFoundException(startClosure.getId());
    }

    if (endClosure != null && endClosure.isDeleted()) {
        throw new GLClosureNotFoundException(endClosureId);
    }

    if (startClosure != null && endClosure != null) {
        final LocalDate startClosureClosingDate = startClosure.getClosingDate();
        final LocalDate endClosureClosingDate = endClosure.getClosingDate();

        // the reporting period must run forward in time
        if (endClosureClosingDate.isBefore(startClosureClosingDate)) {
            dataValidatorBuilder.failWithCodeNoParameterAddedToErrorCode(
                    "error.msg." + resourceNameToLowerCase
                            + ".end.closure.closing.date.cannot.be.before.start.closure.closing.date",
                    "Closing " + "date of end closure must be after closing date of start closure.");
        }

        // a zero-length period (start == end) is rejected
        if (startClosure.getId().equals(endClosure.getId())) {
            dataValidatorBuilder.failWithCodeNoParameterAddedToErrorCode(
                    "error.msg." + resourceNameToLowerCase + ".end.closure.cannot.be.equal.to.start.closure",
                    "End closure cannot be equal " + "to start closure.");
        }
    }

    // both closures, when supplied, must belong to the requested office
    if (officeId != null && startClosure != null && !startClosure.getOfficeId().equals(officeId)) {
        dataValidatorBuilder.failWithCodeNoParameterAddedToErrorCode(
                "error.msg." + resourceNameToLowerCase
                        + ".start.closure.office.id.must.be.equal.to.provided.office.id",
                "The start closure " + "office id is different from provided office id");
    }

    if (officeId != null && endClosure != null && !endClosure.getOfficeId().equals(officeId)) {
        dataValidatorBuilder.failWithCodeNoParameterAddedToErrorCode(
                "error.msg." + resourceNameToLowerCase
                        + ".end.closure.office.id.must.be.equal.to.provided.office.id",
                "The end closure " + "office id is different from provided office id");
    }

    // throw data validation exception if there are any validation errors 
    this.throwExceptionIfValidationWarningsExist(dataValidationErrors);
}

From source file:org.apache.fineract.infrastructure.dataexport.data.ExportDataValidator.java

/** 
 * validate the request to create a new data export tool
 * /*  w w w.  j a v a 2s  .  c  o  m*/
 * @param jsonCommand -- the JSON command object (instance of the JsonCommand class)
 * @return None
 **/
public void validateCreateDataExportRequest(final JsonCommand jsonCommand) {
    final String jsonString = jsonCommand.json();
    final JsonElement jsonElement = jsonCommand.parsedJson();

    if (StringUtils.isBlank(jsonString)) {
        throw new InvalidJsonException();
    }

    final Type typeToken = new TypeToken<Map<String, Object>>() {
    }.getType();
    this.fromJsonHelper.checkForUnsupportedParameters(typeToken, jsonString,
            DataExportApiConstants.CREATE_DATA_EXPORT_REQUEST_PARAMETERS);

    final List<ApiParameterError> dataValidationErrors = new ArrayList<>();
    final DataValidatorBuilder dataValidatorBuilder = new DataValidatorBuilder(dataValidationErrors)
            .resource(StringUtils.lowerCase(DataExportApiConstants.DATA_EXPORT_ENTITY_NAME));

    final String name = this.fromJsonHelper.extractStringNamed(DataExportApiConstants.NAME_PARAM_NAME,
            jsonElement);
    dataValidatorBuilder.reset().parameter(DataExportApiConstants.BASE_ENTITY_NAME_PARAM_NAME).value(name)
            .notBlank();

    final String baseEntity = this.fromJsonHelper
            .extractStringNamed(DataExportApiConstants.BASE_ENTITY_NAME_PARAM_NAME, jsonElement);
    dataValidatorBuilder.reset().parameter(DataExportApiConstants.BASE_ENTITY_NAME_PARAM_NAME).value(baseEntity)
            .notBlank();

    final String[] columns = this.fromJsonHelper.extractArrayNamed(DataExportApiConstants.COLUMNS_PARAM_NAME,
            jsonElement);
    dataValidatorBuilder.reset().parameter(DataExportApiConstants.COLUMNS_PARAM_NAME).value(columns).notBlank();

    throwExceptionIfValidationWarningsExist(dataValidationErrors);
}

From source file:org.apache.fineract.infrastructure.dataexport.helper.FileHelper.java

/**
 * Creates a {@link DataExportFileData} descriptor for a data export XML file.
 *
 * @param fileName base file name, without extension
 * @return the file descriptor, or {@code null} if creation failed (the
 *         failure is logged, best-effort style)
 **/
public static DataExportFileData createDataExportXmlFile(final String fileName) {
    try {
        final String extension = StringUtils.lowerCase(DataExportApiConstants.XML_FILE_FORMAT);
        final String fileNamePlusExtension = fileName + "." + extension;
        final File exportDirectory = FileHelper.getDataExportDirectoryPath();
        final File xmlFile = new File(exportDirectory, fileNamePlusExtension);

        return new DataExportFileData(xmlFile, fileNamePlusExtension,
                DataExportApiConstants.XML_FILE_CONTENT_TYPE);
    } catch (Exception exception) {
        // deliberate best-effort: log the failure and fall through to null
        logger.error(exception.getMessage(), exception);
        return null;
    }
}

From source file:org.apache.fineract.infrastructure.dataexport.helper.FileHelper.java

/**
 * Creates a data export CSV file/*from  w  w w  .j a  v a 2  s.  co  m*/
 * 
 * @param sqlRowSet
 * @param fileName
 * @return {@link DataExportFileData} object
 */
public static DataExportFileData createDataExportCsvFile(final SqlRowSet sqlRowSet, final String fileName,
        final HashMap<Long, CodeValueData> codeValueMap, final HashMap<Long, AppUserData> appUserMap,
        final DataExportCoreTable coreTable) {
    DataExportFileData dataExportFileData = null;

    try {
        final String fileNamePlusExtension = fileName + "."
                + StringUtils.lowerCase(DataExportApiConstants.CSV_FILE_FORMAT);
        final File parentDirectoryPath = FileHelper.getDataExportDirectoryPath();
        final File file = new File(parentDirectoryPath, fileNamePlusExtension);

        // create a new csv file on the server
        CsvFileHelper.createFile(sqlRowSet, file, codeValueMap, appUserMap, coreTable);

        dataExportFileData = new DataExportFileData(file, fileNamePlusExtension,
                DataExportApiConstants.CSV_FILE_CONTENT_TYPE);
    }

    catch (Exception exception) {
        logger.error(exception.getMessage(), exception);
    }

    return dataExportFileData;
}