Example usage for org.apache.commons.lang.text StrSubstitutor StrSubstitutor

Introduction

This page collects example usages of the org.apache.commons.lang.text.StrSubstitutor constructor StrSubstitutor(Map valueMap, String prefix, String suffix).

Prototype

public StrSubstitutor(Map valueMap, String prefix, String suffix) 

Document

Creates a new instance and initializes it with the given value map, variable prefix, and variable suffix.
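
A minimal, self-contained sketch of this constructor (the class, values, and placeholder names below are illustrative, not taken from any of the projects listed under Usage): the map supplies the variable values, while the prefix and suffix define the placeholder syntax recognized in the source string.

import java.util.HashMap;
import java.util.Map;

import org.apache.commons.lang.text.StrSubstitutor;

public class StrSubstitutorSketch {
    public static void main(String[] args) {
        Map<String, String> values = new HashMap<String, String>();
        values.put("host", "localhost");
        values.put("port", "8080");
        // With the custom prefix "%(" and suffix ")", placeholders are written as %(name).
        StrSubstitutor sub = new StrSubstitutor(values, "%(", ")");
        // Prints: http://localhost:8080/index.html
        System.out.println(sub.replace("http://%(host):%(port)/index.html"));
    }
}

The examples below follow the same pattern with different placeholder syntaxes, e.g. "%(" and ")", "$[" and "]", "$(" and ")", or "%{" and "}".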

Usage

From source file:com.datatorrent.demos.ads.HdfsHashMapOutputOperator.java

@Override
public Path nextFilePath() {
    Map<String, String> params = new HashMap<String, String>();
    params.put(FNAME_SUB_PART_INDEX, String.valueOf(index));
    params.put(FNAME_SUB_OPERATOR_ID, Integer.toString(operatorId));
    StrSubstitutor sub = new StrSubstitutor(params, "%(", ")");
    index++;
    return new Path(sub.replace(getFilePathPattern().toString()));
}

From source file:com.datatorrent.benchmark.HdfsByteOutputOperator.java

@Override
public Path nextFilePath() {
    Map<String, String> params = new HashMap<String, String>();
    params.put(FNAME_SUB_PART_INDEX, String.valueOf(index));
    params.put(FNAME_SUB_CONTEXT_ID, Integer.toString(contextId));
    StrSubstitutor sub = new StrSubstitutor(params, "%(", ")");
    index++;
    return new Path(sub.replace(getFilePathPattern().toString()));
}

From source file:com.datatorrent.demos.frauddetect.operator.HdfsStringOutputOperator.java

@Override
public Path nextFilePath() {
    Map<String, String> params = new HashMap<String, String>();
    params.put(FNAME_SUB_PART_INDEX, String.valueOf(index));
    params.put(FNAME_SUB_CONTEXT_ID, contextId);
    StrSubstitutor sub = new StrSubstitutor(params, "%(", ")");
    index++;
    return new Path(sub.replace(getFilePathPattern().toString()));
}

From source file:com.datatorrent.lib.io.HdfsOutputOperator.java

private Path subFilePath(int index) {
    Map<String, String> params = new HashMap<String, String>();
    params.put(FNAME_SUB_PART_INDEX, String.valueOf(index));
    params.put(FNAME_SUB_CONTEXT_ID, Integer.toString(contextId));
    params.put(FNAME_SUB_OPERATOR_ID, this.getName());
    StrSubstitutor sub = new StrSubstitutor(params, "%(", ")");
    return new Path(sub.replace(filePath.toString()));
}

From source file:com.ariht.maven.plugins.config.generator.ConfigGeneratorImpl.java

/**
 * Merge templates with filters to generate config, scripts and property files.
 */
private void processTemplatesAndGenerateConfig() throws Exception {
    final DirectoryReader directoryReader = new DirectoryReader(log);
    final List<FileInfo> filters = directoryReader.readFiles(configGeneratorParameters.getFiltersBasePath(),
            configGeneratorParameters.getFiltersToIgnore());
    for (FileInfo fileInfo : filters) {
        fileInfo.lookForExternalFiles(configGeneratorParameters.getExternalFilterBasePaths());
    }
    final List<FileInfo> templates = directoryReader.readFiles(configGeneratorParameters.getTemplatesBasePath(),
            configGeneratorParameters.getTemplatesToIgnore());
    logOutputPath();

    // Get list of all properties in all filter files.
    final Set<String> allProperties = getAllProperties(filters);
    // Collection stores missing properties by file so this can be logged once at the end.
    final Map<String, Set<String>> missingPropertiesByFilename = new LinkedHashMap<String, Set<String>>();

    for (final FileInfo filter : filters) {
        final Properties properties = readFilterIntoProperties(filter);
        final LinkedHashMap<String, String> valueMap = Maps.newLinkedHashMap(Maps.fromProperties(properties));

        // No point checking for missing properties if all were found in the filter file
        boolean missingPropertyFound = false;
        for (String missingProperty : Sets.difference(allProperties, valueMap.keySet()).immutableCopy()) {
            valueMap.put(missingProperty, MISSING_PROPERTY_PREFIX + missingProperty + MISSING_PROPERTY_SUFFIX);
            missingPropertyFound = true;
        }
        final StrSubstitutor strSubstitutor = new StrSubstitutor(valueMap,
                configGeneratorParameters.getPropertyPrefix(), configGeneratorParameters.getPropertySuffix());
        for (final FileInfo template : templates) {
            generateConfig(template, filter, configGeneratorParameters.getOutputBasePath(), strSubstitutor,
                    missingPropertiesByFilename, missingPropertyFound);
        }
    }

    if (!missingPropertiesByFilename.keySet().isEmpty()) {
        final StringBuilder sb = new StringBuilder("Missing properties identified:\n");
        for (String filename : missingPropertiesByFilename.keySet()) {
            sb.append(filename).append(": ");
            sb.append(StringUtils.join(missingPropertiesByFilename.get(filename), ", ")).append("\n");
        }
        log.warn(sb.toString());
        if (configGeneratorParameters.isFailOnMissingProperty()) {
            throw new MojoExecutionException(sb.toString());
        }
    }
}

From source file:eu.eexcess.opensearch.recommender.PartnerConnector.java

/**
 * Replace the "{searchTerm}" in {@code searchEndpointTemplate} with
 * {@code searchQuery}.
 * 
 * @param searchEndpointTemplate
 *            the search end point link containing searchTerm placeholder
 * @param searchQuery
 *            the search term
 * @return the substituted link or null on error
 */
private String injectSearchQuery(String searchEndpointTemplate, String searchQuery) {

    try {
        Map<String, String> valuesMap = new HashMap<String, String>();
        valuesMap.put(searchTermsVariableName, searchQuery);
        StrSubstitutor substitutor = new StrSubstitutor(valuesMap, substitutorPrefix, substitutorSuffix);
        return substitutor.replace(partnerConfig.searchEndpoint);
    } catch (Exception e) {
        // ignore: fall through to the warning and null return below
    }

    logger.log(Level.WARNING, "failed to prepare search request url [" + searchEndpointTemplate
            + "] with query [" + searchQuery + "]");
    return null;
}

From source file:com.datatorrent.stram.LaunchContainerRunnable.java

/**
 * Build the command to launch the child VM in the container
 *
 * @param jvmID
 * @return
 */
public List<CharSequence> getChildVMCommand(String jvmID) {

    List<CharSequence> vargs = new ArrayList<CharSequence>(8);

    if (!StringUtils.isBlank(System.getenv(Environment.JAVA_HOME.key()))) {
        // node manager provides JAVA_HOME
        vargs.add(Environment.JAVA_HOME.$() + "/bin/java");
    } else {
        vargs.add("java");
    }

    String jvmOpts = dag.getAttributes().get(LogicalPlan.CONTAINER_JVM_OPTIONS);
    if (jvmOpts == null) {
        if (dag.isDebug()) {
            vargs.add(JAVA_REMOTE_DEBUG_OPTS);
        }
    } else {
        Map<String, String> params = new HashMap<String, String>();
        params.put("applicationId",
                Integer.toString(container.getId().getApplicationAttemptId().getApplicationId().getId()));
        params.put("containerId", Integer.toString(container.getId().getId()));
        StrSubstitutor sub = new StrSubstitutor(params, "%(", ")");
        vargs.add(sub.replace(jvmOpts));
        if (dag.isDebug() && !jvmOpts.contains("-agentlib:jdwp=")) {
            vargs.add(JAVA_REMOTE_DEBUG_OPTS);
        }
    }

    List<DAG.OperatorMeta> operatorMetaList = Lists.newArrayList();
    int bufferServerMemory = 0;
    for (PTOperator operator : sca.getContainer().getOperators()) {
        bufferServerMemory += operator.getBufferServerMemory();
        operatorMetaList.add(operator.getOperatorMeta());
    }
    Context.ContainerOptConfigurator containerOptConfigurator = dag.getAttributes()
            .get(LogicalPlan.CONTAINER_OPTS_CONFIGURATOR);
    jvmOpts = containerOptConfigurator.getJVMOptions(operatorMetaList);
    jvmOpts = parseJvmOpts(jvmOpts, ((long) bufferServerMemory) * MB_TO_B);
    LOG.info("Jvm opts {} for container {}", jvmOpts, container.getId());
    vargs.add(jvmOpts);

    Path childTmpDir = new Path(Environment.PWD.$(), YarnConfiguration.DEFAULT_CONTAINER_TEMP_DIR);
    vargs.add(String.format("-D%s=%s", StreamingContainer.PROP_APP_PATH, dag.assertAppPath()));
    vargs.add("-Djava.io.tmpdir=" + childTmpDir);
    vargs.add(String.format("-D%scid=%s", StreamingApplication.DT_PREFIX, jvmID));
    vargs.add("-Dhadoop.root.logger=" + (dag.isDebug() ? "DEBUG" : "INFO") + ",RFA");
    vargs.add("-Dhadoop.log.dir=" + ApplicationConstants.LOG_DIR_EXPANSION_VAR);

    String loggersLevel = System.getProperty(DTLoggerFactory.DT_LOGGERS_LEVEL);
    if (loggersLevel != null) {
        vargs.add(String.format("-D%s=%s", DTLoggerFactory.DT_LOGGERS_LEVEL, loggersLevel));
    }
    // Add main class and its arguments
    vargs.add(StreamingContainer.class.getName()); // main of Child

    vargs.add("1>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/stdout");
    vargs.add("2>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/stderr");

    // Final command
    StringBuilder mergedCommand = new StringBuilder(256);
    for (CharSequence str : vargs) {
        mergedCommand.append(str).append(" ");
    }
    List<CharSequence> vargsFinal = new ArrayList<CharSequence>(1);
    vargsFinal.add(mergedCommand.toString());
    return vargsFinal;

}

From source file:com.jaspersoft.jasperserver.war.amazon.client.AwsDataSourceServiceImpl.java

private void updateWithConnectionUrl(AwsDBInstanceDTO awsDBInstanceDTO) {
    for (String dbType : jdbcConnectionMap.keySet()) {
        if (awsDBInstanceDTO.getEngine().startsWith(dbType)) {
            Map<String, Object> dbProperties = jdbcConnectionMap.get(dbType);

            awsDBInstanceDTO.setJdbcTemplate((String) dbProperties.get(jdbcUrl));

            Map<String, String> values = new HashMap<String, String>();
            values.put("dbHost", awsDBInstanceDTO.getAddress());
            values.put("dbPort", String.valueOf(awsDBInstanceDTO.getPort()));
            values.put("dbName", awsDBInstanceDTO.getdBName());
            StrSubstitutor sub = new StrSubstitutor(values, "$[", "]");
            awsDBInstanceDTO.setJdbcUrl(sub.replace(dbProperties.get(jdbcUrl)));

            awsDBInstanceDTO.setJdbcDriverClass((String) dbProperties.get(jdbcDriverClass));
            break;
        }
    }
}

From source file:org.apache.maven.plugin.cxx.GenerateMojo.java

@Override
public void execute() throws MojoExecutionException, MojoFailureException {
    //Properties systemProperties = session.getSystemProperties();
    //Properties userProperties = session.getUserProperties();
    //Properties properties = session.getExecutionProperties();

    org.apache.maven.artifact.versioning.DefaultArtifactVersion defautCMakeVersion = new org.apache.maven.artifact.versioning.DefaultArtifactVersion(
            "3.0.0");
    org.apache.maven.artifact.versioning.DefaultArtifactVersion askedCMakeVersion = new org.apache.maven.artifact.versioning.DefaultArtifactVersion(
            cmakeMinVersion);
    boolean bCMake3OrAbove = (askedCMakeVersion.compareTo(defautCMakeVersion) >= 0);

    getLog().debug("CMake 3 or above asked (" + cmakeMinVersion + ") ? " + (bCMake3OrAbove ? "yes" : "no"));

    HashMap<String, String> valuesMap = new HashMap<String, String>();
    valuesMap.put("parentGroupId", parentGroupId);
    valuesMap.put("parentArtifactId", parentArtifactId);
    valuesMap.put("parentVersion", parentVersion);
    valuesMap.put("groupId", groupId);
    valuesMap.put("artifactId", artifactId);
    valuesMap.put("artifactName", artifactName);
    valuesMap.put("version", version);
    valuesMap.put("cmakeMinVersion", cmakeMinVersion);
    valuesMap.put("parentScope", bCMake3OrAbove ? "PARENT_SCOPE" : "");
    valuesMap.put("projectVersion", bCMake3OrAbove ? "VERSION ${TARGET_VERSION}" : "");
    valuesMap.put("scmConnection", "");

    //1/ search for properties
    // -DgroupId=fr.neticoa -DartifactName=QtUtils -DartifactId=qtutils -Dversion=1.0-SNAPSHOT

    if (StringUtils.isEmpty(archetypeArtifactId)) {
        throw new MojoExecutionException("archetypeArtifactId is empty ");
    }

    Map<String, String> resources = listResourceFolderContent(archetypeArtifactId, valuesMap);

    if (null == resources || resources.size() == 0) {
        throw new MojoExecutionException("Unable to find archetype : " + archetypeArtifactId);
    }
    //1.1/ search potential scm location of current dir
    // svn case
    SvnInfo basedirSvnInfo = SvnService.getSvnInfo(basedir, null, basedir.getAbsolutePath(), getLog(), true);
    if (basedirSvnInfo.isValide()) {
        valuesMap.put("scmConnection", "scm:svn:" + basedirSvnInfo.getSvnUrl());
    }
    // todo : handle other scm : git (git remote -v; git log --max-count=1), etc.

    //2/ unpack resource to destdir 
    getLog().info("archetype " + archetypeArtifactId + " has " + resources.entrySet().size() + " item(s)");
    getLog().info("basedir = " + basedir);

    StrSubstitutor substitutor = new StrSubstitutor(valuesMap, "$(", ")");
    String sExecutionDate = new SimpleDateFormat("yyyy-MM-dd-HH:mm:ss.SSS").format(new Date());
    for (Map.Entry<String, String> entry : resources.entrySet()) {
        String curRes = entry.getKey();
        String curDest = entry.getValue();
        InputStream resourceStream = null;
        resourceStream = getClass().getResourceAsStream(curRes);
        if (null == resourceStream) {
            try {
                resourceStream = new FileInputStream(new File(curRes));
            } catch (Exception e) {
                // handled later
                resourceStream = null;
            }
        }

        getLog().debug("resource stream to open : " + curRes);
        getLog().debug("destfile pattern : " + curDest);
        if (null != resourceStream) {
            String sRelativePath = curDest.replaceFirst(Pattern.quote(archetypeArtifactId + File.separator),
                    "");
            File newFile = new File(basedir + File.separator + sRelativePath);

            //3/ create empty dir struct; if needed using a descriptor 
            //create all non exists folders
            File newDirs = new File(newFile.getParent());
            if (Files.notExists(Paths.get(newDirs.getPath()))) {
                getLog().info("dirs to generate : " + newDirs.getAbsoluteFile());
                newDirs.mkdirs();
            }

            if (!newFile.getName().equals("empty.dir")) {
                getLog().info("file to generate : " + newFile.getAbsoluteFile());
                try {
                    if (!newFile.createNewFile()) {
                        // duplicate existing file
                        FileInputStream inStream = new FileInputStream(newFile);
                        File backFile = File.createTempFile(newFile.getName() + ".",
                                "." + sExecutionDate + ".back", newFile.getParentFile());
                        FileOutputStream outStream = new FileOutputStream(backFile);

                        IOUtils.copy(inStream, outStream);
                        // manage file times
                        //backFile.setLastModified(newFile.lastModified());
                        BasicFileAttributes attributesFrom = Files.getFileAttributeView(
                                Paths.get(newFile.getPath()), BasicFileAttributeView.class).readAttributes();
                        BasicFileAttributeView attributesToView = Files.getFileAttributeView(
                                Paths.get(backFile.getPath()), BasicFileAttributeView.class);
                        attributesToView.setTimes(attributesFrom.lastModifiedTime(),
                                attributesFrom.lastAccessTime(), attributesFrom.creationTime());

                        inStream.close();
                        outStream.close();
                    }
                    FileOutputStream outStream = new FileOutputStream(newFile);

                    //4/ variable substitution :
                    // change prefix and suffix to '$(' and ')'
                    // see https://commons.apache.org/proper/commons-lang/javadocs/api-2.6/org/apache/commons/lang/text/StrSubstitutor.html
                    String content = IOUtils.toString(resourceStream, "UTF8");
                    content = substitutor.replace(content);

                    //IOUtils.copy( resourceStream, outStream );
                    IOUtils.write(content, outStream, "UTF8");

                    outStream.close();
                    resourceStream.close();
                } catch (IOException e) {
                    getLog().error("File " + newFile.getAbsoluteFile() + " can't be created : " + e);
                }
            }
        } else {
            getLog().error("Unable to open resource " + curRes);
        }
    }
}

From source file:org.craftercms.cstudio.publishing.processor.ShellProcessor.java

@Override
public void doProcess(PublishedChangeSet changeSet, Map<String, String> parameters, PublishingTarget target)
        throws PublishingException {
    checkConfiguration(parameters, target);
    LOGGER.debug("Starting Shell Processor");
    ProcessBuilder builder = new ProcessBuilder();
    builder.directory(getWorkingDir(workingDir, parameters.get(FileUploadServlet.PARAM_SITE)));
    LOGGER.debug("Working directory is " + workingDir);
    HashMap<String, String> argumentsMap = buildArgumentsMap(getFileList(parameters, changeSet));
    if (asSingleCommand) {
        StrSubstitutor substitutor = new StrSubstitutor(argumentsMap, "%{", "}");
        String execComand = substitutor.replace(command);
        LOGGER.debug("Command to be Executed is " + execComand);
        builder.command("/bin/bash", "-c", execComand);

    } else {
        Set<String> keys = argumentsMap.keySet();
        ArrayList<String> commandAsList = new ArrayList<String>();
        commandAsList.add(command.trim());
        for (String key : keys) {
            if (!key.equalsIgnoreCase(INCLUDE_FILTER_PARAM)) {
                commandAsList.add(argumentsMap.get(key));
            }
        }
        LOGGER.debug("Command to be Executed is " + StringUtils.join(commandAsList, " "));
        builder.command(commandAsList);
    }

    builder.environment().putAll(enviroment);
    builder.redirectErrorStream(true);
    try {
        Process process = builder.start();
        process.waitFor();
        BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()));
        String str;
        while ((str = reader.readLine()) != null) {
            LOGGER.info("PROCESS OUTPUT :" + str);
        }
        reader.close();
        LOGGER.info("Process Finish with Exit Code " + process.exitValue());
        LOGGER.debug("Process Output ");
    } catch (IOException ex) {
        LOGGER.error("Error ", ex);
    } catch (InterruptedException e) {
        LOGGER.error("Error ", e);
    } finally {
        LOGGER.debug("End of Shell Processor");
    }
}