Example usage for org.springframework.batch.item ExecutionContext putString

List of usage examples for org.springframework.batch.item ExecutionContext putString

Introduction

In this page you can find the example usage for org.springframework.batch.item ExecutionContext putString.

Prototype


public void putString(String key, @Nullable String value) 

Source Link

Document

Adds a String value to the context.

Usage

From source file:ru.xxlabaza.test.batch.job.RangePartitioner.java

/**
 * Splits the repository's row count into {@code gridSize} contiguous index ranges,
 * one {@link ExecutionContext} per partition. Each context carries:
 * "name" ("partition-&lt;i&gt;"), "from" (inclusive start index) and "to" (inclusive
 * end index). The last partition absorbs the division remainder.
 *
 * @param gridSize number of partitions to create
 * @return map of partition name to its execution context
 * @throws IllegalArgumentException if the per-partition range would be smaller
 *                                  than the configured chunk size
 */
@Override
public Map<String, ExecutionContext> partition(int gridSize) {
    long totalItems = personRepository.count();
    System.out.println("\nTotal items: " + totalItems);

    // Divide in long arithmetic before narrowing: the original `(int) totalItems / gridSize`
    // truncated totalItems to int first, which overflows for very large tables.
    int range = (int) (totalItems / gridSize);
    if (range < chunkSize) {
        throw new IllegalArgumentException("Partition range " + range
                + " is smaller than chunk size " + chunkSize + "; reduce gridSize or chunkSize");
    }

    return IntStream.range(0, gridSize).boxed().map(index -> {
        ExecutionContext context = new ExecutionContext();
        context.putString("name", "partition-" + index);
        context.putInt("from", index * range);
        int nextIndex = index + 1;
        int to = nextIndex * range - 1;
        if (nextIndex == gridSize) {
            // Last partition picks up the rows that integer division dropped.
            to += totalItems % gridSize;
        }
        context.putInt("to", to);
        // Log the partition as soon as it is built (the original used a second
        // map() stage solely for this side effect).
        System.out.format("\nCREATED PARTITION: '%s', RANGE FROM %d, TO %d\n", context.getString("name"),
                context.getInt("from"), context.getInt("to"));
        return context;
    }).collect(toMap(context -> context.getString("name"), Function.identity()));
}

From source file:fr.acxio.tools.agia.cmis.CmisReader.java

/**
 * Persists this reader's restart state: stores the last processed timestamp
 * under {@code CONTEXT_KEY_LASTTIMESTAMP} so a restarted step can resume.
 */
@Override
public void update(ExecutionContext sExecutionContext) throws ItemStreamException {
    final String checkpoint = lastTimestamp;
    sExecutionContext.putString(CONTEXT_KEY_LASTTIMESTAMP, checkpoint);
}

From source file:de.langmi.spring.batch.examples.complex.support.CustomMultiResourcePartitioner.java

/**
 * Builds one {@link ExecutionContext} per injected resource: the resource URL
 * is stored under {@code keyName} and a derived output file name under
 * {@code "outputFileName"}. Partition keys are {@code PARTITION_KEY + index}.
 *
 * @see Partitioner#partition(int)
 */
@Override
public Map<String, ExecutionContext> partition(int gridSize) {
    Map<String, ExecutionContext> partitions = new HashMap<String, ExecutionContext>(gridSize);
    int partitionIndex = 0;
    for (Resource resource : resources) {
        Assert.state(resource.exists(), "Resource does not exist: " + resource);
        ExecutionContext context = new ExecutionContext();
        try {
            context.putString(keyName, resource.getURL().toExternalForm());
            context.put("outputFileName", createOutputFilename(partitionIndex, resource));
        } catch (IOException e) {
            throw new IllegalArgumentException("File could not be located for: " + resource, e);
        }
        partitions.put(PARTITION_KEY + partitionIndex, context);
        partitionIndex++;
    }
    return partitions;
}

From source file:de.langmi.spring.batch.examples.readers.file.multiresourcepartitioner.CustomMultiResourcePartitioner.java

/**
 * Creates a partition {@link ExecutionContext} for each configured resource,
 * exposing the resource URL under {@code inputKeyName} and a generated output
 * file name under {@code outputKeyName}.
 *
 * @see Partitioner#partition(int)
 */
@Override
public Map<String, ExecutionContext> partition(int gridSize) {
    Map<String, ExecutionContext> contexts = new HashMap<String, ExecutionContext>(gridSize);
    int index = 0;
    for (Resource resource : resources) {
        Assert.state(resource.exists(), "Resource does not exist: " + resource);
        ExecutionContext stepContext = new ExecutionContext();
        try {
            stepContext.putString(inputKeyName, resource.getURL().toExternalForm());
            stepContext.put(outputKeyName, createOutputFilename(index, resource));
        } catch (IOException e) {
            throw new IllegalArgumentException("File could not be located for: " + resource, e);
        }
        contexts.put(PARTITION_KEY + index, stepContext);
        index++;
    }
    return contexts;
}

From source file:org.springframework.cloud.dataflow.server.support.StepExecutionJacksonMixInTests.java

/**
 * Builds a sample StepExecution whose ExecutionContext holds one entry of
 * each value flavour (int, double, long, String, and two untyped puts) for
 * serialization round-trip testing.
 */
private StepExecution getStepExecution() {
    JobExecution job = new JobExecution(1L, null, "hi");
    final StepExecution step = new StepExecution("step1", job);
    job.createStepExecution("step1");
    final ExecutionContext context = step.getExecutionContext();

    context.putInt("counter", 1234);
    context.putDouble("myDouble", 1.123456d);
    context.putLong("Josh", 4444444444L);
    context.putString("awesomeString", "Yep");
    context.put("hello", "world");
    context.put("counter2", 9999);

    return step;
}

From source file:gemlite.core.internal.batch.ColumnRangePartitioner.java

/**
 * Sybase flavour of range partitioning: pages through {@code table} ordered by
 * {@code gf_rowid} using "select top N" queries and emits one
 * {@link ExecutionContext} per page, recording the page's first/last row id
 * under the keys "min" and "max".
 *
 * NOTE(review): loop termination relies on {@code rowCallback} mutating the
 * instance fields {@code index}, {@code page} is advanced here but
 * {@code startValue}/{@code endValue} are presumably updated by
 * {@code PagingRowMapper} as rows stream — confirm, otherwise this loop never ends.
 * NOTE(review): {@code table} is concatenated into SQL; safe only if it comes
 * from trusted configuration, never from user input.
 *
 * @param gridSize requested number of partitions (drives the page size)
 * @return map of partition name ("partition N") to its min/max context
 */
public Map<String, ExecutionContext> partitionSybase(int gridSize) {
    // Total row count sizes each page at roughly count/gridSize rows.
    String countSql = "select count(1) from (" + table + ") GF_table_c";
    Long count = jdbcTemplate.queryForObject(countSql, Long.class);
    long targetSize = count / gridSize + 1;
    // First page has no lower bound; later pages resume after the last seen gf_rowid.
    String firstPageSql = "select top " + targetSize + " * from (" + table + ") GF_table order by gf_rowid asc";
    String remainingPagesSql = "select top " + targetSize + " * from (" + table
            + ") GF_table where gf_rowid > ?" + " order by gf_rowid asc";
    Map<String, ExecutionContext> result = new HashMap<String, ExecutionContext>();
    PagingRowMapper rowCallback = new PagingRowMapper();
    while (index < count) {
        if (page == 0) {
            if (LogUtil.getLogger().isDebugEnabled()) {
                LogUtil.getLogger().debug("SQL used for partition first page: [" + firstPageSql + "]");
            }
            getJdbcTemplate().query(firstPageSql, rowCallback);

            // Record this page's row-id range as a partition.
            ExecutionContext value = new ExecutionContext();
            result.put("partition " + page, value);
            value.putString("min", startValue);
            value.putString("max", endValue);
        } else {
            if (LogUtil.getLogger().isDebugEnabled()) {
                LogUtil.getLogger()
                        .debug("SQL used for partition remaining pages: [" + remainingPagesSql + "]");
            }
            startValue = new String();
            getJdbcTemplate().query(remainingPagesSql, new String[] { endValue }, rowCallback);
            // Record this page's row-id range as a partition.
            ExecutionContext value = new ExecutionContext();
            result.put("partition " + page, value);
            value.putString("min", startValue);
            value.putString("max", endValue);
        }
        page++;
    }
    return result;
}

From source file:fr.acxio.tools.agia.alfresco.AlfrescoNodeReader.java

/**
 * Saves the reader's traversal state — the current directory path and the
 * stack of child indexes — into the step execution context so the walk can
 * resume after a restart.
 */
@Override
public void update(ExecutionContext sExecutionContext) throws ItemStreamException {
    Integer[] indexSnapshot = currentIndexes.toArray(new Integer[] {});
    sExecutionContext.putString(CONTEXT_KEY_CURRENTPATH, currentDirPath);
    sExecutionContext.put(CONTEXT_KEY_CURRENTINDEXES, indexSnapshot);
}

From source file:org.opensourcebank.batch.partition.HazelcastMapPartitioner.java

/**
 * Partitions the key range [min, max] of the named Hazelcast map into
 * contiguous buckets of roughly equal width. Each context carries "fromId"
 * and "toId" (both inclusive) plus "mapName". An empty map still yields a
 * single degenerate partition covering id 0.
 */
public Map<String, ExecutionContext> partition(int gridSize) {

    Map<Long, Object> items = Hazelcast.getMap(mapName);
    Set<Long> ids = items.keySet();

    long min = 0;
    long max = 0;

    if (ids.size() > 0) {
        min = Collections.min(ids);
        max = Collections.max(ids);
    }

    long targetSize = (max - min) / gridSize + 1;

    Map<String, ExecutionContext> partitions = new HashMap<String, ExecutionContext>();
    int number = 0;

    for (long start = min; start <= max; start += targetSize, number++) {
        // Clamp the last bucket's upper bound to the real maximum id.
        long end = Math.min(start + targetSize - 1, max);

        ExecutionContext bucket = new ExecutionContext();
        bucket.putLong("fromId", start);
        bucket.putLong("toId", end);
        bucket.putString("mapName", mapName);
        partitions.put("partition" + number, bucket);
    }

    return partitions;
}

From source file:batch.OutputFileListener.java

/**
 * Before the step runs, derives the output file name: takes the input name
 * from the context under {@code inputKeyName} when present (otherwise the
 * sanitized step name) and, unless one is already set, stores
 * {@code path + <basename> + ".csv"} under {@code outputKeyName}.
 */
@BeforeStep
public void createOutputNameFromInput(StepExecution stepExecution) {
    ExecutionContext executionContext = stepExecution.getExecutionContext();
    String inputName = executionContext.containsKey(inputKeyName)
            ? executionContext.getString(inputKeyName)
            : stepExecution.getStepName().replace(":", "-");
    if (!executionContext.containsKey(outputKeyName)) {
        executionContext.putString(outputKeyName, path + FilenameUtils.getBaseName(inputName) + ".csv");
    }
}

From source file:mpg.biochem.de.interbase.batch.OutputFileListener.java

/**
 * Derives the mapped-output file name before the step starts. The base name
 * comes from the context entry {@code inputKeyName} when present, else from
 * the sanitized step name; the result {@code path + <basename> + ".mapped.tab"}
 * is stored under {@code outputKeyName} only if not already present.
 */
@BeforeStep
public void createOutputNameFromInput(StepExecution stepExecution) {
    ExecutionContext context = stepExecution.getExecutionContext();
    String sourceName = stepExecution.getStepName().replace(":", "-");
    if (context.containsKey(inputKeyName)) {
        sourceName = context.getString(inputKeyName);
    }
    if (context.containsKey(outputKeyName)) {
        return;
    }
    context.putString(outputKeyName, path + FilenameUtils.getBaseName(sourceName) + ".mapped.tab");
}