Example usage for org.springframework.batch.item ExecutionContext putLong

List of usage examples for org.springframework.batch.item ExecutionContext putLong

Introduction

This page lists example usages of org.springframework.batch.item ExecutionContext putLong.

Prototype

public void putLong(String key, long value) 

Source Link

Document

Adds a Long value to the context.

Usage

From source file:org.opensourcebank.batch.reader.HazelcastMapItemReader.java

/**
 * Saves the current item ID into the step's {@link ExecutionContext} so the
 * step can resume from the same position after a restart.
 *
 * @param executionContext the context to record restart state into
 * @throws ItemStreamException declared by the ItemStream contract; not thrown here
 */
public void update(ExecutionContext executionContext) throws ItemStreamException {

    // putLong takes a primitive long; the previous new Long(...).longValue()
    // box/unbox round-trip (Long(long) is deprecated since Java 9) was redundant.
    executionContext.putLong(CURRENT_ITEM_ID, currentItemId);
}

From source file:batch.demo.job.MultiThreadedFlatFileItemReader.java

/**
 * Records this reader's restart position in the execution context.
 *
 * @param executionContext the context to record restart state into
 * @throws ItemStreamException declared by the ItemStream contract
 */
@Override
public void update(ExecutionContext executionContext) throws ItemStreamException {
    // Delegate first so the superclass records its own restart state.
    super.update(executionContext);
    if (isSaveState()) {
        // Persist the offset this reader should resume from on restart.
        executionContext.putLong(getExecutionContextUserSupport().getKey(START_AT_KEY), startAt);
    }
}

From source file:org.springframework.cloud.dataflow.server.support.StepExecutionJacksonMixInTests.java

/**
 * Builds a minimal job/step pair whose execution context exercises every
 * ExecutionContext value type handled by the Jackson mix-in under test.
 */
private StepExecution getStepExecution() {
    final JobExecution job = new JobExecution(1L, null, "hi");
    final StepExecution step = new StepExecution("step1", job);
    job.createStepExecution("step1");

    final ExecutionContext ctx = step.getExecutionContext();
    ctx.putInt("counter", 1234);
    ctx.putDouble("myDouble", 1.123456d);
    ctx.putLong("Josh", 4444444444L);
    ctx.putString("awesomeString", "Yep");
    ctx.put("hello", "world");
    ctx.put("counter2", 9999);

    return step;
}

From source file:org.opensourcebank.batch.partition.HazelcastMapPartitioner.java

/**
 * Splits the id range of the Hazelcast map into contiguous ranges, one
 * {@link ExecutionContext} per partition. Each context carries the inclusive
 * bounds ("fromId"/"toId") and the map name for the partition's reader.
 *
 * @param gridSize requested number of partitions (must be &gt; 0)
 * @return partition name ("partition0", "partition1", ...) mapped to the
 *         execution context describing that partition's id range
 */
public Map<String, ExecutionContext> partition(int gridSize) {

    Map<Long, Object> itemsMap = Hazelcast.getMap(mapName);
    Set<Long> itemsIds = itemsMap.keySet();

    long min = 0;
    long max = 0;

    if (!itemsIds.isEmpty()) {
        min = Collections.min(itemsIds);
        max = Collections.max(itemsIds);
    }

    // Round up so gridSize ranges always cover [min, max].
    long targetSize = (max - min) / gridSize + 1;

    Map<String, ExecutionContext> result = new HashMap<>();
    int number = 0;
    long start = min;
    long end = start + targetSize - 1;

    while (start <= max) {

        ExecutionContext value = new ExecutionContext();
        result.put("partition" + number, value);

        // Clamp the final range to the highest existing id.
        if (end >= max) {
            end = max;
        }
        value.putLong("fromId", start);
        value.putLong("toId", end);
        value.putString("mapName", mapName);
        start += targetSize;
        end += targetSize;
        number++;
    }

    return result;
}

From source file:gemlite.core.internal.batch.ColumnRangePartitioner.java

/**
 * Partitions the [min, max] range of {@code column} in {@code table} into
 * contiguous ranges, one {@link ExecutionContext} per partition, each carrying
 * inclusive "min"/"max" bounds for its range.
 *
 * @param gridSize requested number of partitions (must be &gt; 0)
 * @return partition name ("partition0", "partition1", ...) mapped to the
 *         execution context holding that partition's "min"/"max" bounds
 */
public Map<String, ExecutionContext> partitionCommon(int gridSize) {
    LogUtil.getLogger().info("ColumnRangePartitioner start...");
    LogUtil logUtil = LogUtil.newInstance();
    // NOTE(review): queryForObject returns null when the table is empty, which
    // would throw an NPE on unboxing here — confirm the table is never empty.
    long min = jdbcTemplate.queryForObject("SELECT MIN(gfa." + column + ") from (" + table + ") gfa",
            Long.class);
    long max = jdbcTemplate.queryForObject("SELECT MAX(gfa." + column + ") from (" + table + ") gfa",
            Long.class);
    // Round up so gridSize ranges always cover [min, max].
    long targetSize = (max - min) / gridSize + 1;
    LogUtil.getLogger().info(
            "+++++++++++++++++:" + max + "?:" + min + "+++++++++++++++++++++++++++++++++");
    Map<String, ExecutionContext> result = new HashMap<>();
    long number = 0;
    long start = min;
    long end = start + targetSize - 1;

    while (start <= max) {
        ExecutionContext value = new ExecutionContext();
        result.put("partition" + number, value);

        // Clamp the final range to the actual maximum.
        if (end >= max) {
            end = max;
        }
        value.putLong("min", start);
        value.putLong("max", end);
        LogUtil.getLogger().info("min:" + start + " max:" + end);
        start += targetSize;
        end += targetSize;
        number++;
    }
    LogUtil.getLogger().info("ColumnRangePartitioner end. Cost:" + logUtil.cost());
    return result;
}

From source file:batch.demo.job.FlatFilePartitioner.java

/**
 * Builds the {@link ExecutionContext} describing one file partition.
 *
 * @param partitionName the name of the partition (used for debug logging only)
 * @param startAt the number of bytes for a partition thread to skip before starting reading
 * @param itemsCount the number of items to read
 * @return the populated execution context (output)
 * @throws IllegalArgumentException if the underlying resource cannot be resolved to a file
 */
protected ExecutionContext createExecutionContext(String partitionName, long startAt, long itemsCount) {
    final ExecutionContext context = new ExecutionContext();
    context.putLong(startAtKeyName, startAt);
    context.putLong(itemsCountKeyName, itemsCount);
    try {
        final String path = resource.getFile().getPath();
        context.putString(resourceKeyName, "file:" + path);
    } catch (IOException e) {
        throw new IllegalArgumentException("File could not be located for: " + resource, e);
    }
    if (logger.isDebugEnabled()) {
        logger.debug("Added partition [" + partitionName + "] with [" + context + "]");
    }
    return context;
}

From source file:me.andpay.ti.spring.batch.FlatFileItemWriter.java

/**
 * Records the writer's restart position and written-line count in the
 * execution context so the stream can be restored after a restart.
 *
 * @param executionContext the context to record restart state into; never null
 * @throws ItemStreamException if the stream is not open or its position cannot be read
 * @see ItemStream#update(ExecutionContext)
 */
public void update(ExecutionContext executionContext) {
    if (state == null) {
        throw new ItemStreamException("ItemStream not open or already closed.");
    }

    Assert.notNull(executionContext, "ExecutionContext must not be null");

    if (!saveState) {
        return;
    }

    // Only state.position() can throw IOException; the puts cannot.
    long position;
    try {
        position = state.position();
    } catch (IOException e) {
        throw new ItemStreamException("ItemStream does not return current position properly", e);
    }

    executionContext.putLong(getKey(RESTART_DATA_NAME), position);
    executionContext.putLong(getKey(WRITTEN_STATISTICS_NAME), state.linesWritten);
}

From source file:egovframework.rte.bat.core.item.file.EgovPartitionFlatFileItemWriter.java

/**
 * Saves the current output position and written-line count into the execution
 * context so a restarted step can resume where it left off.
 * (The original comment here was encoding-garbled Korean; intent inferred from the code.)
 *
 * @param executionContext the context to record restart state into; never null
 * @see ItemStream#update(ExecutionContext)
 */
public void update(ExecutionContext executionContext) {
    if (state == null) {
        throw new ItemStreamException("ItemStream not open or already closed.");
    }

    Assert.notNull(executionContext, "ExecutionContext must not be null");

    if (saveState) {

        try {

            // Current byte offset within the output file.
            executionContext.putLong(getKey(RESTART_DATA_NAME), state.position());
        } catch (IOException e) {
            throw new ItemStreamException("ItemStream does not return current position properly", e);
        }

        // Number of lines written so far.
        executionContext.putLong(getKey(WRITTEN_STATISTICS_NAME), state.linesWritten);
    }
}

From source file:org.emonocot.job.io.StaxEventItemWriter.java

/**
 * Stores restart data — the current write position and record count — in the
 * given execution context when state saving is enabled.
 *
 * @param executionContext the context to record restart state into; must not
 *        be null when state saving is enabled
 * @see org.springframework.batch.item.ItemStream#update(ExecutionContext)
 */
public final void update(final ExecutionContext executionContext) {

    if (!saveState) {
        return;
    }
    Assert.notNull(executionContext, "ExecutionContext must not be null");
    executionContext.putLong(getKey(RESTART_DATA_NAME), getPosition());
    executionContext.putLong(getKey(WRITE_STATISTICS_NAME), currentRecordCount);
}

From source file:org.geoserver.backuprestore.writer.CatalogFileWriter.java

/**
 * Saves restart state (current file position and lines written) into the
 * execution context after delegating to the superclass.
 *
 * @param executionContext the context to record restart state into; never null
 * @see ItemStream#update(ExecutionContext)
 */
@Override
public void update(ExecutionContext executionContext) {
    super.update(executionContext);
    if (state == null) {
        throw new ItemStreamException("ItemStream not open or already closed.");
    }

    Assert.notNull(executionContext, "ExecutionContext must not be null");

    if (saveState) {
        try {
            // Current byte offset within the output file.
            executionContext.putLong(getExecutionContextKey(RESTART_DATA_NAME), state.position());
        } catch (IOException e) {
            // Position unavailable: report through the validation-exception hook
            // rather than throwing directly; restart data is then simply skipped.
            logValidationExceptions((T) null,
                    new ItemStreamException("ItemStream does not return current position properly", e));
        }

        // Lines written so far (recorded even if the position read failed).
        executionContext.putLong(getExecutionContextKey(WRITTEN_STATISTICS_NAME), state.linesWritten);
    }
}