Example usage for org.springframework.batch.item ExecutionContext ExecutionContext

Introduction

This page collects example usages of the org.springframework.batch.item ExecutionContext default constructor, ExecutionContext().

Prototype

public ExecutionContext() 

Document

Default constructor.
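
A minimal sketch of typical use of this default constructor, assuming only the Spring Batch core API (the key names are illustrative):

ExecutionContext context = new ExecutionContext();
context.putLong("read.count", 42L);
context.putString("last.file", "input-003.csv");

long count = context.getLong("read.count");        // throws if the key is absent
long safe = context.getLong("write.count", 0L);    // falls back to 0 when the key is missing
boolean resumable = context.containsKey("last.file");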

Usage

From source file:gemlite.core.internal.batch.ColumnRangePartitioner.java

public Map<String, ExecutionContext> partitionCommon(int gridSize) {
    LogUtil.getLogger().info("ColumnRangePartitioner start...");
    LogUtil logUtil = LogUtil.newInstance();
    long min = jdbcTemplate.queryForObject("SELECT MIN(gfa." + column + ") from (" + table + ") gfa",
            Long.class);
    long max = jdbcTemplate.queryForObject("SELECT MAX(gfa." + column + ") from (" + table + ") gfa",
            Long.class);
    long targetSize = (max - min) / gridSize + 1;
    LogUtil.getLogger().info("max:" + max + " min:" + min);
    Map<String, ExecutionContext> result = new HashMap<String, ExecutionContext>();
    long number = 0;
    long start = min;
    long end = start + targetSize - 1;

    while (start <= max) {
        ExecutionContext value = new ExecutionContext();
        result.put("partition" + number, value);

        if (end >= max) {
            end = max;
        }
        value.putLong("min", start);
        value.putLong("max", end);
        LogUtil.getLogger().info("min:" + start + " max:" + end);
        start += targetSize;
        end += targetSize;
        number++;
    }
    LogUtil.getLogger().info("ColumnRangePartitioner end. Cost:" + logUtil.cost());
    return result;
}
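
Downstream, a worker step typically pulls the "min" and "max" values back out of its step-scoped ExecutionContext via late binding. A hedged sketch of such a reader bean; the table and column names are illustrative, not taken from the source above:

@Bean
@StepScope
public JdbcCursorItemReader<Map<String, Object>> workerReader(
        @Value("#{stepExecutionContext['min']}") Long min,
        @Value("#{stepExecutionContext['max']}") Long max,
        DataSource dataSource) {
    JdbcCursorItemReader<Map<String, Object>> reader = new JdbcCursorItemReader<>();
    reader.setDataSource(dataSource);
    // Each worker reads only the key range its partition's ExecutionContext describes.
    reader.setSql("SELECT * FROM source_table WHERE id BETWEEN " + min + " AND " + max);
    reader.setRowMapper(new ColumnMapRowMapper());
    return reader;
}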

From source file:org.beanio.spring.SpringTest.java

/**
 * Test BeanIO flat file writer.
 */
@Test
@SuppressWarnings("unchecked")
public void testItemWriter() throws Exception {
    ExecutionContext ec = new ExecutionContext();

    File tempFile = File.createTempFile("beanio-", "xml");
    tempFile.deleteOnExit();

    BeanIOFlatFileItemWriter<Map<String, Object>> writer = (BeanIOFlatFileItemWriter<Map<String, Object>>) context
            .getBean("itemWriter-standalone");
    writer.setResource(new FileSystemResource(tempFile));
    assertNotNull(writer);
    writer.open(ec);

    Map<String, Object> record = new HashMap<String, Object>();
    record.put("id", 1);
    record.put("name", "John");

    List<Map<String, Object>> list = new ArrayList<Map<String, Object>>();
    list.add(record);
    writer.write(list);
    writer.update(ec);

    long position = ec.getLong("BeanIOFlatFileItemWriter.current.count");
    assertTrue(position > 0);

    writer.close();
    assertFileMatches("out1.txt", tempFile);

    // test appendAllowed = true, and saveState = false
    writer = (BeanIOFlatFileItemWriter<Map<String, Object>>) context.getBean("itemWriter-append");
    writer.setResource(new FileSystemResource(tempFile));
    assertNotNull(writer);
    writer.open(ec);

    record.put("id", 2);
    record.put("name", "Joe");
    writer.write(list);
    writer.update(ec);
    assertEquals(position, ec.getLong("BeanIOFlatFileItemWriter.current.count"));

    writer.close();
    assertFileMatches("out2.txt", tempFile);

    // test restart
    writer = (BeanIOFlatFileItemWriter<Map<String, Object>>) context.getBean("itemWriter-standalone");
    writer.setResource(new FileSystemResource(tempFile));
    assertNotNull(writer);
    writer.open(ec);
    record.put("id", 3);
    record.put("name", "Kevin");
    writer.write(list);
    writer.update(ec);
    assertTrue(ec.getLong("BeanIOFlatFileItemWriter.current.count") > position);

    writer.close();
    assertFileMatches("out3.txt", tempFile);
}
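
The key read back above ("BeanIOFlatFileItemWriter.current.count") follows the ItemStream contract: open(ExecutionContext) restores saved state, update(ExecutionContext) saves it before each commit, and close() releases resources. A minimal sketch of a custom writer honoring that contract; the class and key name are illustrative:

public class CountingWriter implements ItemStreamWriter<String> {

    private static final String COUNT_KEY = "CountingWriter.current.count";
    private long count;

    @Override
    public void open(ExecutionContext executionContext) {
        // On restart, resume from the previously saved count; otherwise start at zero.
        count = executionContext.getLong(COUNT_KEY, 0L);
    }

    @Override
    public void write(List<? extends String> items) {
        count += items.size();
        // ... write the items to the underlying resource ...
    }

    @Override
    public void update(ExecutionContext executionContext) {
        // Called before each commit so the count survives a restart.
        executionContext.putLong(COUNT_KEY, count);
    }

    @Override
    public void close() {
        // release any underlying resources
    }
}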

From source file:fr.acxio.tools.agia.item.database.MapJdbcLookupProcessor.java

@Override
public Map<String, Object> process(Map<String, Object> sItem) throws Exception {
    Map<String, Object> aResult = null;
    if ((sItem != null) && !sItem.isEmpty()) {
        aResult = new HashMap<String, Object>(sItem);

        preparedStatementSetter.updateContext(aResult);
        jdbcCursorItemReader.open(new ExecutionContext());
        Map<String, Object> aRecord = null;
        int aLkpIdx = 0;
        do {
            aRecord = jdbcCursorItemReader.read();
            if (aRecord != null) {
                for (Entry<String, Object> aLookupCol : aRecord.entrySet()) {
                    aResult.put(String.format(lookupFieldFormat, aLkpIdx, aLookupCol.getKey()),
                            aLookupCol.getValue());
                }
                aLkpIdx++;
            }
        } while (aRecord != null);
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("Lookup record(s) found : {}", aLkpIdx);
        }
        jdbcCursorItemReader.close();

    }
    return aResult;
}
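
Note that the reader is opened with a brand-new ExecutionContext on every call, so no restart state is carried between lookups: each processed item re-runs the cursor query from scratch.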

From source file:fr.acxio.tools.agia.file.ExtendedMultiResourceItemReader.java

/**
 * Use the delegate to read the next item, jumping to the next resource if the
 * current one is exhausted. Items are appended to the buffer.
 *
 * @return next item from input, or null if all resources are exhausted
 */
private T readNextItem() throws Exception {

    T item = delegate.read();

    while (item == null) {

        currentResource++;

        if (currentResource >= resources.length) {
            return null;
        }

        delegate.close();
        delegate.setResource(resources[currentResource]);
        delegate.open(new ExecutionContext());

        item = delegate.read();
    }

    return item;
}

From source file:org.trpr.platform.batch.impl.spring.admin.repository.MapExecutionContextDao.java

private ExecutionContext copy(ExecutionContext original) {
    if (original == null)
        return null;
    Map<String, Object> m = new HashMap<String, Object>();
    for (java.util.Map.Entry<String, Object> me : original.entrySet()) {
        m.put(me.getKey(), me.getValue());
    }
    ExecutionContext copy = new ExecutionContext();
    Map<String, Object> map = serializer.deserialize(serializer.serialize(m));
    for (Map.Entry<String, Object> entry : map.entrySet()) {
        copy.put(entry.getKey(), entry.getValue());
    }
    return copy;
}
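
The serialize/deserialize round trip is what makes this a deep copy: mutating a value object held by the original afterwards cannot leak into the stored copy. A quick illustration, with hypothetical values:

ExecutionContext original = new ExecutionContext();
original.put("names", new ArrayList<String>());

ExecutionContext snapshot = copy(original);

// The snapshot holds its own clone of the list, so this mutation is not visible there.
((List<String>) original.get("names")).add("late-arrival");
assert ((List<String>) snapshot.get("names")).isEmpty();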

From source file:org.obiba.onyx.core.etl.participant.impl.AppointmentListUpdateListenerTest.java

@Test
public void testAfterUpdateCompleted() {
    Map<String, JobParameter> jobParameterMap = new HashMap<String, JobParameter>();
    jobParameterMap.put("date", new JobParameter(new Date()));
    JobInstance job = new JobInstance(1L, new JobParameters(jobParameterMap), "jobTest");
    StepExecution stepExecution = new StepExecution("completion", new JobExecution(job));
    stepExecution.setExitStatus(ExitStatus.COMPLETED);
    ExecutionContext context = new ExecutionContext();
    context.put("fileName", "fileName.xls");
    stepExecution.setExecutionContext(context);

    appointmentManagementServiceMock.saveAppointmentUpdateStats((AppointmentUpdateStats) EasyMock.anyObject());

    replay(appointmentManagementServiceMock);
    appointmentListUpdateListener.afterUpdateCompleted(stepExecution);
    verify(appointmentManagementServiceMock);

}

From source file:io.spring.JobConfiguration.java

@Bean
public Partitioner partitioner() {
    return new Partitioner() {
        @Override
        public Map<String, ExecutionContext> partition(int gridSize) {

            Map<String, ExecutionContext> partitions = new HashMap<>(gridSize);

            for (int i = 0; i < GRID_SIZE; i++) {
                ExecutionContext context1 = new ExecutionContext();
                context1.put("partitionNumber", i);

                partitions.put("partition" + i, context1);
            }

            return partitions;
        }
    };
}
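
A partitioner like this one is normally handed to a master step, which launches one worker step execution per map entry. A hedged sketch using the classic StepBuilderFactory API; stepBuilderFactory and workerStep() are assumed to exist elsewhere in the configuration:

@Bean
public Step masterStep() {
    return stepBuilderFactory.get("masterStep")
            // one worker execution per "partitionN" entry returned by partition(gridSize)
            .partitioner(workerStep().getName(), partitioner())
            .step(workerStep())
            .gridSize(GRID_SIZE)
            .taskExecutor(new SimpleAsyncTaskExecutor())
            .build();
}

Inside the worker step, the stored value is then available through #{stepExecutionContext['partitionNumber']}.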

From source file:gemlite.core.internal.batch.ColumnRangePartitioner.java

public Map<String, ExecutionContext> partitionSybase(int gridSize) {
    // Count the rows, then page through them to compute each partition's key range.
    String countSql = "select count(1) from (" + table + ") GF_table_c";
    Long count = jdbcTemplate.queryForObject(countSql, Long.class);
    long targetSize = count / gridSize + 1;
    String firstPageSql = "select top " + targetSize + " * from (" + table + ") GF_table order by gf_rowid asc";
    String remainingPagesSql = "select top " + targetSize + " * from (" + table
            + ") GF_table where gf_rowid > ?" + " order by gf_rowid asc";
    Map<String, ExecutionContext> result = new HashMap<String, ExecutionContext>();
    PagingRowMapper rowCallback = new PagingRowMapper();
    while (index < count) {
        if (page == 0) {
            if (LogUtil.getLogger().isDebugEnabled()) {
                LogUtil.getLogger().debug("SQL used for partition first page: [" + firstPageSql + "]");
            }
            getJdbcTemplate().query(firstPageSql, rowCallback);

            // record the key range covered by this page
            ExecutionContext value = new ExecutionContext();
            result.put("partition " + page, value);
            value.putString("min", startValue);
            value.putString("max", endValue);
        } else {
            if (LogUtil.getLogger().isDebugEnabled()) {
                LogUtil.getLogger()
                        .debug("SQL used for partition remaining pages: [" + remainingPagesSql + "]");
            }
            startValue = new String();
            getJdbcTemplate().query(remainingPagesSql, new String[] { endValue }, rowCallback);
            // record the key range covered by this page
            ExecutionContext value = new ExecutionContext();
            result.put("partition " + page, value);
            value.putString("min", startValue);
            value.putString("max", endValue);
        }
        page++;
    }
    return result;
}

From source file:org.sift.batch.tuple.MultiThreadedMultiResourceItemReader.java

/**
 * Helper method to open the next available Resource for reading
 * @throws Exception in case of errors in opening the next available Resource
 */
private void openNextAvailableResource() throws Exception {
    this.currentResourceIndex += 1;
    if (this.currentResourceIndex >= this.getResources().length) { // all Resource instances have been read
        return;
    }
    this.delegate.close();
    this.delegate.setResource(this.getResources()[this.currentResourceIndex]);
    this.getDelegate().open(new ExecutionContext());
    LOGGER.info("Opened resource {} for read. Resource index, resource count: [{}, {}]",
            this.getResources()[this.currentResourceIndex], this.currentResourceIndex,
            this.getResources().length);
}

From source file:org.beanio.spring.SpringTest.java

/**
 * Test BeanIO flat file writer for XML.
 */
@Test
@SuppressWarnings("unchecked")
public void testRestartableXmlItemWriter() throws Exception {
    ExecutionContext ec = new ExecutionContext();

    File tempFile = File.createTempFile("beanio-", "xml");
    tempFile.deleteOnExit();

    BeanIOFlatFileItemWriter<Human> writer = (BeanIOFlatFileItemWriter<Human>) context
            .getBean("itemWriter-xml");
    writer.setResource(new FileSystemResource(tempFile));
    writer.open(ec);

    List<Human> list = new ArrayList<Human>();
    list.add(new Human(Human.FRIEND, "John", 'M'));
    writer.write(list);
    writer.update(ec);

    long position = ec.getLong("BeanIOFlatFileItemWriter.current.count");
    assertTrue(position > 0);

    list.clear();
    list.add(new Human(Human.COWORKER, "Mike", 'M'));
    list.add(new Human(Human.NEIGHBOR, "Steve", 'M'));
    writer.write(list);
    writer.close();
    assertFileMatches("xout1.xml", tempFile);

    // open for restart
    writer = (BeanIOFlatFileItemWriter<Human>) context.getBean("itemWriter-xml");
    writer.setResource(new FileSystemResource(tempFile));
    writer.open(ec);

    list.clear();
    list.add(new Human(Human.FRIEND, "Jen", 'F'));
    writer.write(list);

    writer.update(ec);
    writer.close();
    assertFileMatches("xout2.xml", tempFile);
}