Example usage for org.springframework.batch.core.scope.context.ChunkContext.getStepContext()

Introduction

On this page you can find example usages of ChunkContext.getStepContext() from the org.springframework.batch.core.scope.context package, drawn from the source files listed below.

Prototype

public StepContext getStepContext() 
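
The excerpts below come from real projects; as a quick orientation, here is a minimal sketch (not taken from any of the listed sources) of a Tasklet that navigates from the ChunkContext through getStepContext() to the job parameters and the step-scoped ExecutionContext. The class name ReportTasklet and the keys "input.file" and "processed.file" are illustrative assumptions.

import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.core.scope.context.StepContext;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.repeat.RepeatStatus;

public class ReportTasklet implements Tasklet {

    @Override
    public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
        StepContext stepContext = chunkContext.getStepContext();
        // Job parameters are exposed as a read-only Map keyed by parameter name
        Object inputFile = stepContext.getJobParameters().get("input.file");
        // getStepExecution() exposes the step-scoped ExecutionContext for passing state along
        stepContext.getStepExecution().getExecutionContext().put("processed.file", inputFile);
        return RepeatStatus.FINISHED;
    }
}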

Usage

From source file:org.obiba.onyx.core.etl.participant.impl.ArchiveAppointmentFileTasklet.java

public RepeatStatus execute(StepContribution stepContribution, ChunkContext context) throws Exception {

    if (getInputDirectory() != null && getInputDirectory().getFile() != null) {
        for (File file : getInputDirectory().getFile().listFiles(getFilter())) {
            archiveFile(file, context.getStepContext().getStepExecution().getExecutionContext());
        }
    }

    return RepeatStatus.FINISHED;
}

From source file:org.cloudfoundry.identity.uaa.scim.job.GenericSqlTasklet.java

@Override
public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
    if (sql == null) {
        return RepeatStatus.FINISHED;
    }
    if (sql.toLowerCase().startsWith("select")) {
        List<Map<String, Object>> list = jdbcTemplate.queryForList(sql);
        String result = list.toString();
        chunkContext.getStepContext().getStepExecution().getJobExecution().getExecutionContext().put("result",
                result.substring(0, Math.min(result.length(), 4096)));
    } else {
        int updated = jdbcTemplate.update(sql);
        contribution.incrementWriteCount(updated);
    }
    return RepeatStatus.FINISHED;
}
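
This tasklet stores the (truncated) query result in the job-level ExecutionContext under the key "result". A later tasklet in the same job could retrieve it through the same getStepContext() navigation; the fragment below is a hedged sketch and is not part of the original source.

@Override
public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
    // Read the value written by the previous step from the shared job-level ExecutionContext
    String result = (String) chunkContext.getStepContext().getStepExecution().getJobExecution()
            .getExecutionContext().get("result");
    // ... use the result, e.g. log it or feed it into a report ...
    return RepeatStatus.FINISHED;
}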

From source file:fr.acxio.tools.agia.file.pdf.SplitPDFTasklet.java

@Override
public RepeatStatus execute(StepContribution sContribution, ChunkContext sChunkContext) throws Exception {
    Map<String, Object> aSourceParams = new HashMap<String, Object>();
    aSourceParams.put(ResourceFactoryConstants.PARAM_STEP_EXEC,
            ((sChunkContext != null) && (sChunkContext.getStepContext() != null))
                    ? sChunkContext.getStepContext().getStepExecution()
                    : null);
    Resource[] aSourceResources = sourceFactory.getResources(aSourceParams);

    if (LOGGER.isInfoEnabled()) {
        LOGGER.info("{} file(s) to split", aSourceResources.length);
    }

    for (Resource aSourceResource : aSourceResources) {
        if (sContribution != null) {
            sContribution.incrementReadCount();
        }
        File aOriginFile = aSourceResource.getFile();
        if (aOriginFile.exists()) {
            int aOutputCount = splitFile(aSourceResource, sChunkContext);
            if (sContribution != null) {
                sContribution.incrementWriteCount(aOutputCount);
            }
        } else {
            throw new SplitPDFException("File not found: " + aOriginFile);
        }
    }

    return RepeatStatus.FINISHED;
}

From source file:fr.acxio.tools.agia.file.pdf.SplitPDFTasklet.java

private int splitFile(Resource sSourceResource, ChunkContext sChunkContext) throws Exception {
    Map<String, Object> aDestinationParams = new HashMap<String, Object>();
    aDestinationParams.put(ResourceFactoryConstants.PARAM_SOURCE, sSourceResource);
    aDestinationParams.put(ResourceFactoryConstants.PARAM_STEP_EXEC,
            ((sChunkContext != null) && (sChunkContext.getStepContext() != null))
                    ? sChunkContext.getStepContext().getStepExecution()
                    : null);
    Resource aDestination = null;
    int aResult = 0;

    PDDocumentContainer aDocumentContainer = null;

    try {
        aDocumentContainer = documentFactory.getDocument(sSourceResource.getFile());
        List<PDDocument> documents = aDocumentContainer.getParts();

        for (int i = 0; i < documents.size(); i++) {
            PDDocument doc = documents.get(i);
            // Output file factory
            int aTryCount = 10;
            do {
                aDestination = destinationFactory.getResource(aDestinationParams);
                aTryCount--;
            } while (!forceReplace && (aTryCount > 0) && (aDestination != null) && aDestination.exists());
            if ((aTryCount == 0) && !forceReplace) {
                throw new SplitPDFException("Cannot create a new destination filename");
            }
            if (aDestination != null) {
                if (aDestination.exists() && LOGGER.isWarnEnabled()) {
                    LOGGER.warn("Replacing {}", aDestination.getFile().getAbsolutePath());
                }
                writeDocument(doc, aDestination.getFile().getAbsolutePath());
                doc.close();
            } else {
                throw new SplitPDFException("No destination specified");
            }
            aResult++;
        }

    } finally {
        if (aDocumentContainer != null) {
            aDocumentContainer.close();
        }
    }
    return aResult;
}

From source file:com.gopivotal.spring.xd.module.jdbc.JdbcTasklet.java

private String runScripts(ChunkContext chunkContext, Iterable<Resource> scripts, String encoding)
        throws Exception {

    Assert.notNull(chunkContext, "a valid instance is required");

    StringBuffer messages = new StringBuffer();

    if (scripts != null) {
        for (Resource resource : scripts) {
            String sqlCommand = scriptToString(resource, encoding);
            String msg = runCommand(chunkContext.getStepContext(), sqlCommand);
            messages.append(msg).append("\n");
        }
    }

    return messages.toString();
}

From source file:com.github.jrrdev.mantisbtsync.core.jobs.projects.tasklets.MantisLoginTasklet.java

/**
 * {@inheritDoc}
 * @see org.springframework.batch.core.step.tasklet.Tasklet#execute(org.springframework.batch.core.StepContribution, org.springframework.batch.core.scope.context.ChunkContext)
 */
@Override
public RepeatStatus execute(final StepContribution contribution, final ChunkContext chunkContext)
        throws Exception {

    Assert.notNull(clientStub);

    // If auth manager is set, try to get the cookie
    if (authManager != null && authManager.getAuthCookie() != null) {
        clientStub._setProperty(HTTPConstants.HEADER_COOKIE, authManager.getAuthCookie());
    }

    final UserData data = clientStub.mc_login(userName, password);
    if (data != null && data.getAccess_level() != null) {
        chunkContext.getStepContext().getStepExecution().getExecutionContext().put("mantis.acess_level",
                data.getAccess_level());
    }

    return RepeatStatus.FINISHED;
}
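
Note that the access level above is written to the step-level ExecutionContext, which is not automatically visible to later steps. If a later step needed it, Spring Batch's ExecutionContextPromotionListener could promote the key to the job-level ExecutionContext; the configuration fragment below is an assumption-laden sketch, not part of the original project.

import org.springframework.batch.core.listener.ExecutionContextPromotionListener;

// Promote the key written by MantisLoginTasklet to the job ExecutionContext
ExecutionContextPromotionListener promotionListener = new ExecutionContextPromotionListener();
promotionListener.setKeys(new String[] { "mantis.acess_level" });
// register the listener on the step, e.g. via the step builder's listener(...) method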

From source file:org.obiba.onyx.core.purge.PurgeParticipantDataTasklet.java

public RepeatStatus execute(StepContribution stepContribution, ChunkContext context) {

    log.info("**** STARTING PARTICIPANT DATA PURGE ****");
    log.info("Current purge configuration is [{}] days",
            purgeParticipantDataService.getPurgeDataOlderThanInDays());
    long start = System.currentTimeMillis();

    List<Participant> participantsToBePurged = onyxDataPurge.getParticipantsToPurge();
    for (Participant participant : participantsToBePurged) {
        participantService.deleteParticipant(participant);
        log.info("Deleting Participant id = [{}] and related data :  ", participant.getId());
    }

    context.getStepContext().getStepExecution().getJobExecution().getExecutionContext().put("totalDeleted",
            participantsToBePurged.size());

    long end = System.currentTimeMillis();

    log.info("A total of [{}] Participants were deleted in [{}] ms.", participantsToBePurged.size(),
            end - start);

    log.info("**** PARTICIPANT DATA PURGE COMPLETED ****");

    return RepeatStatus.FINISHED;
}

From source file:fr.acxio.tools.agia.tasks.ZipFilesTasklet.java

@Override
public RepeatStatus execute(StepContribution sContribution, ChunkContext sChunkContext) throws Exception {

    // 1. Destination exists
    //    a. Overwrite => default behaviour
    //    b. Update => copy to temporary file, open, read entries, merge with new entries, write merged entries and stream
    // 2. New destination => default behaviour

    Map<String, Object> aSourceParams = new HashMap<String, Object>();
    aSourceParams.put(ResourceFactoryConstants.PARAM_STEP_EXEC,
            ((sChunkContext != null) && (sChunkContext.getStepContext() != null))
                    ? sChunkContext.getStepContext().getStepExecution()
                    : null);
    aSourceParams.put(ResourceFactoryConstants.PARAM_BASE_DIRECTORY, sourceBaseDirectory);
    Resource[] aSourceResources = sourceFactory.getResources(aSourceParams);
    Map<String, Object> aDestinationParams = new HashMap<String, Object>();

    if (LOGGER.isInfoEnabled()) {
        LOGGER.info("{} file(s) to zip", aSourceResources.length);
    }

    if (aSourceResources.length > 0) {

        aDestinationParams.put(ResourceFactoryConstants.PARAM_BASE_DIRECTORY, sourceBaseDirectory);
        aDestinationParams.put(ResourceFactoryConstants.PARAM_STEP_EXEC,
                ((sChunkContext != null) && (sChunkContext.getStepContext() != null))
                        ? sChunkContext.getStepContext().getStepExecution()
                        : null);
        Resource aDestination = destinationFactory.getResource(aDestinationParams);

        ZipArchiveOutputStream aZipArchiveOutputStream = null;
        try {
            aZipArchiveOutputStream = new ZipArchiveOutputStream(aDestination.getFile());

            sourceBaseDirectoryPath = sourceBaseDirectory.getFile().getCanonicalPath();

            for (Resource aSourceResource : aSourceResources) {
                zipResource(aSourceResource, aZipArchiveOutputStream, sContribution, sChunkContext);
            }
        } finally {
            if (aZipArchiveOutputStream != null) {
                aZipArchiveOutputStream.finish();
                aZipArchiveOutputStream.close();
            }
        }
    }

    return RepeatStatus.FINISHED;
}

From source file:fr.acxio.tools.agia.tasks.FilesOperationTaskletTest.java

@Test
public void testExecuteCopyWithChunkContext() throws Exception {
    FilesOperationTasklet aTasklet = new FilesOperationTasklet();
    ResourcesFactory aSourceFactory = mock(ResourcesFactory.class);
    Resource aFileResource1 = mock(Resource.class);
    when(aFileResource1.getFile()).thenReturn(new File("src/test/resources/testFiles/input.csv"));
    when(aFileResource1.exists()).thenReturn(true);
    when(aSourceFactory.getResources(anyMapOf(Object.class, Object.class)))
            .thenReturn(new Resource[] { aFileResource1 });
    ResourceFactory aDestinationFactory = mock(ResourceFactory.class);
    Resource aDestResource = mock(Resource.class);
    when(aDestResource.getFile()).thenReturn(new File("target/CP-input.csv"));
    when(aDestResource.exists()).thenReturn(false);
    Resource aRelativeResource = mock(Resource.class);
    when(aRelativeResource.getFile()).thenReturn(new File("target"));
    when(aDestResource.createRelative("/.")).thenReturn(aRelativeResource);
    when(aDestinationFactory.getResource(anyMapOf(Object.class, Object.class))).thenReturn(aDestResource);
    assertFalse(aDestResource.getFile().exists());
    aTasklet.setSourceFactory(aSourceFactory);
    aTasklet.setDestinationFactory(aDestinationFactory);
    aTasklet.setOperation(Operation.COPY);
    aTasklet.afterPropertiesSet();
    ChunkContext aChunkContext = mock(ChunkContext.class);
    StepContext aStepContext = mock(StepContext.class);
    when(aChunkContext.getStepContext()).thenReturn(aStepContext);
    assertEquals(RepeatStatus.FINISHED, aTasklet.execute(null, aChunkContext));
    verify(aChunkContext, times(2)).getStepContext();
    verify(aStepContext, times(1)).getStepExecution();
    assertTrue(aDestResource.getFile().exists());
}
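
In this test the mocked StepContext returns null from getStepExecution() (Mockito's default), which FilesOperationTasklet evidently tolerates. If a tasklet under test dereferenced the returned StepExecution, the mock would also need a stub along these lines, a sketch using spring-batch-test's MetaDataInstanceFactory:

when(aStepContext.getStepExecution()).thenReturn(MetaDataInstanceFactory.createStepExecution());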