List of usage examples for org.springframework.batch.item.ExecutionContext (constructor)
public ExecutionContext()
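Before the project examples below, here is a minimal standalone sketch of how the no-arg constructor is typically combined with the typed put/get accessors; the key names used are illustrative only.

import org.springframework.batch.item.ExecutionContext;

public class ExecutionContextExample {

    public static void main(String[] args) {
        // Empty context created with the no-arg constructor
        ExecutionContext context = new ExecutionContext();

        // Typed writers
        context.putString("input.file", "data/input.csv");
        context.putInt("read.count", 42);
        context.putLong("last.id", 1000L);

        // Typed readers; containsKey guards against missing entries
        if (context.containsKey("read.count")) {
            System.out.println("read.count = " + context.getInt("read.count"));
        }
        System.out.println("input.file = " + context.getString("input.file"));
        System.out.println("dirty = " + context.isDirty()); // true after the puts above
    }
}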
From source file:cn.cuizuoli.appranking.batch.partition.FeedTypePartitioner.java
@Override
public Map<String, ExecutionContext> partition(int gridSize) {
    Map<String, ExecutionContext> map = new HashMap<String, ExecutionContext>();
    int i = 0;
    for (FeedType feedType : FeedType.getFeedType(mediaType)) {
        if (mediaType == MediaType.GOOGLE) {
            for (Category category : Category.values()) {
                ExecutionContext context = new ExecutionContext();
                context.put(FEED_TYPE, feedType.getCode());
                context.put(CATEGORY, category.getCode());
                DateTime datetime = DateTime.now();
                context.put(HOUR, DateUtil.toHour(datetime));
                context.put(DAY, DateUtil.toDay(datetime));
                map.put(PARTITION_KEY + i + "." + feedType.getCode() + "." + category.getCode(), context);
                i++;
            }
        } else {
            for (Country country : Country.values()) {
                ExecutionContext context = new ExecutionContext();
                context.put(COUNTRY, country.getCode());
                context.put(FEED_TYPE, feedType.getCode());
                DateTime datetime = DateTime.now();
                context.put(HOUR, DateUtil.toHour(datetime));
                context.put(DAY, DateUtil.toDay(datetime));
                map.put(PARTITION_KEY + i + "." + feedType.getCode(), context);
                i++;
            }
        }
    }
    return map;
}
From source file:de.mediait.batch.FlatFileItemReaderTest.java
@Test
public void testFlatFileReader2() throws Exception {
    final FlatFileItemReader<String[]> reader = createFlatFileReader(',', '\'');
    reader.setResource(new ClassPathResource("csv-fix-samples/fixcomma.txt"));
    final ExecutionContext executionContext = new ExecutionContext();
    reader.open(executionContext);
    final String[] object = (String[]) reader.read();
    reader.close();
    assertArrayEquals(new String[] { "begin", "abc' \"d\" 'ef", "end" }, object);
}
From source file:de.langmi.spring.batch.examples.readers.file.multiresourcepartitioner.CustomMultiResourcePartitioner.java
/**
 * Assign the filename of each of the injected resources to an
 * {@link ExecutionContext}.
 *
 * @see Partitioner#partition(int)
 */
@Override
public Map<String, ExecutionContext> partition(int gridSize) {
    Map<String, ExecutionContext> map = new HashMap<String, ExecutionContext>(gridSize);
    int i = 0;
    for (Resource resource : resources) {
        ExecutionContext context = new ExecutionContext();
        Assert.state(resource.exists(), "Resource does not exist: " + resource);
        try {
            context.putString(inputKeyName, resource.getURL().toExternalForm());
            context.put(outputKeyName, createOutputFilename(i, resource));
        } catch (IOException e) {
            throw new IllegalArgumentException("File could not be located for: " + resource, e);
        }
        map.put(PARTITION_KEY + i, context);
        i++;
    }
    return map;
}
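For context, a hedged sketch of the consuming side of such a partitioner: a step-scoped reader whose input resource is late-bound from the step's ExecutionContext. The bean wiring is an assumption, not part of the example above, and the literal key "inputFilePath" is a placeholder for whatever inputKeyName is configured to.

import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.file.mapping.PassThroughLineMapper;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.UrlResource;

@Configuration
public class PartitionedReaderConfig {

    // Step-scoped reader: Spring Batch resolves the SpEL expression against the
    // step's ExecutionContext, which the partitioner populated.
    // "inputFilePath" is a placeholder key used only for illustration.
    @Bean
    @StepScope
    public FlatFileItemReader<String> partitionedReader(
            @Value("#{stepExecutionContext['inputFilePath']}") String inputUrl) throws Exception {
        FlatFileItemReader<String> reader = new FlatFileItemReader<String>();
        reader.setName("partitionedReader");
        reader.setResource(new UrlResource(inputUrl));
        reader.setLineMapper(new PassThroughLineMapper());
        return reader;
    }
}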
From source file:fr.acxio.tools.agia.file.ExtendedMultiResourceItemReaderTest.java
@Test
public void testReadResources() throws Exception {
    ExtendedMultiResourceItemReader<Resource> aReader = new ExtendedMultiResourceItemReader<Resource>();
    IdentityResourceAwareItemReaderItemStream aDelegate = mock(IdentityResourceAwareItemReaderItemStream.class);
    aReader.setDelegate(aDelegate);
    Resource[] aResources = new Resource[2];
    aResources[0] = mock(Resource.class);
    aResources[1] = mock(Resource.class);
    when(aResources[0].getFilename()).thenReturn("file1");
    when(aResources[1].getFilename()).thenReturn("file2");
    when(aDelegate.read()).thenReturn(aResources[0]).thenReturn(null).thenReturn(aResources[1]).thenReturn(null);
    aReader.setResources(aResources);
    aReader.open(new ExecutionContext());
    assertEquals(aResources[0], aReader.read());
    assertEquals(aResources[1], aReader.read());
    assertNull(aReader.read());
    aReader.close();
    verify(aDelegate, times(2)).open(any(ExecutionContext.class));
    verify(aDelegate, times(2)).close();
    verify(aDelegate, times(4)).read(); // 2 values + 2 nulls
}
From source file:org.trpr.platform.batch.impl.spring.partitioner.SimpleRangePartitioner.java
public Map<String, ExecutionContext> partition(int gridSize) {
    Map<String, ExecutionContext> map = new HashMap<String, ExecutionContext>(gridSize);
    for (int i = 0; i < gridSize; i++) {
        ExecutionContext context = new ExecutionContext();
        context.putInt(TOTAL_PARTITIIONS, gridSize);
        context.putInt(PARTITION_INDEX, i);
        map.put(PARTITION_KEY + i, context);
    }
    return map;
}
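A hedged sketch of how a worker step could pick up the two values this partitioner writes, via step-scope late binding. The literal keys "partition.index" and "total.partitions" are placeholders for whatever the PARTITION_INDEX and TOTAL_PARTITIIONS constants actually resolve to.

import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.repeat.RepeatStatus;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class PartitionWorkerConfig {

    // Step-scoped tasklet: the SpEL expressions are resolved against the
    // ExecutionContext created per partition by the partitioner.
    // The key strings below are illustrative placeholders.
    @Bean
    @StepScope
    public Tasklet partitionAwareTasklet(
            @Value("#{stepExecutionContext['partition.index']}") Integer partitionIndex,
            @Value("#{stepExecutionContext['total.partitions']}") Integer totalPartitions) {
        return (contribution, chunkContext) -> {
            System.out.println("Processing slice " + partitionIndex + " of " + totalPartitions);
            return RepeatStatus.FINISHED;
        };
    }
}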
From source file:de.langmi.spring.batch.examples.complex.support.CustomMultiResourcePartitioner.java
/**
 * Assign the filename of each of the injected resources to an
 * {@link ExecutionContext}.
 *
 * @see Partitioner#partition(int)
 */
@Override
public Map<String, ExecutionContext> partition(int gridSize) {
    Map<String, ExecutionContext> map = new HashMap<String, ExecutionContext>(gridSize);
    int i = 0;
    for (Resource resource : resources) {
        ExecutionContext context = new ExecutionContext();
        Assert.state(resource.exists(), "Resource does not exist: " + resource);
        try {
            context.putString(keyName, resource.getURL().toExternalForm());
            context.put("outputFileName", createOutputFilename(i, resource));
        } catch (IOException e) {
            throw new IllegalArgumentException("File could not be located for: " + resource, e);
        }
        map.put(PARTITION_KEY + i, context);
        i++;
    }
    return map;
}
From source file:org.opensourcebank.batch.partition.HazelcastMapPartitioner.java
public Map<String, ExecutionContext> partition(int gridSize) {
    Map<Long, Object> itemsMap = Hazelcast.getMap(mapName);
    Set<Long> itemsIds = itemsMap.keySet();
    long min = 0;
    long max = 0;
    if (itemsIds.size() > 0) {
        min = Collections.min(itemsIds);
        max = Collections.max(itemsIds);
    }
    long targetSize = (max - min) / gridSize + 1;
    Map<String, ExecutionContext> result = new HashMap<String, ExecutionContext>();
    int number = 0;
    long start = min;
    long end = start + targetSize - 1;
    while (start <= max) {
        ExecutionContext value = new ExecutionContext();
        result.put("partition" + number, value);
        if (end >= max) {
            end = max;
        }
        value.putLong("fromId", start);
        value.putLong("toId", end);
        value.putString("mapName", mapName);
        start += targetSize;
        end += targetSize;
        number++;
    }
    return result;
}
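As a standalone illustration of the range arithmetic above (with made-up numbers), the id interval [min, max] is split into contiguous slices of targetSize ids each, and the last slice is clipped to max:

import java.util.LinkedHashMap;
import java.util.Map;

public class RangeSplitExample {
    public static void main(String[] args) {
        long min = 1, max = 10;
        int gridSize = 3;
        long targetSize = (max - min) / gridSize + 1; // 4 ids per partition

        Map<String, long[]> ranges = new LinkedHashMap<>();
        int number = 0;
        long start = min;
        long end = start + targetSize - 1;
        while (start <= max) {
            if (end >= max) {
                end = max; // clip the last range to the highest existing id
            }
            ranges.put("partition" + number, new long[] { start, end });
            start += targetSize;
            end += targetSize;
            number++;
        }
        // Prints partition0=[1,4], partition1=[5,8], partition2=[9,10]
        ranges.forEach((k, v) -> System.out.println(k + "=[" + v[0] + "," + v[1] + "]"));
    }
}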
From source file:fr.acxio.tools.agia.alfresco.MultiLineNodeListItemReaderTest.java
@Before
public void setUp() {
    ((ItemStream) multiLineNodeListItemReader).open(new ExecutionContext());
}
From source file:de.langmi.spring.batch.examples.readers.support.CompositeItemStreamReaderTest.java
@Test
public void testRestart() throws Exception {
    // setup composite reader
    reader.setUnifyingMapper(new DefaultUnifyingStringItemsMapper());
    reader.setItemReaderStreams(new ArrayList<ItemStreamReader<?>>() {
        {
            add(createFlatFileItemReader(INPUT_FILE_1));
            add(createFlatFileItemReader(INPUT_FILE_2));
        }
    });
    // open, provide "mock" ExecutionContext
    // fake restart scenario, it works because the name of the FlatFileItemReader
    // is set to its input_file
    int alreadyRead = 2;
    ExecutionContext ec = new ExecutionContext();
    ec.put(INPUT_FILE_1 + "." + "read.count", alreadyRead);
    ec.put(INPUT_FILE_2 + "." + "read.count", alreadyRead);
    reader.open(ec);
    // read
    try {
        // this makes sure we test a restart scenario, first read item
        // should be alreadyRead, for my example files this is a "2" and
        // not the first line a "0"
        int count = alreadyRead;
        String line;
        while ((line = reader.read()) != null) {
            assertEquals(String.valueOf(count) + String.valueOf(count), line);
            count++;
        }
        // read count includes the alreadyRead items too
        assertEquals(EXPECTED_COUNT, count);
    } catch (Exception e) {
        throw e;
    } finally {
        reader.close();
    }
}
From source file:no.magott.training.ex2.CurrencyImportComponentTests.java
@Test
public void canReadWithJdbcReader() throws Exception {
    dbReader.open(new ExecutionContext());
    dbReader.read();
}