Example usage for org.springframework.scheduling.concurrent ThreadPoolTaskExecutor setCorePoolSize

Introduction

On this page you can find example usage of org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor.setCorePoolSize.

Prototype

public void setCorePoolSize(int corePoolSize) 

Document

Set the ThreadPoolExecutor's core pool size. Default is 1.
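
As a minimal sketch (not drawn from the examples below; the pool sizes and the task are illustrative assumptions), the core pool size is normally set while configuring the executor, before initialize() is called:

ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
executor.setCorePoolSize(4);     // threads kept in the pool even when idle (unless core thread timeout is allowed)
executor.setMaxPoolSize(8);      // pool may grow to this size once the queue is full
executor.setQueueCapacity(50);
executor.initialize();           // required when the executor is created outside a Spring container

// hypothetical task, for illustration only
executor.execute(() -> System.out.println("running on " + Thread.currentThread().getName()));

executor.shutdown();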

Usage

From source file:com.ciphertool.sentencebuilder.etl.importers.WordListImporterImplTest.java

@Test
public void testImportWordList_LeftoversFromBatch() {
    ThreadPoolTaskExecutor taskExecutorSpy = spy(new ThreadPoolTaskExecutor());
    taskExecutorSpy.setCorePoolSize(4);
    taskExecutorSpy.setMaxPoolSize(4);
    taskExecutorSpy.setQueueCapacity(100);
    taskExecutorSpy.setKeepAliveSeconds(1);
    taskExecutorSpy.setAllowCoreThreadTimeOut(true);
    taskExecutorSpy.initialize();

    WordListImporterImpl wordListImporterImpl = new WordListImporterImpl();
    wordListImporterImpl.setTaskExecutor(taskExecutorSpy);

    Field rowCountField = ReflectionUtils.findField(WordListImporterImpl.class, "rowCount");
    ReflectionUtils.makeAccessible(rowCountField);
    AtomicInteger rowCountFromObject = (AtomicInteger) ReflectionUtils.getField(rowCountField,
            wordListImporterImpl);

    assertEquals(0, rowCountFromObject.intValue());

    WordDao wordDaoMock = mock(WordDao.class);
    when(wordDaoMock.insertBatch(anyListOf(Word.class))).thenReturn(true);
    int persistenceBatchSizeToSet = 3;
    int concurrencyBatchSizeToSet = 2;

    wordListImporterImpl.setWordDao(wordDaoMock);
    wordListImporterImpl.setPersistenceBatchSize(persistenceBatchSizeToSet);
    wordListImporterImpl.setConcurrencyBatchSize(concurrencyBatchSizeToSet);

    Word word1 = new Word(new WordId("george", PartOfSpeechType.NOUN));
    Word word2 = new Word(new WordId("elmer", PartOfSpeechType.NOUN));
    Word word3 = new Word(new WordId("belden", PartOfSpeechType.NOUN));
    List<Word> wordsToReturn = new ArrayList<Word>();
    wordsToReturn.add(word1);
    wordsToReturn.add(word2);
    wordsToReturn.add(word3);
    PartOfSpeechFileParser fileParserMock = mock(PartOfSpeechFileParser.class);
    when(fileParserMock.parseFile()).thenReturn(wordsToReturn);

    wordListImporterImpl.setFileParser(fileParserMock);

    wordListImporterImpl.importWordList();

    rowCountFromObject = (AtomicInteger) ReflectionUtils.getField(rowCountField, wordListImporterImpl);

    assertEquals(3, rowCountFromObject.intValue());
    verify(wordDaoMock, times(2)).insertBatch(anyListOf(Word.class));
    verify(taskExecutorSpy, times(2)).execute(any(Runnable.class));
}

From source file:ch.rasc.wampspring.config.DefaultWampConfiguration.java

@Bean
public Executor clientInboundChannelExecutor() {
    ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
    executor.setThreadNamePrefix("wampClientInboundChannel-");
    executor.setCorePoolSize(Runtime.getRuntime().availableProcessors() * 2);
    executor.setMaxPoolSize(Integer.MAX_VALUE);
    executor.setKeepAliveSeconds(60);
    executor.setQueueCapacity(Integer.MAX_VALUE);
    executor.setAllowCoreThreadTimeOut(true);

    return executor;
}

From source file:ch.rasc.wampspring.config.DefaultWampConfiguration.java

@Bean
public Executor clientOutboundChannelExecutor() {
    ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
    executor.setThreadNamePrefix("wampClientOutboundChannel-");
    executor.setCorePoolSize(Runtime.getRuntime().availableProcessors() * 2);
    executor.setMaxPoolSize(Integer.MAX_VALUE);
    executor.setKeepAliveSeconds(60);
    executor.setQueueCapacity(Integer.MAX_VALUE);
    executor.setAllowCoreThreadTimeOut(true);

    return executor;
}

From source file:com.sinosoft.one.mvc.web.portal.impl.PortalBeanPostProcessor.java

public Object postProcessBeforeInitialization(Object bean, String beanName) throws BeansException {

    if (applicationContext instanceof WebApplicationContext) {
        WebApplicationContext webApplicationContext = (WebApplicationContext) applicationContext;
        if (ThreadPoolTaskExecutor.class == bean.getClass()) {
            ThreadPoolTaskExecutor executor = (ThreadPoolTaskExecutor) bean;
            String paramCorePoolSize = webApplicationContext.getServletContext()
                    .getInitParameter(PORTAL_EXECUTOR_CORE_POOL_SIZE);
            if (StringUtils.isNotBlank(paramCorePoolSize)) {
                if (logger.isInfoEnabled()) {
                    logger.info("found param " + PORTAL_EXECUTOR_CORE_POOL_SIZE + "=" + paramCorePoolSize);
                }
                executor.setCorePoolSize(Integer.parseInt(paramCorePoolSize));
            } else {
                throw new IllegalArgumentException(
                        "please add '<context-param><param-name>portalExecutorCorePoolSize</param-name><param-value>a number here</param-value></context-param>' in your web.xml");
            }
            String paramMaxPoolSize = webApplicationContext.getServletContext()
                    .getInitParameter(PORTAL_EXECUTOR_MAX_POOL_SIZE);
            if (StringUtils.isNotBlank(paramMaxPoolSize)) {
                if (logger.isInfoEnabled()) {
                    logger.info("found param " + PORTAL_EXECUTOR_MAX_POOL_SIZE + "=" + paramMaxPoolSize);
                }
                executor.setMaxPoolSize(Integer.parseInt(paramMaxPoolSize));
            }
            String paramKeepAliveSeconds = webApplicationContext.getServletContext()
                    .getInitParameter(PORTAL_EXECUTOR_KEEP_ALIVE_SECONDS);
            if (StringUtils.isNotBlank(paramKeepAliveSeconds)) {
                if (logger.isInfoEnabled()) {
                    logger.info(
                            "found param " + PORTAL_EXECUTOR_KEEP_ALIVE_SECONDS + "=" + paramKeepAliveSeconds);
                }
                executor.setKeepAliveSeconds(Integer.parseInt(paramKeepAliveSeconds));
            }
        } else if (List.class.isInstance(bean) && "portalListenerList".equals(beanName)) {
            String paramListeners = webApplicationContext.getServletContext()
                    .getInitParameter(PORTAL_LISTENERS);
            @SuppressWarnings("unchecked")
            List<WindowListener> list = (List<WindowListener>) bean;
            if (StringUtils.isNotBlank(paramListeners)) {
                String[] splits = paramListeners.split(",| ");
                if (logger.isInfoEnabled()) {
                    logger.info("found portalListener config: " + Arrays.toString(splits));
                }
                for (String className : splits) {
                    className = className.trim();
                    if (className.length() > 0) {
                        try {
                            Class<?> clazz = Class.forName(className);
                            WindowListener l = (WindowListener) BeanUtils.instantiateClass(clazz);
                            list.add(l);
                            if (logger.isInfoEnabled()) {
                                logger.info("add portalListener: " + l);
                            }
                        } catch (Exception e) {
                            logger.error("", e);
                        }
                    }
                }
            }
        }
    }
    return bean;
}

From source file:com.laxser.blitz.web.portal.impl.PortalBeanPostProcessor.java

@Override
public Object postProcessBeforeInitialization(Object bean, String beanName) throws BeansException {

    if (applicationContext instanceof WebApplicationContext) {
        WebApplicationContext webApplicationContext = (WebApplicationContext) applicationContext;
        if (ThreadPoolTaskExecutor.class == bean.getClass()) {
            ThreadPoolTaskExecutor executor = (ThreadPoolTaskExecutor) bean;
            String paramCorePoolSize = webApplicationContext.getServletContext()
                    .getInitParameter(PORTAL_EXECUTOR_CORE_POOL_SIZE);
            if (StringUtils.isNotBlank(paramCorePoolSize)) {
                if (logger.isInfoEnabled()) {
                    logger.info("found param " + PORTAL_EXECUTOR_CORE_POOL_SIZE + "=" + paramCorePoolSize);
                }
                executor.setCorePoolSize(Integer.parseInt(paramCorePoolSize));
            } else {
                throw new IllegalArgumentException(
                        "please add '<context-param><param-name>portalExecutorCorePoolSize</param-name><param-value>a number here</param-value></context-param>' in your web.xml");
            }
            String paramMaxPoolSize = webApplicationContext.getServletContext()
                    .getInitParameter(PORTAL_EXECUTOR_MAX_POOL_SIZE);
            if (StringUtils.isNotBlank(paramMaxPoolSize)) {
                if (logger.isInfoEnabled()) {
                    logger.info("found param " + PORTAL_EXECUTOR_MAX_POOL_SIZE + "=" + paramMaxPoolSize);
                }
                executor.setMaxPoolSize(Integer.parseInt(paramMaxPoolSize));
            }
            String paramKeepAliveSeconds = webApplicationContext.getServletContext()
                    .getInitParameter(PORTAL_EXECUTOR_KEEP_ALIVE_SECONDS);
            if (StringUtils.isNotBlank(paramKeepAliveSeconds)) {
                if (logger.isInfoEnabled()) {
                    logger.info(
                            "found param " + PORTAL_EXECUTOR_KEEP_ALIVE_SECONDS + "=" + paramKeepAliveSeconds);
                }
                executor.setKeepAliveSeconds(Integer.parseInt(paramKeepAliveSeconds));
            }
        } else if (List.class.isInstance(bean) && "portalListenerList".equals(beanName)) {
            String paramListeners = webApplicationContext.getServletContext()
                    .getInitParameter(PORTAL_LISTENERS);
            @SuppressWarnings("unchecked")
            List<WindowListener> list = (List<WindowListener>) bean;
            if (StringUtils.isNotBlank(paramListeners)) {
                String[] splits = paramListeners.split(",| ");
                if (logger.isInfoEnabled()) {
                    logger.info("found portalListener config: " + Arrays.toString(splits));
                }
                for (String className : splits) {
                    className = className.trim();
                    if (className.length() > 0) {
                        try {
                            Class<?> clazz = Class.forName(className);
                            WindowListener l = (WindowListener) BeanUtils.instantiateClass(clazz);
                            list.add(l);
                            if (logger.isInfoEnabled()) {
                                logger.info("add portalListener: " + l);
                            }
                        } catch (Exception e) {
                            logger.error("", e);
                        }
                    }
                }
            }
        }
    }
    return bean;
}

From source file:com.bt.aloha.batchtest.PerformanceBatchTest.java

private void restartSimpleSipStack(int numberSimpleSipThreads) {
    ThreadPoolTaskExecutor threadPoolTaskExecutor = (ThreadPoolTaskExecutor) applicationContext
            .getBean("taskExecutor");
    threadPoolTaskExecutor.destroy();
    threadPoolTaskExecutor.setCorePoolSize(numberSimpleSipThreads);
    threadPoolTaskExecutor.initialize();
}

From source file:com.ethlo.geodata.GeodataServiceImpl.java

public void load() {
    ensureBaseDirectory();

    final SourceDataInfoSet sourceInfo = geoMetaService.getSourceDataInfo();
    if (sourceInfo.isEmpty()) {
        logger.error(
                "Cannot start geodata server as there is no data. Please run with 'update' parameter to import data");
        System.exit(1);
    }

    loadHierarchy();

    final ThreadPoolTaskExecutor taskExecutor = new ThreadPoolTaskExecutor();
    taskExecutor.setCorePoolSize(3);
    taskExecutor.setThreadNamePrefix("data-loading-");
    taskExecutor.initialize();

    taskExecutor.execute(this::loadLocations);
    taskExecutor.execute(this::loadMbr);
    taskExecutor.execute(this::loadIps);

    taskExecutor.setAwaitTerminationSeconds(Integer.MAX_VALUE);
    taskExecutor.setWaitForTasksToCompleteOnShutdown(true);
    taskExecutor.shutdown();

    //final ResultSet<GeoLocation> result = geoNamesRepository.retrieve(QueryFactory.equal(CqGeonamesRepository.ATTRIBUTE_FEATURE_CODE, "ADM1"));
    //result.forEach(this::connectAdm1WithCountry);

    publisher.publishEvent(new DataLoadedEvent(this, DataType.ALL, Operation.LOAD, 1, 1));
}

From source file:com.ciphertool.sentencebuilder.etl.importers.FrequencyListImporterImplTest.java

@Test
public void testImportFrequencyList() {
    ThreadPoolTaskExecutor taskExecutorSpy = spy(new ThreadPoolTaskExecutor());
    taskExecutorSpy.setCorePoolSize(4);
    taskExecutorSpy.setMaxPoolSize(4);
    taskExecutorSpy.setQueueCapacity(100);
    taskExecutorSpy.setKeepAliveSeconds(1);
    taskExecutorSpy.setAllowCoreThreadTimeOut(true);
    taskExecutorSpy.initialize();

    FrequencyListImporterImpl frequencyListImporterImpl = new FrequencyListImporterImpl();
    frequencyListImporterImpl.setTaskExecutor(taskExecutorSpy);

    Field rowUpdateCountField = ReflectionUtils.findField(FrequencyListImporterImpl.class, "rowUpdateCount");
    ReflectionUtils.makeAccessible(rowUpdateCountField);
    AtomicInteger rowUpdateCountFromObject = (AtomicInteger) ReflectionUtils.getField(rowUpdateCountField,
            frequencyListImporterImpl);

    assertEquals(0, rowUpdateCountFromObject.intValue());

    Field rowInsertCountField = ReflectionUtils.findField(FrequencyListImporterImpl.class, "rowInsertCount");
    ReflectionUtils.makeAccessible(rowInsertCountField);
    AtomicInteger rowInsertCountFromObject = (AtomicInteger) ReflectionUtils.getField(rowInsertCountField,
            frequencyListImporterImpl);

    assertEquals(0, rowInsertCountFromObject.intValue());

    WordDao wordDaoMock = mock(WordDao.class);
    when(wordDaoMock.insertBatch(anyListOf(Word.class))).thenReturn(true);
    int persistenceBatchSizeToSet = 2;
    int concurrencyBatchSizeToSet = 2;

    frequencyListImporterImpl.setWordDao(wordDaoMock);
    frequencyListImporterImpl.setPersistenceBatchSize(persistenceBatchSizeToSet);
    frequencyListImporterImpl.setConcurrencyBatchSize(concurrencyBatchSizeToSet);

    Word word1 = new Word(new WordId("george", PartOfSpeechType.NOUN), 100);
    Word word2 = new Word(new WordId("belden", PartOfSpeechType.NOUN), 200);
    Word word3 = new Word(new WordId("is", PartOfSpeechType.VERB_PARTICIPLE), 300);
    Word word4 = new Word(new WordId("awesome", PartOfSpeechType.ADJECTIVE), 400);
    List<Word> wordsToReturn = new ArrayList<Word>();
    wordsToReturn.add(word1);
    wordsToReturn.add(word2);
    wordsToReturn.add(word3);
    wordsToReturn.add(word4);
    FrequencyFileParser fileParserMock = mock(FrequencyFileParser.class);
    when(fileParserMock.parseFile()).thenReturn(wordsToReturn);

    frequencyListImporterImpl.setFileParser(fileParserMock);

    Word wordFromDatabase1 = new Word(new WordId("george", PartOfSpeechType.NOUN));
    Word wordFromDatabase2 = new Word(new WordId("belden", PartOfSpeechType.NOUN));

    when(wordDaoMock.insertBatch(anyListOf(Word.class))).thenReturn(true);
    when(wordDaoMock.updateBatch(anyListOf(Word.class))).thenReturn(true);
    when(wordDaoMock.findByWordString(eq("george"))).thenReturn(Arrays.asList(wordFromDatabase1));
    when(wordDaoMock.findByWordString(eq("belden"))).thenReturn(Arrays.asList(wordFromDatabase2));
    when(wordDaoMock.findByWordString(eq("is"))).thenReturn(null);
    when(wordDaoMock.findByWordString(eq("awesome"))).thenReturn(null);

    frequencyListImporterImpl.importFrequencyList();

    assertEquals(100, wordFromDatabase1.getFrequencyWeight());
    assertEquals(200, wordFromDatabase2.getFrequencyWeight());

    rowUpdateCountFromObject = (AtomicInteger) ReflectionUtils.getField(rowUpdateCountField,
            frequencyListImporterImpl);
    rowInsertCountFromObject = (AtomicInteger) ReflectionUtils.getField(rowInsertCountField,
            frequencyListImporterImpl);

    assertEquals(2, rowUpdateCountFromObject.intValue());
    assertEquals(2, rowInsertCountFromObject.intValue());
    verify(wordDaoMock, times(1)).insertBatch(anyListOf(Word.class));
    verify(wordDaoMock, times(1)).updateBatch(anyListOf(Word.class));
    verify(wordDaoMock, times(4)).findByWordString(anyString());
    verify(taskExecutorSpy, times(2)).execute(any(Runnable.class));
}

From source file:com.ciphertool.sentencebuilder.etl.importers.FrequencyListImporterImplTest.java

@Test
public void testImportFrequencyList_LeftoversFromBatch() {
    ThreadPoolTaskExecutor taskExecutorSpy = spy(new ThreadPoolTaskExecutor());
    taskExecutorSpy.setCorePoolSize(4);
    taskExecutorSpy.setMaxPoolSize(4);
    taskExecutorSpy.setQueueCapacity(100);
    taskExecutorSpy.setKeepAliveSeconds(1);
    taskExecutorSpy.setAllowCoreThreadTimeOut(true);
    taskExecutorSpy.initialize();

    FrequencyListImporterImpl frequencyListImporterImpl = new FrequencyListImporterImpl();
    frequencyListImporterImpl.setTaskExecutor(taskExecutorSpy);

    Field rowUpdateCountField = ReflectionUtils.findField(FrequencyListImporterImpl.class, "rowUpdateCount");
    ReflectionUtils.makeAccessible(rowUpdateCountField);
    AtomicInteger rowUpdateCountFromObject = (AtomicInteger) ReflectionUtils.getField(rowUpdateCountField,
            frequencyListImporterImpl);

    assertEquals(0, rowUpdateCountFromObject.intValue());

    Field rowInsertCountField = ReflectionUtils.findField(FrequencyListImporterImpl.class, "rowInsertCount");
    ReflectionUtils.makeAccessible(rowInsertCountField);
    AtomicInteger rowInsertCountFromObject = (AtomicInteger) ReflectionUtils.getField(rowInsertCountField,
            frequencyListImporterImpl);

    assertEquals(0, rowInsertCountFromObject.intValue());

    WordDao wordDaoMock = mock(WordDao.class);
    when(wordDaoMock.insertBatch(anyListOf(Word.class))).thenReturn(true);
    int persistenceBatchSizeToSet = 3;
    int concurrencyBatchSizeToSet = 2;

    frequencyListImporterImpl.setWordDao(wordDaoMock);
    frequencyListImporterImpl.setPersistenceBatchSize(persistenceBatchSizeToSet);
    frequencyListImporterImpl.setConcurrencyBatchSize(concurrencyBatchSizeToSet);

    Word word1 = new Word(new WordId("george", PartOfSpeechType.NOUN), 100);
    Word word2 = new Word(new WordId("belden", PartOfSpeechType.NOUN), 200);
    Word word3 = new Word(new WordId("is", PartOfSpeechType.VERB_PARTICIPLE), 300);
    Word word4 = new Word(new WordId("super", PartOfSpeechType.ADJECTIVE), 400);
    Word word5 = new Word(new WordId("awesome", PartOfSpeechType.ADJECTIVE), 500);
    List<Word> wordsToReturn = new ArrayList<Word>();
    wordsToReturn.add(word1);
    wordsToReturn.add(word2);
    wordsToReturn.add(word3);
    wordsToReturn.add(word4);
    wordsToReturn.add(word5);
    FrequencyFileParser fileParserMock = mock(FrequencyFileParser.class);
    when(fileParserMock.parseFile()).thenReturn(wordsToReturn);

    frequencyListImporterImpl.setFileParser(fileParserMock);

    Word wordFromDatabase1 = new Word(new WordId("george", PartOfSpeechType.NOUN));
    Word wordFromDatabase2 = new Word(new WordId("belden", PartOfSpeechType.NOUN));
    Word wordFromDatabase3 = new Word(new WordId("is", PartOfSpeechType.ADJECTIVE));

    when(wordDaoMock.insertBatch(anyListOf(Word.class))).thenReturn(true);
    when(wordDaoMock.updateBatch(anyListOf(Word.class))).thenReturn(true);
    when(wordDaoMock.findByWordString(eq("george"))).thenReturn(Arrays.asList(wordFromDatabase1));
    when(wordDaoMock.findByWordString(eq("belden"))).thenReturn(Arrays.asList(wordFromDatabase2));
    when(wordDaoMock.findByWordString(eq("is"))).thenReturn(

            Arrays.asList(wordFromDatabase3));
    when(wordDaoMock.findByWordString(eq("super"))).thenReturn(null);
    when(wordDaoMock.findByWordString(eq("seriously"))).thenReturn(null);
    when(wordDaoMock.findByWordString(eq("awesome"))).thenReturn(null);

    frequencyListImporterImpl.importFrequencyList();

    assertEquals(100, wordFromDatabase1.getFrequencyWeight());
    assertEquals(200, wordFromDatabase2.getFrequencyWeight());
    assertEquals(300, wordFromDatabase3.getFrequencyWeight());

    rowUpdateCountFromObject = (AtomicInteger) ReflectionUtils.getField(rowUpdateCountField,
            frequencyListImporterImpl);
    rowInsertCountFromObject = (AtomicInteger) ReflectionUtils.getField(rowInsertCountField,
            frequencyListImporterImpl);

    assertEquals(3, rowUpdateCountFromObject.intValue());
    assertEquals(2, rowInsertCountFromObject.intValue());
    verify(wordDaoMock, times(2)).insertBatch(anyListOf(Word.class));
    verify(wordDaoMock, times(2)).updateBatch(anyListOf(Word.class));
    verify(wordDaoMock, times(5)).findByWordString(anyString());
    verify(taskExecutorSpy, times(3)).execute(any(Runnable.class));
}

From source file:org.eclipse.gemini.blueprint.extender.internal.support.ExtenderConfiguration.java

private TaskExecutor createDefaultShutdownTaskExecutor() {
    ThreadPoolTaskExecutor taskExecutor = new ThreadPoolTaskExecutor();
    taskExecutor.setDaemon(true);
    taskExecutor.setCorePoolSize(2);
    taskExecutor.setMaxPoolSize(3);
    taskExecutor.setThreadNamePrefix("Gemini Blueprint context shutdown thread");
    taskExecutor.afterPropertiesSet();
    isShutdownTaskExecutorManagedInternally = true;
    return taskExecutor;
}