Example usage for org.apache.commons.pool2.impl GenericObjectPoolConfig setMaxTotal

Introduction

This page lists example usages of org.apache.commons.pool2.impl.GenericObjectPoolConfig#setMaxTotal drawn from open-source projects.

Prototype

public void setMaxTotal(int maxTotal) 

Document

Set the value for the maxTotal configuration attribute for pools created with this configuration instance.
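
Before turning to the project examples, here is a minimal, self-contained sketch of setMaxTotal in action. The StringBuilder factory and the cap of 8 objects are illustrative assumptions, not taken from any of the projects below.

import org.apache.commons.pool2.BasePooledObjectFactory;
import org.apache.commons.pool2.PooledObject;
import org.apache.commons.pool2.impl.DefaultPooledObject;
import org.apache.commons.pool2.impl.GenericObjectPool;
import org.apache.commons.pool2.impl.GenericObjectPoolConfig;

public class MaxTotalExample {

    public static void main(String[] args) throws Exception {
        GenericObjectPoolConfig config = new GenericObjectPoolConfig();
        config.setMaxTotal(8); // never more than 8 pooled objects in existence at once
        config.setMaxIdle(8);  // allow all 8 to sit idle in the pool

        GenericObjectPool<StringBuilder> pool = new GenericObjectPool<>(
                new BasePooledObjectFactory<StringBuilder>() {
                    @Override
                    public StringBuilder create() {
                        return new StringBuilder();
                    }

                    @Override
                    public PooledObject<StringBuilder> wrap(StringBuilder obj) {
                        return new DefaultPooledObject<>(obj);
                    }
                }, config);

        // Once 8 objects are checked out, borrowObject() blocks (by default) until one is returned.
        StringBuilder sb = pool.borrowObject();
        try {
            sb.append("work");
        } finally {
            pool.returnObject(sb);
        }
        pool.close();
    }
}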

Usage

From source file:de.qucosa.servlet.MetsDisseminatorServlet.java

@Override
public void init() throws ServletException {
    super.init();

    startupProperties = new PropertyCollector().source(getServletContext()).source(System.getProperties())
            .collect();

    final FedoraClientFactory fedoraClientFactory = attemptToCreateFedoraClientFactoryFrom(startupProperties);

    if (fedoraClientFactory == null) {
        // we need a client factory for startup connections
        log.warn("Fedora connection credentials not configured. No connection pooling possible.");
    } else {
        final GenericObjectPoolConfig poolConfig = new GenericObjectPoolConfig();
        poolConfig.setMaxTotal(
                Integer.parseInt(startupProperties.getProperty(PROP_FEDORA_CONNECTIONPOOL_MAXSIZE, "20")));
        poolConfig.setMinIdle(5);
        poolConfig.setMinEvictableIdleTimeMillis(TimeUnit.MINUTES.toMillis(1));

        fedoraClientPool = new GenericObjectPool<>(fedoraClientFactory, poolConfig);

        log.info("Initialized Fedora connection pool.");
    }

    cacheManager = CacheManager.newInstance();
    cache = cacheManager.getCache("dscache");
}

From source file:gobblin.metrics.kafka.KafkaAvroSchemaRegistry.java

/**
 * @param properties properties should contain property "kafka.schema.registry.url", and optionally
 * "kafka.schema.registry.max.cache.size" (default = 1000) and
 * "kafka.schema.registry.cache.expire.after.write.min" (default = 10).
 */
public KafkaAvroSchemaRegistry(Properties props) {
    super(props);
    Preconditions.checkArgument(props.containsKey(KAFKA_SCHEMA_REGISTRY_URL),
            String.format("Property %s not provided.", KAFKA_SCHEMA_REGISTRY_URL));

    this.url = props.getProperty(KAFKA_SCHEMA_REGISTRY_URL);

    int objPoolSize = Integer
            .parseInt(props.getProperty(ConfigurationKeys.KAFKA_SOURCE_WORK_UNITS_CREATION_THREADS,
                    "" + ConfigurationKeys.KAFKA_SOURCE_WORK_UNITS_CREATION_DEFAULT_THREAD_COUNT));
    LOG.info("Create HttpClient pool with size " + objPoolSize);

    GenericObjectPoolConfig config = new GenericObjectPoolConfig();
    config.setMaxTotal(objPoolSize);
    config.setMaxIdle(objPoolSize);
    this.httpClientPool = new GenericObjectPool<>(new HttpClientFactory(), config);
}

From source file:com.lambdaworks.redis.RedisConnectionPool.java

/**
 * Create a new connection pool.
 * 
 * @param redisConnectionProvider the connection provider
 * @param maxActive max active connections
 * @param maxIdle max idle connections
 * @param maxWait max wait time (ms) for a connection
 */
public RedisConnectionPool(RedisConnectionProvider<T> redisConnectionProvider, int maxActive, int maxIdle,
        long maxWait) {
    this.redisConnectionProvider = redisConnectionProvider;

    GenericObjectPoolConfig config = new GenericObjectPoolConfig();
    config.setMaxIdle(maxIdle);
    config.setMaxTotal(maxActive);
    config.setMaxWaitMillis(maxWait);
    config.setTestOnBorrow(true);

    objectPool = new GenericObjectPool<>(createFactory(redisConnectionProvider), config);
}

From source file:com.streamsets.pipeline.stage.origin.udptokafka.KafkaUDPConsumer.java

public void init() {
    executorService = (ThreadPoolExecutor) Executors.newFixedThreadPool(udpConfigBean.concurrency);
    int max = udpConfigBean.concurrency;
    int minIdle = Math.max(1, udpConfigBean.concurrency / 4);
    int maxIdle = udpConfigBean.concurrency / 2;
    GenericObjectPoolConfig kafkaPoolConfig = new GenericObjectPoolConfig();
    kafkaPoolConfig.setMaxTotal(udpConfigBean.concurrency);
    kafkaPoolConfig.setMinIdle(minIdle);
    kafkaPoolConfig.setMaxIdle(maxIdle);
    LOG.debug("Creating Kafka producer pool with max '{}' minIdle '{}' maxIdle '{}'", max, minIdle, maxIdle);
    kafkaProducerPool = new GenericObjectPool<>(
            new SdcKafkaProducerPooledObjectFactory(kafkaTargetConfig, DataFormat.BINARY), kafkaPoolConfig);
    GenericObjectPoolConfig serializerPoolConfig = new GenericObjectPoolConfig();
    serializerPoolConfig.setMaxTotal(udpConfigBean.concurrency);
    serializerPoolConfig.setMinIdle(udpConfigBean.concurrency);
    serializerPoolConfig.setMaxIdle(udpConfigBean.concurrency);
    udpSerializerPool = new GenericObjectPool<>(new UDPMessageSerializerPooledObjectFactory(),
            serializerPoolConfig);
    udpType = UDP_DATA_FORMAT_MAP.get(udpConfigBean.dataFormat);
    LOG.debug("Started, concurrency '{}'", udpConfigBean.concurrency);
}

From source file:com.reydentx.core.client.MySQLClient.java

private void _initPool() {
    _clientFactory = new MySqlClientFactory(_url);
    _maxActive = 100;
    _maxIdle = 10;
    _maxWaitTimeWhenExhausted = 5000L;

    GenericObjectPoolConfig poolConf = new GenericObjectPoolConfig();
    poolConf.setMaxIdle(_maxIdle);
    poolConf.setMaxTotal(_maxActive);
    poolConf.setMaxWaitMillis(_maxWaitTimeWhenExhausted);

    _pool = new GenericObjectPool<Connection>(_clientFactory, poolConf);
}

From source file:herddb.jdbc.BasicHerdDBDataSource.java

protected synchronized void ensureClient() throws SQLException {
    if (client == null) {
        ClientConfiguration clientConfiguration = new ClientConfiguration(properties);
        // Copy the properties so the password can be masked in the log line below.
        Properties propsNoPassword = new Properties();
        propsNoPassword.putAll(properties);
        if (propsNoPassword.containsKey("password")) {
            propsNoPassword.setProperty("password", "-------");
        }
        LOGGER.log(Level.INFO, "Booting HerdDB Client, url:" + url + ", properties:" + propsNoPassword
                + " clientConfig " + clientConfiguration);
        clientConfiguration.readJdbcUrl(url);
        if (properties.containsKey("discoverTableSpaceFromQuery")) {
            this.discoverTableSpaceFromQuery = clientConfiguration.getBoolean("discoverTableSpaceFromQuery",
                    true);
        }
        client = new HDBClient(clientConfiguration);
    }
    if (pool == null) {
        if (properties.containsKey("maxActive")) {
            this.maxActive = Integer.parseInt(properties.get("maxActive").toString());
        }
        GenericObjectPoolConfig config = new GenericObjectPoolConfig();
        config.setBlockWhenExhausted(true);
        config.setMaxTotal(maxActive);
        config.setMaxIdle(maxActive);
        config.setMinIdle(maxActive / 2);
        config.setJmxNamePrefix("HerdDBClient");
        pool = new GenericObjectPool<>(new ConnectionsFactory(), config);
    }
}

From source file:com.hurence.logisland.processor.useragent.ParseUserAgent.java

@Override
public void init(final ProcessContext context) {
    super.init(context);
    getLogger().debug("Initializing User-Agent Processor");

    debug = context.getPropertyValue(DEBUG).asBoolean();
    userAgentField = context.getPropertyValue(USERAGENT_FIELD).asString();
    userAgentKeep = context.getPropertyValue(USERAGENT_KEEP).asBoolean();
    useCache = context.getPropertyValue(CACHE_ENABLED).asBoolean();
    cacheSize = context.getPropertyValue(CACHE_SIZE).asInteger();
    String tmp = context.getPropertyValue(FIELDS_TO_RETURN).asString();
    selectedFields = Arrays.asList(tmp.split(",")).stream().map(String::trim).collect(Collectors.toList());
    confidenceEnabled = context.getPropertyValue(CONFIDENCE_ENABLED).asBoolean();
    ambiguityEnabled = context.getPropertyValue(AMBIGUITY_ENABLED).asBoolean();

    if (debug) {
        getLogger().info(KEY_USERAGENT_FIELD + "\t: " + userAgentField);
        getLogger().info(KEY_USERAGENT_KEEP + "\t: " + userAgentKeep);
        getLogger().info(KEY_DEBUG + "\t: " + debug);
        getLogger().info(KEY_CACHE_ENABLED + "\t: " + useCache);
        getLogger().info(KEY_CACHE_SIZE + "\t: " + cacheSize);
        getLogger().info(KEY_FIELDS_TO_RETURN + "\t: " + selectedFields);
        getLogger().info(KEY_CONFIDENCE_ENABLED + "\t: " + confidenceEnabled);
        getLogger().info(KEY_AMBIGUITY_ENABLED + "\t: " + ambiguityEnabled);
    }

    if (Singleton.get() == null) {
        getLogger().debug("Initializing UserAgentAnalyzerPool");
        synchronized (sync) {
            if (Singleton.get() == null) {

                GenericObjectPoolConfig config = new GenericObjectPoolConfig();

                //config.setMaxIdle(1);
                config.setMaxTotal(10);

                //TestOnBorrow=true --> To ensure that we get a valid object from pool
                //config.setTestOnBorrow(true);

                //TestOnReturn=true --> To ensure that valid object is returned to pool
                //config.setTestOnReturn(true);

                PooledUserAgentAnalyzerFactory factory = new PooledUserAgentAnalyzerFactory(selectedFields,
                        cacheSize);

                UserAgentAnalyzerPool pool = new UserAgentAnalyzerPool(factory, config);
                Singleton.set(pool);
            }
        }
    }
}

From source file:gobblin.hive.metastore.HiveMetaStoreBasedRegister.java

public HiveMetaStoreBasedRegister(State state, Optional<String> metastoreURI) throws IOException {
    super(state);

    GenericObjectPoolConfig config = new GenericObjectPoolConfig();
    config.setMaxTotal(this.props.getNumThreads());
    config.setMaxIdle(this.props.getNumThreads());
    this.clientPool = HiveMetastoreClientPool.get(this.props.getProperties(), metastoreURI);

    MetricContext metricContext = GobblinMetricsRegistry.getInstance().getMetricContext(state,
            HiveMetaStoreBasedRegister.class, GobblinMetrics.getCustomTagsFromState(state));

    this.eventSubmitter = new EventSubmitter.Builder(metricContext, "gobblin.hive.HiveMetaStoreBasedRegister")
            .build();
}

From source file:edu.harvard.hul.ois.fits.service.servlets.FitsServlet.java

public void init() throws ServletException {

    // "fits.home" property set differently in Tomcat 7 and JBoss 7.
    // Tomcat: set in catalina.properties
    // JBoss: set as a command line value "-Dfits.home=<path/to/fits/home>
    fitsHome = System.getProperty(FITS_HOME_SYSTEM_PROP_NAME);
    logger.info(FITS_HOME_SYSTEM_PROP_NAME + ": " + fitsHome);

    if (StringUtils.isEmpty(fitsHome)) {
        logger.fatal(FITS_HOME_SYSTEM_PROP_NAME
                + " system property HAS NOT BEEN SET!!! This web application will not properly run.");
        throw new ServletException(FITS_HOME_SYSTEM_PROP_NAME
                + " system property HAS NOT BEEN SET!!! This web application will not properly run.");
    }

    // Set the projects properties.
    // First look for a system property pointing to a project properties file. (e.g. - file:/path/to/file)
    // If this value either does not exist or is not valid, the default
    // file that comes with this application will be used for initialization.
    String environmentProjectPropsFile = System.getProperty(ENV_PROJECT_PROPS);
    logger.info("Value of environment property: [" + ENV_PROJECT_PROPS
            + "] for finding external properties file in location: [" + environmentProjectPropsFile + "]");
    if (environmentProjectPropsFile != null) {
        logger.info("Will look for properties file from environment in location: ["
                + environmentProjectPropsFile + "]");
        try {
            File projectProperties = new File(environmentProjectPropsFile);
            if (projectProperties.exists() && projectProperties.isFile() && projectProperties.canRead()) {
                InputStream is = new FileInputStream(projectProperties);
                applicationProps = new Properties();
                applicationProps.load(is);
            }
        } catch (IOException e) {
            // fall back to default file
            logger.error("Unable to load properties file: [" + environmentProjectPropsFile + "] -- reason: "
                    + e.getMessage(), e);
            logger.error("Falling back to default project.properties file: [" + PROPERTIES_FILE_NAME + "]");
            applicationProps = null;
        }
    }

    if (applicationProps == null) { // did not load from environment variable location
        try {
            ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
            InputStream resourceStream = classLoader.getResourceAsStream(PROPERTIES_FILE_NAME);
            if (resourceStream != null) {
                applicationProps = new Properties();
                applicationProps.load(resourceStream);
                logger.info("loaded default applicationProps");
            } else {
                logger.warn("project.properties not found!!!");
            }
        } catch (IOException e) {
            logger.error("Could not load properties file: [" + PROPERTIES_FILE_NAME + "]", e);
            // couldn't load default properties so bail...
            throw new ServletException("Couldn't load an applications properties file.", e);
        }
    }
    int maxPoolSize = Integer
            .valueOf(applicationProps.getProperty("max.objects.in.pool", DEFAULT_MAX_OBJECTS_IN_POOL));
    maxFileUploadSizeMb = Long
            .valueOf(applicationProps.getProperty("max.upload.file.size.MB", DEFAULT_MAX_UPLOAD_SIZE));
    maxRequestSizeMb = Long
            .valueOf(applicationProps.getProperty("max.request.size.MB", DEFAULT_MAX_REQUEST_SIZE));
    maxInMemoryFileSizeMb = Integer
            .valueOf(applicationProps.getProperty("max.in.memory.file.size.MB", DEFAULT_IN_MEMORY_FILE_SIZE));
    logger.info("Max objects in object pool: " + maxPoolSize + " -- Max file upload size: "
            + maxFileUploadSizeMb + "MB -- Max request object size: " + maxRequestSizeMb
            + "MB -- Max in-memory file size: " + maxInMemoryFileSizeMb + "MB");

    logger.debug("Initializing FITS pool");
    GenericObjectPoolConfig poolConfig = new GenericObjectPoolConfig();
    poolConfig.setMinIdle(MIN_IDLE_OBJECTS_IN_POOL);
    poolConfig.setMaxTotal(maxPoolSize);
    poolConfig.setTestOnBorrow(true);
    poolConfig.setBlockWhenExhausted(true);
    fitsWrapperPool = new FitsWrapperPool(new FitsWrapperFactory(), poolConfig);
    logger.debug("FITS pool finished Initializing");

    String uploadBaseDirName = getServletContext().getRealPath("") + File.separator + UPLOAD_DIRECTORY;
    uploadBaseDir = new File(uploadBaseDirName);
    if (!uploadBaseDir.exists()) {
        uploadBaseDir.mkdir();
        logger.info("Created upload base directory: " + uploadBaseDir.getAbsolutePath());
    }
}

From source file:edu.harvard.hul.ois.drs.pdfaconvert.service.servlets.PdfaConverterServlet.java

@Override
public void init() throws ServletException {

    // Set the projects properties.
    // First look for a system property pointing to a project properties file.
    // This value can be either a file path, file protocol (e.g. - file:/path/to/file),
    // or a URL (http://some/server/file).
    // If this value either does not exist or is not valid, the default
    // file that comes with this application will be used for initialization.
    String environmentProjectPropsFile = System.getProperty(ENV_PROJECT_PROPS);
    logger.info("Value of environment property: [{}] for finding external properties file in location: {}",
            ENV_PROJECT_PROPS, environmentProjectPropsFile);
    if (environmentProjectPropsFile != null) {
        logger.info("Will look for properties file from environment in location: {}",
                environmentProjectPropsFile);
        try {
            File projectProperties = new File(environmentProjectPropsFile);
            if (projectProperties.exists() && projectProperties.isFile() && projectProperties.canRead()) {
                InputStream is = new FileInputStream(projectProperties);
                applicationProps = new Properties();
                applicationProps.load(is);
            }
        } catch (IOException e) {
            // fall back to default file
            logger.error("Unable to load properties file: {} -- reason: {}", environmentProjectPropsFile,
                    e.getMessage());
            logger.error("Falling back to default project.properties file: {}", PROPERTIES_FILE_NAME);
            applicationProps = null;
        }
    }

    if (applicationProps == null) { // did not load from environment variable location
        try {
            ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
            InputStream resourceStream = classLoader.getResourceAsStream(PROPERTIES_FILE_NAME);
            if (resourceStream != null) {
                applicationProps = new Properties();
                applicationProps.load(resourceStream);
                logger.info("loaded default applicationProps");
            } else {
                logger.warn("project.properties not found!!!");
            }
        } catch (IOException e) {
            logger.error("Could not load properties file: {}", PROPERTIES_FILE_NAME, e);
            // couldn't load default properties so bail...
            throw new ServletException("Couldn't load an applications properties file.", e);
        }
    }
    int maxPoolSize = Integer
            .valueOf(applicationProps.getProperty("max.objects.in.pool", DEFAULT_MAX_OBJECTS_IN_POOL));
    long maxFileUploadSizeMb = Long
            .valueOf(applicationProps.getProperty("max.upload.file.size.MB", DEFAULT_MAX_UPLOAD_SIZE));
    long maxRequestSizeMb = Long
            .valueOf(applicationProps.getProperty("max.request.size.MB", DEFAULT_MAX_REQUEST_SIZE));
    maxInMemoryFileSizeMb = Integer
            .valueOf(applicationProps.getProperty("max.in.memory.file.size.MB", DEFAULT_IN_MEMORY_FILE_SIZE));
    logger.info(
            "Max objects in object pool: {} -- Max file upload size: {}MB -- Max request object size: {}MB -- Max in-memory file size: {}MB",
            maxPoolSize, maxFileUploadSizeMb, maxRequestSizeMb, maxInMemoryFileSizeMb);

    logger.debug("Initializing PdfaConverter pool");
    GenericObjectPoolConfig poolConfig = new GenericObjectPoolConfig();
    poolConfig.setMinIdle(MIN_IDLE_OBJECTS_IN_POOL);
    poolConfig.setMaxTotal(maxPoolSize);
    poolConfig.setTestOnBorrow(true);
    poolConfig.setBlockWhenExhausted(true);
    pdfaConverterWrapperPool = new PdfaConverterWrapperPool(new PdfaConverterWrapperFactory(), poolConfig);

    // configures upload settings
    factory = new DiskFileItemFactory();
    factory.setSizeThreshold((maxInMemoryFileSizeMb * (int) MB_MULTIPLIER));
    File tempUploadDir = new File(System.getProperty(UPLOAD_DIRECTORY));
    if (!tempUploadDir.exists()) {
        tempUploadDir.mkdir();
    }
    factory.setRepository(tempUploadDir);

    upload = new ServletFileUpload(factory);
    upload.setFileSizeMax(maxFileUploadSizeMb * MB_MULTIPLIER); // convert from MB to bytes
    upload.setSizeMax(maxRequestSizeMb * MB_MULTIPLIER); // convert from MB to bytes

    logger.debug("PdfaConverter pool finished Initializing");
}