List of usage examples for the org.apache.commons.pool2.impl.GenericObjectPool constructor:

    public GenericObjectPool(PooledObjectFactory<T> factory, GenericObjectPoolConfig config)

Creates a new GenericObjectPool using a specific configuration.
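Before the real-world usages below, a minimal self-contained sketch of this constructor. The StringBuilderFactory here is hypothetical, written only so the snippet compiles and runs; in practice the factory encapsulates whatever expensive resource is being pooled, as the examples that follow show.

import org.apache.commons.pool2.BasePooledObjectFactory;
import org.apache.commons.pool2.PooledObject;
import org.apache.commons.pool2.impl.DefaultPooledObject;
import org.apache.commons.pool2.impl.GenericObjectPool;
import org.apache.commons.pool2.impl.GenericObjectPoolConfig;

public class MinimalPoolExample {

    // Hypothetical factory: tells the pool how to create and wrap instances.
    static class StringBuilderFactory extends BasePooledObjectFactory<StringBuilder> {
        @Override
        public StringBuilder create() {
            return new StringBuilder();
        }

        @Override
        public PooledObject<StringBuilder> wrap(StringBuilder value) {
            return new DefaultPooledObject<>(value);
        }

        // A real factory would also reset instance state in passivateObject().
    }

    public static void main(String[] args) throws Exception {
        GenericObjectPoolConfig config = new GenericObjectPoolConfig();
        config.setMaxTotal(8);   // at most 8 instances in circulation
        config.setMaxIdle(4);    // keep at most 4 idle instances around

        // The constructor documented on this page: factory plus configuration.
        GenericObjectPool<StringBuilder> pool = new GenericObjectPool<>(new StringBuilderFactory(), config);

        StringBuilder sb = pool.borrowObject();
        try {
            sb.append("hello");
        } finally {
            pool.returnObject(sb); // always return what you borrow
        }
        pool.close();
    }
}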
From source file: ch.cyberduck.core.pool.DefaultSessionPool.java
public DefaultSessionPool(final ConnectionService connect, final X509TrustManager trust,
                          final X509KeyManager key, final VaultRegistry registry, final PathCache cache,
                          final TranscriptListener transcript, final Host bookmark) {
    this.connect = connect;
    this.registry = registry;
    this.cache = cache;
    this.bookmark = bookmark;
    this.transcript = transcript;
    final GenericObjectPoolConfig configuration = new GenericObjectPoolConfig();
    configuration.setJmxEnabled(false);
    configuration.setEvictionPolicyClassName(CustomPoolEvictionPolicy.class.getName());
    configuration.setBlockWhenExhausted(true);
    configuration.setMaxWaitMillis(BORROW_MAX_WAIT_INTERVAL);
    this.pool = new GenericObjectPool<Session>(
            new PooledSessionFactory(connect, trust, key, cache, bookmark, registry), configuration);
    final AbandonedConfig abandon = new AbandonedConfig();
    abandon.setUseUsageTracking(true);
    this.pool.setAbandonedConfig(abandon);
}
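Notable here: JMX registration is disabled, a custom eviction policy is installed by class name, and borrowing blocks for at most BORROW_MAX_WAIT_INTERVAL. The AbandonedConfig with usage tracking makes the pool record a stack trace each time a pooled session is used, so abandoned-object reports can show where a session was last used before it went missing.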
From source file: gobblin.kafka.schemareg.LiKafkaSchemaRegistry.java
/**
 * @param props properties should contain property "kafka.schema.registry.url", and optionally
 *              "kafka.schema.registry.max.cache.size" (default = 1000) and
 *              "kafka.schema.registry.cache.expire.after.write.min" (default = 10).
 */
public LiKafkaSchemaRegistry(Properties props) {
    Preconditions.checkArgument(
            props.containsKey(KafkaSchemaRegistryConfigurationKeys.KAFKA_SCHEMA_REGISTRY_URL),
            String.format("Property %s not provided.",
                    KafkaSchemaRegistryConfigurationKeys.KAFKA_SCHEMA_REGISTRY_URL));

    this.url = props.getProperty(KafkaSchemaRegistryConfigurationKeys.KAFKA_SCHEMA_REGISTRY_URL);

    int objPoolSize = Integer.parseInt(
            props.getProperty(ConfigurationKeys.KAFKA_SOURCE_WORK_UNITS_CREATION_THREADS,
                    "" + ConfigurationKeys.KAFKA_SOURCE_WORK_UNITS_CREATION_DEFAULT_THREAD_COUNT));
    LOG.info("Create HttpClient pool with size " + objPoolSize);

    GenericObjectPoolConfig config = new GenericObjectPoolConfig();
    config.setMaxTotal(objPoolSize);
    config.setMaxIdle(objPoolSize);
    this.httpClientPool = new GenericObjectPool<>(new HttpClientFactory(), config);
}
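The pool is sized to the number of work-unit-creation threads, with maxIdle raised to match, so every thread can hold its own HttpClient without clients being created and destroyed under load.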
From source file: gobblin.metrics.kafka.KafkaAvroSchemaRegistry.java
/**
 * @param props properties should contain property "kafka.schema.registry.url", and optionally
 *              "kafka.schema.registry.max.cache.size" (default = 1000) and
 *              "kafka.schema.registry.cache.expire.after.write.min" (default = 10).
 */
public KafkaAvroSchemaRegistry(Properties props) {
    super(props);
    Preconditions.checkArgument(props.containsKey(KAFKA_SCHEMA_REGISTRY_URL),
            String.format("Property %s not provided.", KAFKA_SCHEMA_REGISTRY_URL));

    this.url = props.getProperty(KAFKA_SCHEMA_REGISTRY_URL);

    int objPoolSize = Integer.parseInt(
            props.getProperty(ConfigurationKeys.KAFKA_SOURCE_WORK_UNITS_CREATION_THREADS,
                    "" + ConfigurationKeys.KAFKA_SOURCE_WORK_UNITS_CREATION_DEFAULT_THREAD_COUNT));
    LOG.info("Create HttpClient pool with size " + objPoolSize);

    GenericObjectPoolConfig config = new GenericObjectPoolConfig();
    config.setMaxTotal(objPoolSize);
    config.setMaxIdle(objPoolSize);
    this.httpClientPool = new GenericObjectPool<>(new HttpClientFactory(), config);
}
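This is the near-identical gobblin.metrics variant of the previous example; the pool sizing follows the same thread-count rule.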
From source file: de.qucosa.servlet.MetsDisseminatorServlet.java
@Override
public void init() throws ServletException {
    super.init();
    startupProperties = new PropertyCollector()
            .source(getServletContext())
            .source(System.getProperties())
            .collect();

    final FedoraClientFactory fedoraClientFactory = attemptToCreateFedoraClientFactoryFrom(startupProperties);
    if (fedoraClientFactory == null) {
        // we need a client factory for startup connections
        log.warn("Fedora connection credentials not configured. No connection pooling possible.");
    } else {
        final GenericObjectPoolConfig poolConfig = new GenericObjectPoolConfig();
        poolConfig.setMaxTotal(
                Integer.parseInt(startupProperties.getProperty(PROP_FEDORA_CONNECTIONPOOL_MAXSIZE, "20")));
        poolConfig.setMinIdle(5);
        poolConfig.setMinEvictableIdleTimeMillis(TimeUnit.MINUTES.toMillis(1));
        fedoraClientPool = new GenericObjectPool<>(fedoraClientFactory, poolConfig);
        log.info("Initialized Fedora connection pool.");
    }

    cacheManager = CacheManager.newInstance();
    cache = cacheManager.getCache("dscache");
}
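Here the pool is bounded by a configurable maximum (default 20) and keeps at least five idle clients, with clients becoming eligible for eviction after a minute of idleness. Note that minEvictableIdleTimeMillis only takes effect if the evictor thread is enabled via timeBetweenEvictionRunsMillis, which this snippet does not set.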
From source file: com.streamsets.pipeline.stage.origin.tokafka.KafkaFragmentWriter.java
@VisibleForTesting
GenericObjectPool<SdcKafkaProducer> createKafkaProducerPool() {
    int minIdle = Math.max(1, maxConcurrency / 4);
    int maxIdle = maxConcurrency / 2;
    GenericObjectPoolConfig poolConfig = new GenericObjectPoolConfig();
    poolConfig.setMaxTotal(maxConcurrency);
    poolConfig.setMinIdle(minIdle);
    poolConfig.setMaxIdle(maxIdle);
    LOG.debug("Creating Kafka producer pool with max '{}' minIdle '{}' maxIdle '{}'",
            maxConcurrency, minIdle, maxIdle);
    return new GenericObjectPool<>(
            new SdcKafkaProducerPooledObjectFactory(kafkaConfigs, DataFormat.SDC_JSON), poolConfig);
}
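The sizing heuristic keeps at least one producer idle, idles at most half of the maximum, and caps the pool at maxConcurrency, so bursts can use every producer while quiet periods release most of them.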
From source file: com.streamsets.pipeline.lib.parser.DataParserFactory.java
protected GenericObjectPool<StringBuilder> getStringBuilderPool(Settings settings) {
    int maxRecordLen = getSettings().getMaxRecordLen();
    int poolSize = getSettings().getStringBuilderPoolSize();
    GenericObjectPoolConfig stringBuilderPoolConfig = new GenericObjectPoolConfig();
    stringBuilderPoolConfig.setMaxTotal(poolSize);
    stringBuilderPoolConfig.setMinIdle(poolSize);
    stringBuilderPoolConfig.setMaxIdle(poolSize);
    stringBuilderPoolConfig.setBlockWhenExhausted(false);
    return new GenericObjectPool<>(
            new StringBuilderPoolFactory(maxRecordLen > 0 ? maxRecordLen : DEFAULT_MAX_RECORD_LENGTH),
            stringBuilderPoolConfig);
}
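Setting maxTotal, minIdle and maxIdle to the same value yields a fixed-size pool, and with blockWhenExhausted(false) a borrowObject() call on an exhausted pool fails immediately with a NoSuchElementException rather than waiting for a StringBuilder to be returned.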
From source file: com.streamsets.pipeline.lib.parser.log.TestCEFParser.java
private GenericObjectPool<StringBuilder> getStringBuilderPool() {
    GenericObjectPoolConfig stringBuilderPoolConfig = new GenericObjectPoolConfig();
    stringBuilderPoolConfig.setMaxTotal(1);
    stringBuilderPoolConfig.setMinIdle(1);
    stringBuilderPoolConfig.setMaxIdle(1);
    stringBuilderPoolConfig.setBlockWhenExhausted(false);
    return new GenericObjectPool<>(new StringBuilderPoolFactory(1024), stringBuilderPoolConfig);
}
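A single-object, non-blocking pool: just enough for a unit test that parses one record at a time.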
From source file: io.lettuce.core.support.ConnectionPoolSupport.java
/**
 * Creates a new {@link GenericObjectPool} using the {@link Supplier}.
 *
 * @param connectionSupplier must not be {@literal null}.
 * @param config must not be {@literal null}.
 * @param wrapConnections {@literal false} to return direct connections that need to be returned to the pool
 *        using {@link ObjectPool#returnObject(Object)}. {@literal true} to return wrapped connections that are
 *        returned to the pool when invoking {@link StatefulConnection#close()}.
 * @param <T> connection type.
 * @return the connection pool.
 */
@SuppressWarnings("unchecked")
public static <T extends StatefulConnection<?, ?>> GenericObjectPool<T> createGenericObjectPool(
        Supplier<T> connectionSupplier, GenericObjectPoolConfig config, boolean wrapConnections) {

    LettuceAssert.notNull(connectionSupplier, "Connection supplier must not be null");
    LettuceAssert.notNull(config, "GenericObjectPoolConfig must not be null");

    AtomicReference<Origin<T>> poolRef = new AtomicReference<>();

    GenericObjectPool<T> pool = new GenericObjectPool<T>(new RedisPooledObjectFactory<T>(connectionSupplier),
            config) {

        @Override
        public T borrowObject() throws Exception {
            return wrapConnections ? ConnectionWrapping.wrapConnection(super.borrowObject(), poolRef.get())
                    : super.borrowObject();
        }

        @Override
        public void returnObject(T obj) {
            if (wrapConnections && obj instanceof HasTargetConnection) {
                super.returnObject((T) ((HasTargetConnection) obj).getTargetConnection());
                return;
            }
            super.returnObject(obj);
        }
    };

    poolRef.set(new ObjectPoolWrapper<>(pool));

    return pool;
}
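The interesting technique here is the anonymous GenericObjectPool subclass: borrowObject() optionally wraps each connection so that closing it returns it to the pool, and returnObject() unwraps it again before handing it back. The AtomicReference is needed because the wrapper must point back at the pool, which does not exist until the constructor has finished.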
From source file: com.streamsets.pipeline.stage.origin.sdcipctokafka.IpcToKafkaServlet.java
@Override
public void init() throws ServletException {
    super.init();
    int max = configs.maxConcurrentRequests;
    int minIdle = Math.max(1, configs.maxConcurrentRequests / 4);
    int maxIdle = configs.maxConcurrentRequests / 2;
    GenericObjectPoolConfig poolConfig = new GenericObjectPoolConfig();
    poolConfig.setMaxTotal(max);
    poolConfig.setMinIdle(minIdle);
    poolConfig.setMaxIdle(maxIdle);
    LOG.debug("Creating Kafka producer pool with max '{}' minIdle '{}' maxIdle '{}'", max, minIdle, maxIdle);
    kafkaProducerPool = new GenericObjectPool<>(new SdcKafkaProducerPooledObjectFactory(configs), poolConfig);
}
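Same sizing heuristic as the KafkaFragmentWriter example above, applied in a servlet's init() so the producer pool lives for the lifetime of the servlet.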
From source file: edumsg.core.PostgresConnection.java
public static void initSource() {
    try {
        try {
            Class.forName("org.postgresql.Driver");
        } catch (ClassNotFoundException ex) {
            LOGGER.log(Level.SEVERE, "Error loading Postgres driver: " + ex.getMessage(), ex);
        }
        try {
            readConfFile();
        } catch (Exception e) {
            e.printStackTrace();
        }
        Properties props = new Properties();
        // System.out.println(DB_USERNAME);
        props.setProperty("user", DB_USERNAME);
        props.setProperty("password", DB_PASSWORD);
        props.setProperty("initialSize", DB_INIT_CONNECTIONS);
        props.setProperty("maxActive", DB_MAX_CONNECTIONS);
        ConnectionFactory connectionFactory = new DriverManagerConnectionFactory(DB_URL, props);
        PoolableConnectionFactory poolableConnectionFactory = new PoolableConnectionFactory(connectionFactory,
                null);
        poolableConnectionFactory.setPoolStatements(true);
        GenericObjectPoolConfig poolConfig = new GenericObjectPoolConfig();
        poolConfig.setMaxIdle(Integer.parseInt(DB_INIT_CONNECTIONS));
        poolConfig.setMaxTotal(Integer.parseInt(DB_MAX_CONNECTIONS));
        ObjectPool<PoolableConnection> connectionPool = new GenericObjectPool<>(poolableConnectionFactory,
                poolConfig);
        poolableConnectionFactory.setPool(connectionPool);
        Class.forName("org.apache.commons.dbcp2.PoolingDriver");
        dbDriver = (PoolingDriver) DriverManager.getDriver("jdbc:apache:commons:dbcp:");
        dbDriver.registerPool(DB_NAME, connectionPool);
        dataSource = new PoolingDataSource<>(connectionPool);
    } catch (Exception ex) {
        LOGGER.log(Level.SEVERE, "Got error initializing data source: " + ex.getMessage(), ex);
    }
}
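This is the classic DBCP2 wiring: the PoolableConnectionFactory and the GenericObjectPool reference each other, which is why the factory's pool is set only after the pool has been constructed. The finished pool is then exposed twice, through the PoolingDriver under a JDBC URL and through a PoolingDataSource.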