Example usage for the org.springframework.kafka.core DefaultKafkaProducerFactory constructor

Introduction

On this page you can find example usage for the org.springframework.kafka.core DefaultKafkaProducerFactory constructor, DefaultKafkaProducerFactory(Map<String, Object> configs).

Prototype

public DefaultKafkaProducerFactory(Map<String, Object> configs) 

Document

Construct a factory with the provided configuration.
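
Before the examples below, here is a minimal, self-contained sketch of the constructor in use. The broker address localhost:9092, the topic name example-topic, and the ProducerFactoryExample class name are assumptions for illustration only and do not come from any of the source files listed.

import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;

public class ProducerFactoryExample {

    public static void main(String[] args) {
        // Build the configuration map that the constructor receives.
        Map<String, Object> configs = new HashMap<>();
        configs.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // assumed broker address
        configs.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        configs.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);

        // Construct the factory with the provided configuration and wrap it in a template.
        ProducerFactory<String, String> pf = new DefaultKafkaProducerFactory<>(configs);
        KafkaTemplate<String, String> template = new KafkaTemplate<>(pf);
        template.send("example-topic", "key", "value"); // hypothetical topic
        template.flush();
    }
}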

Usage

From source file:org.s1p.JsonConfiguration.java

@Bean
public ProducerFactory<String, Foo> producerFactory() {
    Map<String, Object> props = new HashMap<>();
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, this.configProperties.getBrokerAddress());
    props.put(ProducerConfig.RETRIES_CONFIG, 0);
    props.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);
    props.put(ProducerConfig.LINGER_MS_CONFIG, 1);
    props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
    return new DefaultKafkaProducerFactory<>(props);
}

From source file:org.s1p.CommonConfiguration.java

@Bean
public ProducerFactory<String, String> producerFactory() {
    Map<String, Object> props = new HashMap<>();
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, this.configProperties.getBrokerAddress());
    props.put(ProducerConfig.RETRIES_CONFIG, 0);
    props.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);
    props.put(ProducerConfig.LINGER_MS_CONFIG, 1);
    props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    return new DefaultKafkaProducerFactory<>(props);
}

From source file:io.pivotal.cf.service.connector.KafkaRepository.java

private KafkaTemplate<Integer, String> getTemplate() {
    return new KafkaTemplate<>(new DefaultKafkaProducerFactory<>(senderProperties()));
}
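
The senderProperties() helper called above is not included in the snippet. A plausible sketch of such a helper, matching the KafkaTemplate<Integer, String> types, is shown below; the broker address is an assumption, and the whole method is a hypothetical reconstruction rather than the actual KafkaRepository code.

private Map<String, Object> senderProperties() {
    // Hypothetical reconstruction; the real implementation in KafkaRepository is not shown here.
    Map<String, Object> props = new HashMap<>();
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // assumed broker address
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class);
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    return props;
}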

From source file:org.springframework.cloud.stream.binder.kafka.streams.KafkaStreamsDlqDispatch.java

private DefaultKafkaProducerFactory<byte[], byte[]> getProducerFactory(
        ExtendedProducerProperties<KafkaProducerProperties> producerProperties,
        KafkaBinderConfigurationProperties configurationProperties) {
    Map<String, Object> props = new HashMap<>();
    props.put(ProducerConfig.RETRIES_CONFIG, 0);
    props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
    props.put(ProducerConfig.ACKS_CONFIG, configurationProperties.getRequiredAcks());
    Map<String, Object> mergedConfig = configurationProperties.mergedProducerConfiguration();
    if (!ObjectUtils.isEmpty(mergedConfig)) {
        props.putAll(mergedConfig);
    }
    if (ObjectUtils.isEmpty(props.get(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG))) {
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, configurationProperties.getKafkaConnectionString());
    }
    if (ObjectUtils.isEmpty(props.get(ProducerConfig.BATCH_SIZE_CONFIG))) {
        props.put(ProducerConfig.BATCH_SIZE_CONFIG,
                String.valueOf(producerProperties.getExtension().getBufferSize()));
    }
    if (ObjectUtils.isEmpty(props.get(ProducerConfig.LINGER_MS_CONFIG))) {
        props.put(ProducerConfig.LINGER_MS_CONFIG,
                String.valueOf(producerProperties.getExtension().getBatchTimeout()));
    }
    if (ObjectUtils.isEmpty(props.get(ProducerConfig.COMPRESSION_TYPE_CONFIG))) {
        props.put(ProducerConfig.COMPRESSION_TYPE_CONFIG,
                producerProperties.getExtension().getCompressionType().toString());
    }
    if (!ObjectUtils.isEmpty(producerProperties.getExtension().getConfiguration())) {
        props.putAll(producerProperties.getExtension().getConfiguration());
    }
    //Always send as byte[] on dlq (the same byte[] that the consumer received)
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);

    return new DefaultKafkaProducerFactory<>(props);
}

From source file:org.springframework.kafka.listener.ConcurrentMessageListenerContainerTests.java

@Test
public void testAutoCommit() throws Exception {
    this.logger.info("Start auto");
    Map<String, Object> props = KafkaTestUtils.consumerProps("test1", "true", embeddedKafka);
    DefaultKafkaConsumerFactory<Integer, String> cf = new DefaultKafkaConsumerFactory<>(props);
    ContainerProperties containerProps = new ContainerProperties(topic1);

    final CountDownLatch latch = new CountDownLatch(4);
    final Set<String> listenerThreadNames = new ConcurrentSkipListSet<>();
    containerProps.setMessageListener((MessageListener<Integer, String>) message -> {
        ConcurrentMessageListenerContainerTests.this.logger.info("auto: " + message);
        listenerThreadNames.add(Thread.currentThread().getName());
        latch.countDown();
    });

    ConcurrentMessageListenerContainer<Integer, String> container = new ConcurrentMessageListenerContainer<>(cf,
            containerProps);
    container.setConcurrency(2);
    container.setBeanName("testAuto");
    container.start();

    ContainerTestUtils.waitForAssignment(container, embeddedKafka.getPartitionsPerTopic());

    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    ProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf);
    template.setDefaultTopic(topic1);
    template.sendDefault(0, "foo");
    template.sendDefault(2, "bar");
    template.sendDefault(0, "baz");
    template.sendDefault(2, "qux");
    template.flush();
    assertThat(latch.await(60, TimeUnit.SECONDS)).isTrue();
    assertThat(listenerThreadNames).allMatch(threadName -> threadName.contains("-consumer-"));
    @SuppressWarnings("unchecked")
    List<KafkaMessageListenerContainer<Integer, String>> containers = KafkaTestUtils.getPropertyValue(container,
            "containers", List.class);
    assertThat(containers.size()).isEqualTo(2);
    for (int i = 0; i < 2; i++) {
        assertThat(KafkaTestUtils.getPropertyValue(containers.get(i), "listenerConsumer.acks", Collection.class)
                .size()).isEqualTo(0);
    }
    container.stop();
    this.logger.info("Stop auto");
}

From source file:org.springframework.kafka.listener.ConcurrentMessageListenerContainerTests.java

@Test
public void testAutoCommitWithRebalanceListener() throws Exception {
    this.logger.info("Start auto");
    Map<String, Object> props = KafkaTestUtils.consumerProps("test10", "true", embeddedKafka);
    DefaultKafkaConsumerFactory<Integer, String> cf = new DefaultKafkaConsumerFactory<>(props);
    ContainerProperties containerProps = new ContainerProperties(topic1);

    final CountDownLatch latch = new CountDownLatch(4);
    final Set<String> listenerThreadNames = new ConcurrentSkipListSet<>();
    containerProps.setMessageListener((MessageListener<Integer, String>) message -> {
        ConcurrentMessageListenerContainerTests.this.logger.info("auto: " + message);
        listenerThreadNames.add(Thread.currentThread().getName());
        latch.countDown();
    });
    final CountDownLatch rebalancePartitionsAssignedLatch = new CountDownLatch(2);
    final CountDownLatch rebalancePartitionsRevokedLatch = new CountDownLatch(2);
    containerProps.setConsumerRebalanceListener(new ConsumerRebalanceListener() {

        @Override
        public void onPartitionsRevoked(Collection<TopicPartition> partitions) {
            ConcurrentMessageListenerContainerTests.this.logger
                    .info("In test, partitions revoked:" + partitions);
            rebalancePartitionsRevokedLatch.countDown();
        }

        @Override
        public void onPartitionsAssigned(Collection<TopicPartition> partitions) {
            ConcurrentMessageListenerContainerTests.this.logger
                    .info("In test, partitions assigned:" + partitions);
            rebalancePartitionsAssignedLatch.countDown();
        }

    });

    ConcurrentMessageListenerContainer<Integer, String> container = new ConcurrentMessageListenerContainer<>(cf,
            containerProps);
    container.setConcurrency(2);
    container.setBeanName("testAuto");
    container.start();

    ContainerTestUtils.waitForAssignment(container, embeddedKafka.getPartitionsPerTopic());

    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    ProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf);
    template.setDefaultTopic(topic1);
    template.sendDefault(0, "foo");
    template.sendDefault(2, "bar");
    template.sendDefault(0, "baz");
    template.sendDefault(2, "qux");
    template.flush();
    assertThat(latch.await(60, TimeUnit.SECONDS)).isTrue();
    assertThat(rebalancePartitionsAssignedLatch.await(60, TimeUnit.SECONDS)).isTrue();
    assertThat(rebalancePartitionsRevokedLatch.await(60, TimeUnit.SECONDS)).isTrue();
    assertThat(listenerThreadNames).allMatch(threadName -> threadName.contains("-consumer-"));
    container.stop();
    this.logger.info("Stop auto");
}

From source file:org.springframework.kafka.listener.ConcurrentMessageListenerContainerTests.java

@Test
public void testAfterListenCommit() throws Exception {
    this.logger.info("Start manual");
    Map<String, Object> props = KafkaTestUtils.consumerProps("test2", "false", embeddedKafka);
    DefaultKafkaConsumerFactory<Integer, String> cf = new DefaultKafkaConsumerFactory<>(props);
    ContainerProperties containerProps = new ContainerProperties(topic2);

    final CountDownLatch latch = new CountDownLatch(4);
    final Set<String> listenerThreadNames = new ConcurrentSkipListSet<>();
    containerProps.setMessageListener((MessageListener<Integer, String>) message -> {
        ConcurrentMessageListenerContainerTests.this.logger.info("manual: " + message);
        listenerThreadNames.add(Thread.currentThread().getName());
        latch.countDown();
    });

    ConcurrentMessageListenerContainer<Integer, String> container = new ConcurrentMessageListenerContainer<>(cf,
            containerProps);
    container.setConcurrency(2);
    container.setBeanName("testBatch");
    container.start();

    ContainerTestUtils.waitForAssignment(container, embeddedKafka.getPartitionsPerTopic());

    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    ProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf);
    template.setDefaultTopic(topic2);
    template.sendDefault(0, "foo");
    template.sendDefault(2, "bar");
    template.sendDefault(0, "baz");
    template.sendDefault(2, "qux");
    template.flush();
    assertThat(latch.await(60, TimeUnit.SECONDS)).isTrue();
    assertThat(listenerThreadNames).allMatch(threadName -> threadName.contains("-listener-"));
    container.stop();
    this.logger.info("Stop manual");
}

From source file:org.springframework.kafka.listener.ConcurrentMessageListenerContainerTests.java

private void testManualCommitGuts(AckMode ackMode, String topic) throws Exception {
    this.logger.info("Start " + ackMode);
    Map<String, Object> props = KafkaTestUtils.consumerProps("test" + ackMode, "false", embeddedKafka);
    DefaultKafkaConsumerFactory<Integer, String> cf = new DefaultKafkaConsumerFactory<>(props);
    ContainerProperties containerProps = new ContainerProperties(topic);
    final CountDownLatch latch = new CountDownLatch(4);
    containerProps.setMessageListener((AcknowledgingMessageListener<Integer, String>) (message, ack) -> {
        ConcurrentMessageListenerContainerTests.this.logger.info("manual: " + message);
        ack.acknowledge();
        latch.countDown();
    });

    containerProps.setAckMode(ackMode);
    ConcurrentMessageListenerContainer<Integer, String> container = new ConcurrentMessageListenerContainer<>(cf,
            containerProps);
    container.setConcurrency(2);
    container.setBeanName("test" + ackMode);
    container.start();

    ContainerTestUtils.waitForAssignment(container, embeddedKafka.getPartitionsPerTopic());

    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    ProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf);
    template.setDefaultTopic(topic);
    template.sendDefault(0, "foo");
    template.sendDefault(2, "bar");
    template.sendDefault(0, "baz");
    template.sendDefault(2, "qux");
    template.flush();
    assertThat(latch.await(60, TimeUnit.SECONDS)).isTrue();
    container.stop();
    this.logger.info("Stop " + ackMode);
}

From source file:org.springframework.kafka.listener.ConcurrentMessageListenerContainerTests.java

@Test
@Ignore // TODO https://github.com/spring-projects/spring-kafka/issues/62 using SYNC for avoidance
public void testManualCommitExisting() throws Exception {
    this.logger.info("Start MANUAL_IMMEDIATE with Existing");
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    ProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf);
    template.setDefaultTopic(topic7);
    template.sendDefault(0, "foo");
    template.sendDefault(2, "bar");
    template.sendDefault(0, "baz");
    template.sendDefault(2, "qux");
    template.flush();
    Map<String, Object> props = KafkaTestUtils.consumerProps("testManualExisting", "false", embeddedKafka);
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    DefaultKafkaConsumerFactory<Integer, String> cf = new DefaultKafkaConsumerFactory<>(props);
    ContainerProperties containerProps = new ContainerProperties(topic7);
    final CountDownLatch latch = new CountDownLatch(8);
    containerProps.setMessageListener((AcknowledgingMessageListener<Integer, String>) (message, ack) -> {
        ConcurrentMessageListenerContainerTests.this.logger.info("manualExisting: " + message);
        ack.acknowledge();
        latch.countDown();
    });
    containerProps.setAckMode(AckMode.MANUAL_IMMEDIATE);

    final CountDownLatch commits = new CountDownLatch(8);
    final AtomicReference<Exception> exceptionRef = new AtomicReference<>();
    containerProps.setCommitCallback((offsets, exception) -> {
        commits.countDown();
        if (exception != null) {
            exceptionRef.compareAndSet(null, exception);
        }
    });

    ConcurrentMessageListenerContainer<Integer, String> container = new ConcurrentMessageListenerContainer<>(cf,
            containerProps);
    container.setConcurrency(1);
    container.setBeanName("testManualExisting");

    container.start();
    ContainerTestUtils.waitForAssignment(container, embeddedKafka.getPartitionsPerTopic());
    template.sendDefault(0, "fooo");
    template.sendDefault(2, "barr");
    template.sendDefault(0, "bazz");
    template.sendDefault(2, "quxx");
    template.flush();
    assertThat(latch.await(60, TimeUnit.SECONDS)).isTrue();
    assertThat(commits.await(60, TimeUnit.SECONDS)).isTrue();
    assertThat(exceptionRef.get()).isNull();
    container.stop();
    this.logger.info("Stop MANUAL_IMMEDIATE with Existing");
}

From source file:org.springframework.kafka.listener.ConcurrentMessageListenerContainerTests.java

@Test
public void testManualCommitSyncExisting() throws Exception {
    this.logger.info("Start MANUAL_IMMEDIATE with Existing");
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    ProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<Integer, String>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf);
    template.setDefaultTopic(topic8);
    template.sendDefault(0, "foo");
    template.sendDefault(2, "bar");
    template.sendDefault(0, "baz");
    template.sendDefault(2, "qux");
    template.flush();
    Map<String, Object> props = KafkaTestUtils.consumerProps("testManualExistingSync", "false", embeddedKafka);
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    DefaultKafkaConsumerFactory<Integer, String> cf = new DefaultKafkaConsumerFactory<Integer, String>(props);
    ContainerProperties containerProps = new ContainerProperties(topic8);
    containerProps.setSyncCommits(true);
    final CountDownLatch latch = new CountDownLatch(8);
    final BitSet bitSet = new BitSet(8);
    containerProps.setMessageListener((AcknowledgingMessageListener<Integer, String>) (message, ack) -> {
        ConcurrentMessageListenerContainerTests.this.logger.info("manualExisting: " + message);
        ack.acknowledge();
        bitSet.set((int) (message.partition() * 4 + message.offset()));
        latch.countDown();
    });
    containerProps.setAckMode(AckMode.MANUAL_IMMEDIATE);

    ConcurrentMessageListenerContainer<Integer, String> container = new ConcurrentMessageListenerContainer<>(cf,
            containerProps);
    container.setConcurrency(1);
    container.setBeanName("testManualExisting");
    container.start();
    ContainerTestUtils.waitForAssignment(container, embeddedKafka.getPartitionsPerTopic());
    template.sendDefault(0, "fooo");
    template.sendDefault(2, "barr");
    template.sendDefault(0, "bazz");
    template.sendDefault(2, "quxx");
    template.flush();
    assertThat(latch.await(60, TimeUnit.SECONDS)).isTrue();
    assertThat(bitSet.cardinality()).isEqualTo(8);
    container.stop();
    this.logger.info("Stop MANUAL_IMMEDIATE with Existing");
}