Example usage for org.springframework.kafka.core KafkaTemplate send

Introduction

On this page you can find example usages of org.springframework.kafka.core KafkaTemplate send.

Prototype

@Override
public ListenableFuture<SendResult<K, V>> send(String topic, @Nullable V data)
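
As a quick orientation before the examples below, here is a minimal sketch of calling this overload and handling the returned ListenableFuture asynchronously. It assumes the Spring Kafka 2.x API shown in the prototype; the topic name, value, and the already-configured template are illustrative assumptions, not taken from the sources below.

import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;
import org.springframework.util.concurrent.ListenableFuture;
import org.springframework.util.concurrent.ListenableFutureCallback;

public class SendExample {

    // The template is assumed to be configured elsewhere, e.g. built from a
    // DefaultKafkaProducerFactory; "someTopic" and "someValue" are placeholders.
    public void send(KafkaTemplate<Integer, String> template) {
        ListenableFuture<SendResult<Integer, String>> future = template.send("someTopic", "someValue");
        future.addCallback(new ListenableFutureCallback<SendResult<Integer, String>>() {

            @Override
            public void onSuccess(SendResult<Integer, String> result) {
                // record metadata (partition, offset) is available once the broker acknowledges
                System.out.println("Sent to offset " + result.getRecordMetadata().offset());
            }

            @Override
            public void onFailure(Throwable ex) {
                System.err.println("Send failed: " + ex.getMessage());
            }
        });
    }
}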

Usage

From source file:io.pivotal.cf.service.connector.KafkaRepository.java

public ListenableFuture<SendResult<Integer, String>> sendMessage(String message)
        throws ExecutionException, InterruptedException {
    KafkaTemplate<Integer, String> template = getTemplate();
    ListenableFuture<SendResult<Integer, String>> future = template.send(info.getTopicName(), message);
    template.flush();
    return future;
}
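
Because sendMessage returns the ListenableFuture rather than waiting on it, a caller decides whether to block. The following hypothetical caller is a sketch only; the 10-second timeout and the class name are assumptions, while KafkaRepository and sendMessage come from the example above.

import java.util.concurrent.TimeUnit;

import org.springframework.kafka.support.SendResult;

import io.pivotal.cf.service.connector.KafkaRepository;

public class KafkaRepositoryClient {

    // Hypothetical caller: block until the broker acknowledges the send (or the call fails).
    public long sendAndWait(KafkaRepository repository, String message) throws Exception {
        SendResult<Integer, String> result = repository.sendMessage(message).get(10, TimeUnit.SECONDS);
        return result.getRecordMetadata().offset();
    }
}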

From source file:org.springframework.kafka.listener.TransactionalContainerTests.java

@SuppressWarnings({ "rawtypes", "unchecked" })
private void testConsumeAndProduceTransactionGuts(boolean chained, boolean handleError) throws Exception {
    Consumer consumer = mock(Consumer.class);
    final TopicPartition topicPartition = new TopicPartition("foo", 0);
    willAnswer(i -> {
        ((ConsumerRebalanceListener) i.getArgument(1))
                .onPartitionsAssigned(Collections.singletonList(topicPartition));
        return null;
    }).given(consumer).subscribe(any(Collection.class), any(ConsumerRebalanceListener.class));
    ConsumerRecords records = new ConsumerRecords(Collections.singletonMap(topicPartition,
            Collections.singletonList(new ConsumerRecord<>("foo", 0, 0, "key", "value"))));
    final AtomicBoolean done = new AtomicBoolean();
    willAnswer(i -> {
        if (done.compareAndSet(false, true)) {
            return records;
        } else {
            Thread.sleep(500);
            return null;
        }
    }).given(consumer).poll(any(Duration.class));
    ConsumerFactory cf = mock(ConsumerFactory.class);
    willReturn(consumer).given(cf).createConsumer("group", "", null);
    Producer producer = mock(Producer.class);
    final CountDownLatch closeLatch = new CountDownLatch(2);
    willAnswer(i -> {
        closeLatch.countDown();
        return null;
    }).given(producer).close();
    ProducerFactory pf = mock(ProducerFactory.class);
    given(pf.transactionCapable()).willReturn(true);
    final List<String> transactionalIds = new ArrayList<>();
    willAnswer(i -> {
        transactionalIds.add(TransactionSupport.getTransactionIdSuffix());
        return producer;
    }).given(pf).createProducer();
    KafkaTransactionManager tm = new KafkaTransactionManager(pf);
    PlatformTransactionManager ptm = tm;
    if (chained) {
        ptm = new ChainedKafkaTransactionManager(new SomeOtherTransactionManager(), tm);
    }
    ContainerProperties props = new ContainerProperties("foo");
    props.setGroupId("group");
    props.setTransactionManager(ptm);
    final KafkaTemplate template = new KafkaTemplate(pf);
    props.setMessageListener((MessageListener) m -> {
        template.send("bar", "baz");
        if (handleError) {
            throw new RuntimeException("fail");
        }
    });
    KafkaMessageListenerContainer container = new KafkaMessageListenerContainer<>(cf, props);
    container.setBeanName("commit");
    if (handleError) {
        container.setErrorHandler((e, data) -> {
        });
    }
    container.start();
    assertThat(closeLatch.await(10, TimeUnit.SECONDS)).isTrue();
    InOrder inOrder = inOrder(producer);
    inOrder.verify(producer).beginTransaction();
    inOrder.verify(producer).sendOffsetsToTransaction(
            Collections.singletonMap(topicPartition, new OffsetAndMetadata(0)), "group");
    inOrder.verify(producer).commitTransaction();
    inOrder.verify(producer).close();
    inOrder.verify(producer).beginTransaction();
    ArgumentCaptor<ProducerRecord> captor = ArgumentCaptor.forClass(ProducerRecord.class);
    inOrder.verify(producer).send(captor.capture(), any(Callback.class));
    assertThat(captor.getValue()).isEqualTo(new ProducerRecord("bar", "baz"));
    inOrder.verify(producer).sendOffsetsToTransaction(
            Collections.singletonMap(topicPartition, new OffsetAndMetadata(1)), "group");
    inOrder.verify(producer).commitTransaction();
    inOrder.verify(producer).close();
    container.stop();
    verify(pf, times(2)).createProducer();
    verifyNoMoreInteractions(producer);
    assertThat(transactionalIds.get(0)).isEqualTo("group.foo.0");
    assertThat(transactionalIds.get(1)).isEqualTo("group.foo.0");
}
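
The test above drives the consume-and-produce transaction entirely through mocked factories. In application code, the analogous wiring might look roughly like the following sketch; the broker address, serializer choices, and the "tx-" transaction-id prefix are assumptions for illustration, not values taken from the test.

import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.transaction.KafkaTransactionManager;

public class TransactionalKafkaSetup {

    private final DefaultKafkaProducerFactory<String, String> pf;

    public TransactionalKafkaSetup() {
        Map<String, Object> producerProps = new HashMap<>();
        producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // assumption
        producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        this.pf = new DefaultKafkaProducerFactory<>(producerProps);
        this.pf.setTransactionIdPrefix("tx-"); // makes the factory transaction capable
    }

    // Set this on ContainerProperties via setTransactionManager(...), as the test does.
    public KafkaTransactionManager<String, String> transactionManager() {
        return new KafkaTransactionManager<>(this.pf);
    }

    // A send(...) from inside a listener then participates in the container-started transaction.
    public KafkaTemplate<String, String> template() {
        return new KafkaTemplate<>(this.pf);
    }
}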

From source file:org.springframework.kafka.listener.TransactionalContainerTests.java

@SuppressWarnings({ "rawtypes", "unchecked" })
@Test
public void testConsumeAndProduceTransactionRollback() throws Exception {
    Consumer consumer = mock(Consumer.class);
    final TopicPartition topicPartition0 = new TopicPartition("foo", 0);
    final TopicPartition topicPartition1 = new TopicPartition("foo", 1);
    Map<TopicPartition, List<ConsumerRecord<String, String>>> recordMap = new HashMap<>();
    recordMap.put(topicPartition0,
            Collections.singletonList(new ConsumerRecord<>("foo", 0, 0, "key", "value")));
    recordMap.put(topicPartition1,
            Collections.singletonList(new ConsumerRecord<>("foo", 1, 0, "key", "value")));
    ConsumerRecords records = new ConsumerRecords(recordMap);
    final AtomicBoolean done = new AtomicBoolean();
    willAnswer(i -> {
        if (done.compareAndSet(false, true)) {
            return records;
        } else {
            Thread.sleep(500);
            return null;
        }
    }).given(consumer).poll(any(Duration.class));
    final CountDownLatch seekLatch = new CountDownLatch(2);
    willAnswer(i -> {
        seekLatch.countDown();
        return null;
    }).given(consumer).seek(any(), anyLong());
    ConsumerFactory cf = mock(ConsumerFactory.class);
    willReturn(consumer).given(cf).createConsumer("group", "", null);
    Producer producer = mock(Producer.class);
    final CountDownLatch closeLatch = new CountDownLatch(1);
    willAnswer(i -> {
        closeLatch.countDown();
        return null;
    }).given(producer).close();
    ProducerFactory pf = mock(ProducerFactory.class);
    given(pf.transactionCapable()).willReturn(true);
    given(pf.createProducer()).willReturn(producer);
    KafkaTransactionManager tm = new KafkaTransactionManager(pf);
    ContainerProperties props = new ContainerProperties(new TopicPartitionInitialOffset("foo", 0),
            new TopicPartitionInitialOffset("foo", 1));
    props.setGroupId("group");
    props.setTransactionManager(tm);
    final KafkaTemplate template = new KafkaTemplate(pf);
    props.setMessageListener((MessageListener) m -> {
        template.send("bar", "baz");
        throw new RuntimeException("fail");
    });
    KafkaMessageListenerContainer container = new KafkaMessageListenerContainer<>(cf, props);
    container.setBeanName("rollback");
    container.start();
    assertThat(closeLatch.await(10, TimeUnit.SECONDS)).isTrue();
    assertThat(seekLatch.await(10, TimeUnit.SECONDS)).isTrue();
    InOrder inOrder = inOrder(producer);
    inOrder.verify(producer).beginTransaction();
    ArgumentCaptor<ProducerRecord> captor = ArgumentCaptor.forClass(ProducerRecord.class);
    verify(producer).send(captor.capture(), any(Callback.class));
    assertThat(captor.getValue()).isEqualTo(new ProducerRecord("bar", "baz"));
    inOrder.verify(producer, never()).sendOffsetsToTransaction(anyMap(), anyString());
    inOrder.verify(producer, never()).commitTransaction();
    inOrder.verify(producer).abortTransaction();
    inOrder.verify(producer).close();
    verify(consumer).seek(topicPartition0, 0);
    verify(consumer).seek(topicPartition1, 0);
    verify(consumer, never()).commitSync(anyMap());
    container.stop();
    verify(pf, times(1)).createProducer();
}

From source file:org.springframework.kafka.listener.TransactionalContainerTests.java

@SuppressWarnings({ "rawtypes", "unchecked" })
@Test
public void testConsumeAndProduceTransactionRollbackBatch() throws Exception {
    Consumer consumer = mock(Consumer.class);
    final TopicPartition topicPartition0 = new TopicPartition("foo", 0);
    final TopicPartition topicPartition1 = new TopicPartition("foo", 1);
    Map<TopicPartition, List<ConsumerRecord<String, String>>> recordMap = new HashMap<>();
    recordMap.put(topicPartition0,
            Collections.singletonList(new ConsumerRecord<>("foo", 0, 0, "key", "value")));
    recordMap.put(topicPartition1,
            Collections.singletonList(new ConsumerRecord<>("foo", 1, 0, "key", "value")));
    ConsumerRecords records = new ConsumerRecords(recordMap);
    final AtomicBoolean done = new AtomicBoolean();
    willAnswer(i -> {
        if (done.compareAndSet(false, true)) {
            return records;
        } else {
            Thread.sleep(500);
            return null;
        }
    }).given(consumer).poll(any(Duration.class));
    final CountDownLatch seekLatch = new CountDownLatch(2);
    willAnswer(i -> {
        seekLatch.countDown();
        return null;
    }).given(consumer).seek(any(), anyLong());
    ConsumerFactory cf = mock(ConsumerFactory.class);
    willReturn(consumer).given(cf).createConsumer("group", "", null);
    Producer producer = mock(Producer.class);
    final CountDownLatch closeLatch = new CountDownLatch(1);
    willAnswer(i -> {
        closeLatch.countDown();
        return null;
    }).given(producer).close();
    ProducerFactory pf = mock(ProducerFactory.class);
    given(pf.transactionCapable()).willReturn(true);
    given(pf.createProducer()).willReturn(producer);
    KafkaTransactionManager tm = new KafkaTransactionManager(pf);
    ContainerProperties props = new ContainerProperties(new TopicPartitionInitialOffset("foo", 0),
            new TopicPartitionInitialOffset("foo", 1));
    props.setGroupId("group");
    props.setTransactionManager(tm);
    final KafkaTemplate template = new KafkaTemplate(pf);
    props.setMessageListener((BatchMessageListener) recordlist -> {
        template.send("bar", "baz");
        throw new RuntimeException("fail");
    });
    KafkaMessageListenerContainer container = new KafkaMessageListenerContainer<>(cf, props);
    container.setBeanName("rollback");
    container.start();
    assertThat(closeLatch.await(10, TimeUnit.SECONDS)).isTrue();
    assertThat(seekLatch.await(10, TimeUnit.SECONDS)).isTrue();
    InOrder inOrder = inOrder(producer);
    inOrder.verify(producer).beginTransaction();
    ArgumentCaptor<ProducerRecord> captor = ArgumentCaptor.forClass(ProducerRecord.class);
    verify(producer).send(captor.capture(), any(Callback.class));
    assertThat(captor.getValue()).isEqualTo(new ProducerRecord("bar", "baz"));
    inOrder.verify(producer, never()).sendOffsetsToTransaction(anyMap(), anyString());
    inOrder.verify(producer, never()).commitTransaction();
    inOrder.verify(producer).abortTransaction();
    inOrder.verify(producer).close();
    verify(consumer).seek(topicPartition0, 0);
    verify(consumer).seek(topicPartition1, 0);
    verify(consumer, never()).commitSync(anyMap());
    container.stop();
    verify(pf, times(1)).createProducer();
}

From source file:org.springframework.kafka.listener.TransactionalContainerTests.java

@SuppressWarnings({ "rawtypes", "unchecked" })
@Test
public void testConsumeAndProduceTransactionExternalTM() throws Exception {
    Consumer consumer = mock(Consumer.class);
    final TopicPartition topicPartition = new TopicPartition("foo", 0);
    willAnswer(i -> {
        ((ConsumerRebalanceListener) i.getArgument(1))
                .onPartitionsAssigned(Collections.singletonList(topicPartition));
        return null;
    }).given(consumer).subscribe(any(Collection.class), any(ConsumerRebalanceListener.class));
    final ConsumerRecords records = new ConsumerRecords(Collections.singletonMap(topicPartition,
            Collections.singletonList(new ConsumerRecord<>("foo", 0, 0, "key", "value"))));
    final AtomicBoolean done = new AtomicBoolean();
    willAnswer(i -> {
        if (done.compareAndSet(false, true)) {
            return records;
        } else {
            Thread.sleep(500);
            return null;
        }
    }).given(consumer).poll(any(Duration.class));
    ConsumerFactory cf = mock(ConsumerFactory.class);
    willReturn(consumer).given(cf).createConsumer("group", "", null);
    Producer producer = mock(Producer.class);

    final CountDownLatch closeLatch = new CountDownLatch(1);

    willAnswer(i -> {
        closeLatch.countDown();
        return null;
    }).given(producer).close();

    final ProducerFactory pf = mock(ProducerFactory.class);
    given(pf.transactionCapable()).willReturn(true);
    given(pf.createProducer()).willReturn(producer);
    ContainerProperties props = new ContainerProperties("foo");
    props.setGroupId("group");
    props.setTransactionManager(new SomeOtherTransactionManager());
    final KafkaTemplate template = new KafkaTemplate(pf);
    props.setMessageListener((MessageListener<String, String>) m -> {
        template.send("bar", "baz");
        template.sendOffsetsToTransaction(Collections.singletonMap(new TopicPartition(m.topic(), m.partition()),
                new OffsetAndMetadata(m.offset() + 1)));
    });
    KafkaMessageListenerContainer container = new KafkaMessageListenerContainer<>(cf, props);
    container.setBeanName("commit");
    container.start();

    assertThat(closeLatch.await(10, TimeUnit.SECONDS)).isTrue();

    InOrder inOrder = inOrder(producer);
    inOrder.verify(producer).beginTransaction();
    ArgumentCaptor<ProducerRecord> captor = ArgumentCaptor.forClass(ProducerRecord.class);
    inOrder.verify(producer).send(captor.capture(), any(Callback.class));
    assertThat(captor.getValue()).isEqualTo(new ProducerRecord("bar", "baz"));
    inOrder.verify(producer).sendOffsetsToTransaction(
            Collections.singletonMap(topicPartition, new OffsetAndMetadata(1)), "group");
    inOrder.verify(producer).commitTransaction();
    inOrder.verify(producer).close();
    container.stop();
    verify(pf).createProducer();
}

From source file:org.springframework.kafka.listener.TransactionalContainerTests.java

@SuppressWarnings("unchecked")
@Test
public void testRollbackRecord() throws Exception {
    logger.info("Start testRollbackRecord");
    Map<String, Object> props = KafkaTestUtils.consumerProps("txTest1", "false", embeddedKafka);
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    props.put(ConsumerConfig.GROUP_ID_CONFIG, "group");
    props.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, "read_committed");
    DefaultKafkaConsumerFactory<Integer, String> cf = new DefaultKafkaConsumerFactory<>(props);
    ContainerProperties containerProps = new ContainerProperties(topic1, topic2);
    containerProps.setGroupId("group");
    containerProps.setPollTimeout(10_000);

    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    senderProps.put(ProducerConfig.RETRIES_CONFIG, 1);
    DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    pf.setTransactionIdPrefix("rr.");

    final KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf);
    final AtomicBoolean failed = new AtomicBoolean();
    final CountDownLatch latch = new CountDownLatch(3);
    final AtomicReference<String> transactionalId = new AtomicReference<>();
    containerProps.setMessageListener((MessageListener<Integer, String>) message -> {
        latch.countDown();
        if (failed.compareAndSet(false, true)) {
            throw new RuntimeException("fail");
        }
        /*
         * Send a message to topic2 and wait for it so we don't stop the container too soon.
         */
        if (message.topic().equals(topic1)) {
            template.send(topic2, "bar");
            template.flush();
            transactionalId.set(KafkaTestUtils.getPropertyValue(
                    ProducerFactoryUtils.getTransactionalResourceHolder(pf).getProducer(),
                    "delegate.transactionManager.transactionalId", String.class));
        }
    });

    @SuppressWarnings({ "rawtypes" })
    KafkaTransactionManager tm = new KafkaTransactionManager(pf);
    containerProps.setTransactionManager(tm);
    KafkaMessageListenerContainer<Integer, String> container = new KafkaMessageListenerContainer<>(cf,
            containerProps);
    container.setBeanName("testRollbackRecord");
    container.start();

    template.setDefaultTopic(topic1);
    template.executeInTransaction(t -> {
        template.sendDefault(0, 0, "foo");
        return null;
    });
    assertThat(latch.await(60, TimeUnit.SECONDS)).isTrue();
    container.stop();
    Consumer<Integer, String> consumer = cf.createConsumer();
    final CountDownLatch subsLatch = new CountDownLatch(1);
    consumer.subscribe(Arrays.asList(topic1), new ConsumerRebalanceListener() {

        @Override
        public void onPartitionsRevoked(Collection<TopicPartition> partitions) {
            // empty
        }

        @Override
        public void onPartitionsAssigned(Collection<TopicPartition> partitions) {
            subsLatch.countDown();
        }

    });
    ConsumerRecords<Integer, String> records = null;
    int n = 0;
    while (subsLatch.getCount() > 0 && n++ < 600) {
        records = consumer.poll(Duration.ofMillis(100));
    }
    assertThat(subsLatch.await(1, TimeUnit.MILLISECONDS)).isTrue();
    assertThat(records.count()).isEqualTo(0);
    // depending on timing, the position might include the offset representing the commit in the log
    assertThat(consumer.position(new TopicPartition(topic1, 0))).isGreaterThanOrEqualTo(1L);
    assertThat(transactionalId.get()).startsWith("rr.group.txTopic");
    assertThat(KafkaTestUtils.getPropertyValue(pf, "consumerProducers", Map.class)).isEmpty();
    logger.info("Stop testRollbackRecord");
    pf.destroy();
    consumer.close();
}