Example usage for org.springframework.kafka.support TopicPartitionInitialOffset TopicPartitionInitialOffset

List of usage examples for org.springframework.kafka.support TopicPartitionInitialOffset TopicPartitionInitialOffset

Introduction

On this page you can find example usage for org.springframework.kafka.support TopicPartitionInitialOffset TopicPartitionInitialOffset.

Prototype

public TopicPartitionInitialOffset(String topic, int partition) 

Source Link

Document

Construct an instance with no initial offset management.

Usage

From source file:org.springframework.kafka.annotation.KafkaListenerAnnotationBeanPostProcessor.java

/**
 * Resolve the supplied value into one or more partition numbers for the given topic,
 * adding one {@link TopicPartitionInitialOffset} per partition to {@code result}.
 * <p>Supported value shapes: {@code String} (parsed as an int), {@code Integer},
 * arrays of either, and {@code Iterable}s of any of the above (resolved recursively).
 * @param topic the topic name; used both in the resulting entries and in error messages.
 * @param resolvedValue the property-placeholder-resolved partition value.
 * @param result the mutable list that resolved entries are appended to.
 * @throws IllegalArgumentException if the value cannot be resolved to Integer(s) or String(s).
 * @throws IllegalStateException if a String partition value is empty or blank.
 */
@SuppressWarnings("unchecked")
private void resolvePartitionAsInteger(String topic, Object resolvedValue,
        List<TopicPartitionInitialOffset> result) {
    if (resolvedValue instanceof String[]) {
        for (Object object : (String[]) resolvedValue) {
            resolvePartitionAsInteger(topic, object, result);
        }
    } else if (resolvedValue instanceof String) {
        Assert.state(StringUtils.hasText((String) resolvedValue),
                "partition in @TopicPartition for topic '" + topic + "' cannot be empty");
        result.add(new TopicPartitionInitialOffset(topic, Integer.valueOf((String) resolvedValue)));
    } else if (resolvedValue instanceof Integer[]) {
        for (Integer partition : (Integer[]) resolvedValue) {
            result.add(new TopicPartitionInitialOffset(topic, partition));
        }
    } else if (resolvedValue instanceof Integer) {
        result.add(new TopicPartitionInitialOffset(topic, (Integer) resolvedValue));
    } else if (resolvedValue instanceof Iterable) {
        for (Object object : (Iterable<Object>) resolvedValue) {
            resolvePartitionAsInteger(topic, object, result);
        }
    } else {
        // Fix: the annotation name was misspelled "@KafKaListener" in this message.
        throw new IllegalArgumentException(
                String.format("@KafkaListener for topic '%s' can't resolve '%s' as an Integer or String", topic,
                        resolvedValue));
    }
}

From source file:org.springframework.kafka.listener.ConcurrentMessageListenerContainerTests.java

/**
 * Verify that explicitly assigned partitions are spread across the concurrent
 * child containers: 7 partitions at concurrency 3 must yield 2, 2 and 3
 * partitions per child, respectively.
 */
@Test
@SuppressWarnings("unchecked")
public void testConcurrencyWithPartitions() {
    // Assign partitions 0..6 of topic1 explicitly (no group management).
    TopicPartitionInitialOffset[] assignedPartitions = new TopicPartitionInitialOffset[7];
    for (int p = 0; p < assignedPartitions.length; p++) {
        assignedPartitions[p] = new TopicPartitionInitialOffset(topic1, p);
    }
    ConsumerFactory<Integer, String> cf = mock(ConsumerFactory.class);
    Consumer<Integer, String> consumer = mock(Consumer.class);
    given(cf.createConsumer()).willReturn(consumer);
    // Each poll just pauses briefly and returns nothing; we only care about assignment.
    given(consumer.poll(anyLong())).willAnswer(invocation -> {
        Thread.sleep(100);
        return null;
    });
    ContainerProperties containerProps = new ContainerProperties(assignedPartitions);
    containerProps.setMessageListener((MessageListener<Integer, String>) message -> {
    });

    ConcurrentMessageListenerContainer<Integer, String> container = new ConcurrentMessageListenerContainer<>(cf,
            containerProps);
    container.setConcurrency(3);
    container.start();
    List<KafkaMessageListenerContainer<Integer, String>> containers = KafkaTestUtils.getPropertyValue(container,
            "containers", List.class);
    assertThat(containers.size()).isEqualTo(3);
    // First two children get 2 partitions each; the last child gets the remaining 3.
    for (int i = 0; i < 3; i++) {
        assertThat(KafkaTestUtils.getPropertyValue(containers.get(i), "topicPartitions",
                TopicPartitionInitialOffset[].class).length).isEqualTo(i < 2 ? 2 : 3);
    }
    container.stop();
}

From source file:org.springframework.kafka.listener.TransactionalContainerTests.java

/**
 * Verify the rollback path of a consume-and-produce transaction with a record
 * listener: when the listener throws, the transactional producer must abort
 * (never commit), no offsets may be sent to the transaction, and the consumer
 * must be re-seeked to the failed records so they are redelivered.
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
@Test
public void testConsumeAndProduceTransactionRollback() throws Exception {
    Consumer consumer = mock(Consumer.class);
    final TopicPartition topicPartition0 = new TopicPartition("foo", 0);
    final TopicPartition topicPartition1 = new TopicPartition("foo", 1);
    // One record on each of two partitions, both at offset 0.
    Map<TopicPartition, List<ConsumerRecord<String, String>>> recordMap = new HashMap<>();
    recordMap.put(topicPartition0,
            Collections.singletonList(new ConsumerRecord<>("foo", 0, 0, "key", "value")));
    recordMap.put(topicPartition1,
            Collections.singletonList(new ConsumerRecord<>("foo", 1, 0, "key", "value")));
    ConsumerRecords records = new ConsumerRecords(recordMap);
    final AtomicBoolean done = new AtomicBoolean();
    // First poll delivers the records; subsequent polls pause briefly and return nothing.
    willAnswer(i -> {
        if (done.compareAndSet(false, true)) {
            return records;
        } else {
            Thread.sleep(500);
            return null;
        }
    }).given(consumer).poll(any(Duration.class));
    // Expect one seek per partition after the rollback (hence a count of 2).
    final CountDownLatch seekLatch = new CountDownLatch(2);
    willAnswer(i -> {
        seekLatch.countDown();
        return null;
    }).given(consumer).seek(any(), anyLong());
    ConsumerFactory cf = mock(ConsumerFactory.class);
    willReturn(consumer).given(cf).createConsumer("group", "", null);
    Producer producer = mock(Producer.class);
    // producer.close() signals that the transaction/rollback sequence has completed.
    final CountDownLatch closeLatch = new CountDownLatch(1);
    willAnswer(i -> {
        closeLatch.countDown();
        return null;
    }).given(producer).close();
    ProducerFactory pf = mock(ProducerFactory.class);
    given(pf.transactionCapable()).willReturn(true);
    given(pf.createProducer()).willReturn(producer);
    KafkaTransactionManager tm = new KafkaTransactionManager(pf);
    ContainerProperties props = new ContainerProperties(new TopicPartitionInitialOffset("foo", 0),
            new TopicPartitionInitialOffset("foo", 1));
    props.setGroupId("group");
    props.setTransactionManager(tm);
    final KafkaTemplate template = new KafkaTemplate(pf);
    // The listener publishes within the transaction and then fails, forcing a rollback.
    props.setMessageListener((MessageListener) m -> {
        template.send("bar", "baz");
        throw new RuntimeException("fail");
    });
    KafkaMessageListenerContainer container = new KafkaMessageListenerContainer<>(cf, props);
    container.setBeanName("rollback");
    container.start();
    assertThat(closeLatch.await(10, TimeUnit.SECONDS)).isTrue();
    assertThat(seekLatch.await(10, TimeUnit.SECONDS)).isTrue();
    InOrder inOrder = inOrder(producer);
    inOrder.verify(producer).beginTransaction();
    // The send from the listener happened inside the (later aborted) transaction.
    ArgumentCaptor<ProducerRecord> captor = ArgumentCaptor.forClass(ProducerRecord.class);
    verify(producer).send(captor.capture(), any(Callback.class));
    assertThat(captor.getValue()).isEqualTo(new ProducerRecord("bar", "baz"));
    // Rollback contract: no offsets sent, no commit — only abort, then close.
    inOrder.verify(producer, never()).sendOffsetsToTransaction(anyMap(), anyString());
    inOrder.verify(producer, never()).commitTransaction();
    inOrder.verify(producer).abortTransaction();
    inOrder.verify(producer).close();
    // Failed records are redelivered: both partitions seeked back to offset 0.
    verify(consumer).seek(topicPartition0, 0);
    verify(consumer).seek(topicPartition1, 0);
    verify(consumer, never()).commitSync(anyMap());
    container.stop();
    verify(pf, times(1)).createProducer();
}

From source file:org.springframework.kafka.listener.TransactionalContainerTests.java

/**
 * Batch-listener variant of the consume-and-produce rollback test: when a
 * {@code BatchMessageListener} throws, the transactional producer must abort
 * (never commit), no offsets may be sent to the transaction, and the consumer
 * must be re-seeked to the failed records so they are redelivered.
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
@Test
public void testConsumeAndProduceTransactionRollbackBatch() throws Exception {
    Consumer consumer = mock(Consumer.class);
    final TopicPartition topicPartition0 = new TopicPartition("foo", 0);
    final TopicPartition topicPartition1 = new TopicPartition("foo", 1);
    // One record on each of two partitions, both at offset 0.
    Map<TopicPartition, List<ConsumerRecord<String, String>>> recordMap = new HashMap<>();
    recordMap.put(topicPartition0,
            Collections.singletonList(new ConsumerRecord<>("foo", 0, 0, "key", "value")));
    recordMap.put(topicPartition1,
            Collections.singletonList(new ConsumerRecord<>("foo", 1, 0, "key", "value")));
    ConsumerRecords records = new ConsumerRecords(recordMap);
    final AtomicBoolean done = new AtomicBoolean();
    // First poll delivers the records; subsequent polls pause briefly and return nothing.
    willAnswer(i -> {
        if (done.compareAndSet(false, true)) {
            return records;
        } else {
            Thread.sleep(500);
            return null;
        }
    }).given(consumer).poll(any(Duration.class));
    // Expect one seek per partition after the rollback (hence a count of 2).
    final CountDownLatch seekLatch = new CountDownLatch(2);
    willAnswer(i -> {
        seekLatch.countDown();
        return null;
    }).given(consumer).seek(any(), anyLong());
    ConsumerFactory cf = mock(ConsumerFactory.class);
    willReturn(consumer).given(cf).createConsumer("group", "", null);
    Producer producer = mock(Producer.class);
    // producer.close() signals that the transaction/rollback sequence has completed.
    final CountDownLatch closeLatch = new CountDownLatch(1);
    willAnswer(i -> {
        closeLatch.countDown();
        return null;
    }).given(producer).close();
    ProducerFactory pf = mock(ProducerFactory.class);
    given(pf.transactionCapable()).willReturn(true);
    given(pf.createProducer()).willReturn(producer);
    KafkaTransactionManager tm = new KafkaTransactionManager(pf);
    ContainerProperties props = new ContainerProperties(new TopicPartitionInitialOffset("foo", 0),
            new TopicPartitionInitialOffset("foo", 1));
    props.setGroupId("group");
    props.setTransactionManager(tm);
    final KafkaTemplate template = new KafkaTemplate(pf);
    // The batch listener publishes within the transaction and then fails, forcing a rollback.
    props.setMessageListener((BatchMessageListener) recordlist -> {
        template.send("bar", "baz");
        throw new RuntimeException("fail");
    });
    KafkaMessageListenerContainer container = new KafkaMessageListenerContainer<>(cf, props);
    container.setBeanName("rollback");
    container.start();
    assertThat(closeLatch.await(10, TimeUnit.SECONDS)).isTrue();
    assertThat(seekLatch.await(10, TimeUnit.SECONDS)).isTrue();
    InOrder inOrder = inOrder(producer);
    inOrder.verify(producer).beginTransaction();
    // The send from the listener happened inside the (later aborted) transaction.
    ArgumentCaptor<ProducerRecord> captor = ArgumentCaptor.forClass(ProducerRecord.class);
    verify(producer).send(captor.capture(), any(Callback.class));
    assertThat(captor.getValue()).isEqualTo(new ProducerRecord("bar", "baz"));
    // Rollback contract: no offsets sent, no commit — only abort, then close.
    inOrder.verify(producer, never()).sendOffsetsToTransaction(anyMap(), anyString());
    inOrder.verify(producer, never()).commitTransaction();
    inOrder.verify(producer).abortTransaction();
    inOrder.verify(producer).close();
    // Failed records are redelivered: both partitions seeked back to offset 0.
    verify(consumer).seek(topicPartition0, 0);
    verify(consumer).seek(topicPartition1, 0);
    verify(consumer, never()).commitSync(anyMap());
    container.stop();
    verify(pf, times(1)).createProducer();
}