This commit is contained in:
Florian THIERRY
2021-04-30 15:44:28 +02:00
commit df3b76f166
27 changed files with 1052 additions and 0 deletions

34
.gitignore vendored Normal file
View File

@@ -0,0 +1,34 @@
HELP.md

# Maven build output.
# IMPORTANT: in .gitignore the LAST matching rule wins, so the broad
# **/target/ exclude must come BEFORE the "!" re-include rules — otherwise
# the negations are dead (which was the case previously).
**/target/
!.mvn/wrapper/maven-wrapper.jar
!**/src/main/**/target/
!**/src/test/**/target/

### STS ###
.apt_generated
.classpath
.factorypath
.project
.settings
.springBeans
.sts4-cache

### IntelliJ IDEA ###
.idea
*.iws
*.iml
*.ipr

### NetBeans ###
/nbproject/private/
/nbbuild/
/dist/
/nbdist/
/.nb-gradle/
build/
!**/src/main/**/build/
!**/src/test/**/build/

### VS Code ###
.vscode/

View File

@@ -0,0 +1 @@
{"_type":"export","__export_format":4,"__export_date":"2021-01-11T16:08:14.068Z","__export_source":"insomnia.desktop.app:v2020.5.2","resources":[{"_id":"req_72df07e95f3a48c0b49c9f0a93f9f82f","parentId":"fld_952979615ccb49a48fa38d35fcae9957","modified":1610378023564,"created":1610377899987,"url":"http://{{ _.kafkaProducer.host }}:{{ _.kafkaProducer.port }}/messages","name":"Send message","description":"","method":"POST","body":{"mimeType":"application/json","text":"\"Hello world!\""},"parameters":[],"headers":[{"name":"Content-Type","value":"application/json","id":"pair_184c9a3648114ff68c5a1bf3fb37b198"}],"authentication":{},"metaSortKey":-1610377899987,"isPrivate":false,"settingStoreCookies":true,"settingSendCookies":true,"settingDisableRenderRequestBody":false,"settingEncodeUrl":true,"settingRebuildPath":true,"settingFollowRedirects":"global","_type":"request"},{"_id":"fld_952979615ccb49a48fa38d35fcae9957","parentId":"wrk_05388a4751734d0e82825b35e6ef8689","modified":1610377891368,"created":1610377891368,"name":"KafkaProducer","description":"","environment":{},"environmentPropertyOrder":null,"metaSortKey":-1610377891368,"_type":"request_group"},{"_id":"wrk_05388a4751734d0e82825b35e6ef8689","parentId":null,"modified":1610377834700,"created":1610377834700,"name":"Insomnia","description":"","scope":null,"_type":"workspace"},{"_id":"env_339b114ac8e86fda9f4de7d2181e4b03f3650c0b","parentId":"wrk_05388a4751734d0e82825b35e6ef8689","modified":1610377970871,"created":1610377834725,"name":"Base 
Environment","data":{"kafkaProducer":{"host":"localhost","port":"8081"},"kafkaConsumer":{"host":"localhost","port":"8082"}},"dataPropertyOrder":{"&":["kafkaProducer","kafkaConsumer"],"&~|kafkaProducer":["host","port"],"&~|kafkaConsumer":["host","port"]},"color":null,"isPrivate":false,"metaSortKey":1610377834725,"_type":"environment"},{"_id":"jar_339b114ac8e86fda9f4de7d2181e4b03f3650c0b","parentId":"wrk_05388a4751734d0e82825b35e6ef8689","modified":1610377834726,"created":1610377834726,"name":"Default Jar","cookies":[],"_type":"cookie_jar"},{"_id":"spc_e64ff1b4a38a4c988025b81b1d7e1003","parentId":"wrk_05388a4751734d0e82825b35e6ef8689","modified":1610377834700,"created":1610377834700,"fileName":"Insomnia","contents":"","contentType":"yaml","_type":"api_spec"},{"_id":"env_54124313de4d4467b7f6feccaba48848","parentId":"env_339b114ac8e86fda9f4de7d2181e4b03f3650c0b","modified":1610381253855,"created":1610377974133,"name":"Localhost","data":{"kafkaConsumer":{"host":"localhost","port":"8081"},"kafkaProducer":{"host":"localhost","port":"8082"}},"dataPropertyOrder":{"&":["kafkaConsumer","kafkaProducer"],"&~|kafkaConsumer":["host","port"],"&~|kafkaProducer":["host","port"]},"color":null,"isPrivate":false,"metaSortKey":1610377974133,"_type":"environment"}]}

11
README.md Normal file
View File

@@ -0,0 +1,11 @@
# Kafka
## Commands
### Launch CMD Producer
```bash
$ kafka-console-producer.sh --broker-list localhost:9092 --topic test
```
### Launch CMD Consumer
```bash
$ kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic test
```

13
docker/docker-compose.yml Normal file
View File

@@ -0,0 +1,13 @@
# Local single-broker Kafka stack for manual testing of the producer/consumer apps.
version: '3'
services:
  zookeeper:
    image: wurstmeister/zookeeper
    ports:
      - "2181:2181"   # ZooKeeper client port
  kafka:
    image: wurstmeister/kafka
    ports:
      - "9092:9092"   # Kafka broker port (matches bootstrapAddress localhost:9092 in the apps)
    environment:
      # NOTE(review): hard-coded LAN IP — the advertised address is machine-specific;
      # other developers must change it (or it should become an env var). Confirm.
      KAFKA_ADVERTISED_HOST_NAME: "192.168.0.11"
      KAFKA_ZOOKEEPER_CONNECT: "zookeeper:2181"

59
kafka-common-test/pom.xml Normal file
View File

@@ -0,0 +1,59 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- kafka-common-test: shared test utilities (custom AssertJ assertion, JUnit 5
     embedded-Kafka extension, topic listener) consumed by the kafka-producer and
     kafka-consumer modules with <scope>test</scope>. -->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>com.ippon.trainning.kafkaintegrationtest</groupId>
        <artifactId>kafka-integration-test-parent</artifactId>
        <version>0.0.1-SNAPSHOT</version>
        <relativePath>../pom.xml</relativePath>
    </parent>
    <artifactId>kafka-common-test</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <properties>
        <java.version>11</java.version>
        <jupiter.version>5.7.0</jupiter.version>
    </properties>
    <dependencies>
        <dependency>
            <groupId>org.springframework.kafka</groupId>
            <artifactId>spring-kafka</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <!-- JUnit 4 is excluded: the whole project tests with JUnit 5 (Jupiter). -->
            <exclusions>
                <exclusion>
                    <groupId>junit</groupId>
                    <artifactId>junit</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>org.springframework.kafka</groupId>
            <artifactId>spring-kafka-test</artifactId>
        </dependency>
        <dependency>
            <groupId>org.junit.jupiter</groupId>
            <artifactId>junit-jupiter-engine</artifactId>
            <version>${jupiter.version}</version>
        </dependency>
        <dependency>
            <groupId>org.junit.jupiter</groupId>
            <artifactId>junit-jupiter-api</artifactId>
            <version>${jupiter.version}</version>
        </dependency>
        <dependency>
            <groupId>org.junit.jupiter</groupId>
            <artifactId>junit-jupiter-params</artifactId>
            <version>${jupiter.version}</version>
        </dependency>
        <dependency>
            <groupId>org.awaitility</groupId>
            <artifactId>awaitility</artifactId>
        </dependency>
    </dependencies>
</project>

View File

@@ -0,0 +1,73 @@
package com.ippon.trainning.kafkaintegrationtest.kafkacommontest.assertor;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.header.Header;
import org.assertj.core.api.AbstractObjectAssert;
import java.util.Iterator;
import org.assertj.core.api.Assertions;
/**
 * AssertJ-style assertion object for Kafka {@link ConsumerRecord}s: checks a
 * consumed record's key, value and headers.
 *
 * @param <K> record key type
 * @param <V> record value type
 */
public class ConsumerRecordAssert<K, V> extends AbstractObjectAssert<ConsumerRecordAssert<K, V>, ConsumerRecord<K, V>> {

    // ObjectMapper is thread-safe once configured; share one instance instead of
    // building a new one for every assertion object.
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

    private ConsumerRecordAssert(ConsumerRecord<K, V> record) {
        super(record, ConsumerRecordAssert.class);
    }

    /**
     * Entry point of the fluent assertion.
     *
     * @param record the record under test (a null record makes subsequent assertions fail via isNotNull())
     */
    public static <K, V> ConsumerRecordAssert<K, V> assertThat(ConsumerRecord<K, V> record) {
        return new ConsumerRecordAssert<>(record);
    }

    /**
     * Asserts the record value equals {@code expectedValue}, compared field by field
     * (recursive comparison — no equals() implementation required on the value type).
     */
    public ConsumerRecordAssert<K, V> hasValue(V expectedValue) {
        isNotNull();
        Assertions.assertThat(actual.value()).usingRecursiveComparison().isEqualTo(expectedValue);
        return this;
    }

    /** Asserts the record key equals {@code expectedKey} (recursive comparison). */
    public ConsumerRecordAssert<K, V> hasKey(K expectedKey) {
        isNotNull();
        Assertions.assertThat(actual.key()).usingRecursiveComparison().isEqualTo(expectedKey);
        return this;
    }

    /**
     * Asserts the record carries a header named {@code headerName} whose bytes are
     * the JSON serialization of {@code expectedHeaderValue}.
     *
     * @throws IllegalArgumentException if the expected value cannot be serialized to JSON
     */
    public ConsumerRecordAssert<K, V> hasHeader(String headerName, Object expectedHeaderValue) {
        isNotNull();
        String expectedHeaderValueAsJsonString;
        try {
            expectedHeaderValueAsJsonString = OBJECT_MAPPER.writeValueAsString(expectedHeaderValue);
        } catch (JsonProcessingException ex) {
            // Fix: keep the serialization failure as the cause instead of dropping it.
            throw new IllegalArgumentException(String.format(
                    "Could not initialize <hasHeader> assertion due to JSON serialization of expected value <%s>",
                    expectedHeaderValue
            ), ex);
        }
        Assertions.assertThat(actual.headers()).isNotEmpty();
        Iterator<Header> headerIterator = actual.headers().headers(headerName).iterator();
        if (!headerIterator.hasNext()) {
            failWithMessage("<%s> should contain header <%s> with value <%s>, but it does not.",
                    ConsumerRecord.class.getSimpleName(),
                    headerName,
                    expectedHeaderValueAsJsonString);
        }
        // NOTE(review): decodes header bytes with the platform default charset —
        // consider StandardCharsets.UTF_8 if headers may contain non-ASCII JSON.
        String headerValue = new String(headerIterator.next().value());
        if (!expectedHeaderValueAsJsonString.equals(headerValue)) {
            failWithMessage("<%s> should contain header <%s> with value <%s>, but has <%s>.",
                    ConsumerRecord.class.getSimpleName(),
                    headerName,
                    expectedHeaderValueAsJsonString,
                    headerValue);
        }
        return this;
    }
}

View File

@@ -0,0 +1,66 @@
package com.ippon.trainning.kafkaintegrationtest.kafkacommontest.extension;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.junit.jupiter.api.extension.AfterEachCallback;
import org.junit.jupiter.api.extension.BeforeEachCallback;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.springframework.context.ApplicationContext;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.listener.ContainerProperties;
import org.springframework.kafka.listener.KafkaMessageListenerContainer;
import org.springframework.kafka.listener.MessageListener;
import org.springframework.kafka.test.EmbeddedKafkaBroker;
import org.springframework.kafka.test.utils.ContainerTestUtils;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import java.util.Map;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
/**
 * JUnit 5 extension that, before each test, looks up the {@link EmbeddedKafkaBroker}
 * bean in the Spring test context, starts a string/string
 * {@link KafkaMessageListenerContainer} against it, and blocks until the container
 * has been assigned its partitions. The container is stopped after each test.
 * <p>
 * Must be combined with {@link SpringExtension} and an {@code @EmbeddedKafka} test,
 * since it resolves the broker from the Spring application context.
 * </p>
 */
public class EmbeddedKafkaExtension implements BeforeEachCallback, AfterEachCallback {
    private EmbeddedKafkaBroker embeddedKafkaBroker;
    private KafkaMessageListenerContainer<String, String> container;
    /**
     * This queue is required for {@link EmbeddedKafkaExtension#container} to start.
     * <p>
     * We can improve this extension by providing this queue as a Spring bean but with setting up of a sort
     * of factory to define genericity of {@link ConsumerRecord}.
     * </p>
     */
    @SuppressWarnings({"FieldCanBeLocal", "MismatchedQueryAndUpdateOfCollection"})
    private BlockingQueue<ConsumerRecord<String, String>> records;

    @Override
    public void beforeEach(ExtensionContext context) throws Exception {
        ApplicationContext applicationContext = SpringExtension.getApplicationContext(context);
        embeddedKafkaBroker = applicationContext.getBean(EmbeddedKafkaBroker.class);
        DefaultKafkaConsumerFactory<String, String> consumerFactory = new DefaultKafkaConsumerFactory<>(getConsumerProperties());
        // NOTE(review): topic name is hard-coded to "test-topic" while the modules'
        // configuration uses "topic-test" (server.kafka.topic) — confirm this is intended;
        // the container here only serves to wait for broker readiness.
        ContainerProperties containerProperties = new ContainerProperties("test-topic");
        container = new KafkaMessageListenerContainer<>(consumerFactory, containerProperties);
        records = new LinkedBlockingQueue<>();
        container.setupMessageListener((MessageListener<String, String>) records::add);
        container.start();
        // Block until the container owns all partitions of the topic — from this point
        // the embedded broker is ready for the actual test traffic.
        ContainerTestUtils.waitForAssignment(container, embeddedKafkaBroker.getPartitionsPerTopic());
    }

    // Consumer configuration for the readiness-check container: auto-commit,
    // string (de)serialization, reading from the earliest offset.
    private Map<String, Object> getConsumerProperties() {
        return Map.of(
                ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, embeddedKafkaBroker.getBrokersAsString(),
                ConsumerConfig.GROUP_ID_CONFIG, "consumer",
                ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true",
                ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "10",
                ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "60000",
                ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class,
                ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class,
                ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"
        );
    }

    @Override
    public void afterEach(ExtensionContext context) {
        container.stop();
    }
}

View File

@@ -0,0 +1,68 @@
package com.ippon.trainning.kafkaintegrationtest.kafkacommontest.utils;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingDeque;
import static java.util.concurrent.TimeUnit.SECONDS;
/**
 * Listener that will consume a kafka topic and store consumed messages.
 * <p>
 * This class serves to test a kafka producer.
 * </p>
 *
 * @param <K> The kafka producer messages key type.
 * @param <V> The kafka producer messages type.
 */
public class KafkaTopicListener<K, V> {

    /** Queue to store consumed messages. */
    private final BlockingQueue<ConsumerRecord<K, V>> messages = new LinkedBlockingDeque<>();

    /**
     * Serves to consume a kafka topic, and store consumed messages in the queue.
     * <p>
     * This method has to be annotated with {@link KafkaListener} annotation in classes that extend this one.
     * </p>
     * <p>
     * Example:
     * <pre>{@code
     * new KafkaTopicListener<>() {
     *     @KafkaListener(
     *         topics = "topic-name",
     *         groupId = "groupId-name",
     *         containerFactory = "container-factory-bean-name"
     *     )
     *     @Override
     *     public void listen(ConsumerRecord<String, String> record) {
     *         super.listen(record);
     *     }
     * };
     * }</pre>
     * </p>
     * <p>
     * Note: The {@code container-factory-bean-name} is a bean that you have to declare
     * (with a {@code @Bean} annotated method) that should be an instance of
     * {@link org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory}.
     * </p>
     *
     * @param message Topic consumed message.
     */
    public void listen(ConsumerRecord<K, V> message) {
        messages.add(message);
    }

    /**
     * Waits at most 5 seconds that kafka listener consumes a message,
     * and return it if one was consumed in this period, otherwise, {@code null} will be returned.
     *
     * @return The kafka consumed message, or {@code null}.
     */
    public ConsumerRecord<K, V> getMessage() {
        try {
            return messages.poll(5, SECONDS);
        } catch (InterruptedException ex) {
            // Fix: restore the interrupt flag before failing so callers/frameworks
            // can still observe that the thread was interrupted.
            Thread.currentThread().interrupt();
            throw new AssertionError(ex);
        }
    }
}

131
kafka-consumer/pom.xml Normal file
View File

@@ -0,0 +1,131 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- kafka-consumer: Spring Boot app consuming messages from a Kafka topic. -->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>com.ippon.trainning.kafkaintegrationtest</groupId>
        <artifactId>kafka-integration-test-parent</artifactId>
        <version>0.0.1-SNAPSHOT</version>
        <relativePath>../pom.xml</relativePath>
    </parent>
    <artifactId>kafka-consumer</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <properties>
        <java.version>11</java.version>
        <jupiter.version>5.7.0</jupiter.version>
        <build-helper-maven-plugin.version>3.2.0</build-helper-maven-plugin.version>
        <integration-test.source.directory>src/integration-test/java</integration-test.source.directory>
        <integration-test.resources.directory>src/integration-test/resources</integration-test.resources.directory>
        <maven-failsafe-plugin.version>2.22.2</maven-failsafe-plugin.version>
    </properties>
    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.kafka</groupId>
            <artifactId>spring-kafka</artifactId>
        </dependency>
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
        </dependency>
        <dependency>
            <groupId>com.ippon.trainning.kafkaintegrationtest</groupId>
            <artifactId>kafka-common-test</artifactId>
            <version>0.0.1-SNAPSHOT</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
            <!-- Exclude JUnit 4: tests use JUnit 5 (Jupiter) only.
                 Added for consistency with kafka-producer and kafka-common-test POMs. -->
            <exclusions>
                <exclusion>
                    <groupId>junit</groupId>
                    <artifactId>junit</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>org.springframework.kafka</groupId>
            <artifactId>spring-kafka-test</artifactId>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.junit.jupiter</groupId>
            <artifactId>junit-jupiter-engine</artifactId>
            <version>${jupiter.version}</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.junit.jupiter</groupId>
            <artifactId>junit-jupiter-api</artifactId>
            <version>${jupiter.version}</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.junit.jupiter</groupId>
            <artifactId>junit-jupiter-params</artifactId>
            <version>${jupiter.version}</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.awaitility</groupId>
            <artifactId>awaitility</artifactId>
            <scope>test</scope>
        </dependency>
    </dependencies>
    <build>
        <plugins>
            <!-- Mandatory for executable jar -->
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
                <configuration>
                    <classifier>exec</classifier>
                </configuration>
            </plugin>
            <!-- Integration tests: register the extra source/resource folders... -->
            <plugin>
                <groupId>org.codehaus.mojo</groupId>
                <artifactId>build-helper-maven-plugin</artifactId>
                <version>${build-helper-maven-plugin.version}</version>
                <executions>
                    <execution>
                        <id>add-integration-test</id>
                        <goals>
                            <goal>add-test-source</goal>
                            <goal>add-test-resource</goal>
                        </goals>
                        <configuration>
                            <sources>
                                <source>${integration-test.source.directory}</source>
                            </sources>
                            <resources>
                                <resource>
                                    <directory>${integration-test.resources.directory}</directory>
                                </resource>
                            </resources>
                        </configuration>
                    </execution>
                </executions>
            </plugin>
            <!-- ...and run the *IT classes during integration-test/verify. -->
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-failsafe-plugin</artifactId>
                <version>${maven-failsafe-plugin.version}</version>
                <executions>
                    <execution>
                        <goals>
                            <goal>integration-test</goal>
                            <goal>verify</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>
</project>

View File

@@ -0,0 +1,85 @@
package com.ippon.trainning.kafkaintegrationtest.kafkaconsumer.consumer;
import com.ippon.trainning.kafkaintegrationtest.kafkacommontest.extension.EmbeddedKafkaExtension;
import com.ippon.trainning.kafkaintegrationtest.kafkaconsumer.service.MessageService;
import org.apache.kafka.common.serialization.StringSerializer;
import org.awaitility.Durations;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.context.annotation.Bean;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import java.util.Map;
import java.util.UUID;
import static org.apache.kafka.clients.producer.ProducerConfig.*;
import static org.awaitility.Awaitility.await;
import static org.mockito.BDDMockito.then;
/**
 * Integration test for the consumer side: produces a record onto the configured
 * topic with an embedded broker and verifies {@link MessageService#handleMessage}
 * is eventually invoked with the same key/value.
 */
@ExtendWith({
        SpringExtension.class,
        MockitoExtension.class,
        EmbeddedKafkaExtension.class
})
@SpringBootTest(classes = {
        KafkaConsumerIT.KafkaConsumerITConfiguration.class
}, properties = "spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}")
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
@EmbeddedKafka
@ActiveProfiles({"test"})
public class KafkaConsumerIT {

    /** Test-only wiring: a string/string producer pointed at the embedded broker. */
    @TestConfiguration
    public static class KafkaConsumerITConfiguration {
        @Bean
        public DefaultKafkaProducerFactory<String, String> producerFactory(
                @Value("${server.kafka.bootstrapAddress}") String bootstrapAddress
        ) {
            Map<String, Object> configProperties = Map.of(
                    BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress,
                    KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class,
                    VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class
            );
            return new DefaultKafkaProducerFactory<>(configProperties);
        }

        @Bean
        public KafkaTemplate<String, String> kafkaTemplate(ProducerFactory<String, String> producerFactory) {
            return new KafkaTemplate<>(producerFactory);
        }
    }

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    @Value("${server.kafka.topic}")
    private String topic;

    // The real service is mocked so the assertion is on the listener wiring only.
    @MockBean
    private MessageService messageService;

    @Test
    void should_consume_a_message_from_kafka_topic() { // fixed typo: "comsume" -> "consume"
        // given
        String key = UUID.randomUUID().toString();
        String message = "A message to consume";
        // when
        kafkaTemplate.send(topic, key, message);
        // then — consumption is asynchronous, so poll up to 5s for the interaction
        await().atMost(Durations.FIVE_SECONDS).untilAsserted(() -> then(messageService).should().handleMessage(key, message));
    }
}

View File

@@ -0,0 +1,5 @@
server:
kafka:
bootstrapAddress: "${spring.embedded.kafka.brokers:}"
topic: topic-test
groupId: groupId-test

View File

@@ -0,0 +1,11 @@
package com.ippon.trainning.kafkaintegrationtest.kafkaconsumer;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
/** Spring Boot entry point for the kafka-consumer module. */
@SpringBootApplication
public class KafkaConsumerApplication {

    /** Bootstraps the Spring application context. */
    public static void main(String[] args) {
        SpringApplication.run(KafkaConsumerApplication.class, args);
    }
}

View File

@@ -0,0 +1,46 @@
package com.ippon.trainning.kafkaintegrationtest.kafkaconsumer.config;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import java.util.Map;
import static org.apache.kafka.clients.CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG;
import static org.apache.kafka.clients.CommonClientConfigs.GROUP_ID_CONFIG;
import static org.apache.kafka.clients.consumer.ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG;
import static org.apache.kafka.clients.consumer.ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG;
/**
 * Kafka consumer wiring: a string/string {@link ConsumerFactory} plus the listener
 * container factory used by {@code @KafkaListener(containerFactory = "containerFactory")}.
 * Broker address and consumer group are taken from the {@code server.kafka.*} properties.
 */
@Configuration
public class KafkaConsumerConfiguration {

    private final String bootstrapAddress;
    private final String groupId;

    public KafkaConsumerConfiguration(@Value("${server.kafka.bootstrapAddress}") String bootstrapAddress,
                                      @Value("${server.kafka.groupId}") String groupId) {
        this.bootstrapAddress = bootstrapAddress;
        this.groupId = groupId;
    }

    /** String-deserializing consumer factory bound to the configured broker and group. */
    @Bean
    public ConsumerFactory<String, String> consumerFactory() {
        Map<String, Object> consumerProperties = Map.of(
                GROUP_ID_CONFIG, groupId,
                BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress,
                KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class,
                VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class
        );
        return new DefaultKafkaConsumerFactory<>(consumerProperties);
    }

    /** Listener container factory backed by {@link #consumerFactory()}. */
    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> containerFactory(ConsumerFactory<String, String> consumerFactory) {
        var listenerFactory = new ConcurrentKafkaListenerContainerFactory<String, String>();
        listenerFactory.setConsumerFactory(consumerFactory);
        return listenerFactory;
    }
}

View File

@@ -0,0 +1,25 @@
package com.ippon.trainning.kafkaintegrationtest.kafkaconsumer.consumer;
import com.ippon.trainning.kafkaintegrationtest.kafkaconsumer.service.MessageService;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Service;
/**
 * Kafka entry point of the consumer app: listens on the configured topic and
 * forwards each record's key/value to {@link MessageService}.
 */
@Service
@Slf4j
public class KafkaConsumer {

    private final MessageService messageService;

    public KafkaConsumer(MessageService messageService) {
        this.messageService = messageService;
    }

    /** Invoked by Spring Kafka for every record consumed from {@code server.kafka.topic}. */
    @KafkaListener(
            topics = "${server.kafka.topic}",
            containerFactory = "containerFactory"
    )
    public void listenTopic(ConsumerRecord<String, String> record) {
        String key = record.key();
        String value = record.value();
        messageService.handleMessage(key, value);
    }
}

View File

@@ -0,0 +1,12 @@
package com.ippon.trainning.kafkaintegrationtest.kafkaconsumer.service;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
/**
 * Business-side handler for messages consumed from Kafka.
 * Currently only logs the consumed key/message pair.
 */
@Service
@Slf4j
public class MessageService {

    /**
     * Handles one consumed message.
     *
     * @param key     Kafka record key
     * @param message Kafka record value
     */
    public void handleMessage(String key, String message) {
        log.info("Message consumed: <{}> - <{}>", key, message);
    }
}

View File

@@ -0,0 +1,6 @@
server:
port: 8081
kafka:
bootstrapAddress: localhost:9092
topic: topic-test
groupId: groupId-test

View File

@@ -0,0 +1,13 @@
package com.ippon.trainning.kafkaintegrationtest.kafkaconsumer;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
/** Smoke test: verifies the Spring application context starts without error. */
@SpringBootTest
class KafkaConsumerApplicationTests {

    @Test
    void contextLoads() {
        // Intentionally empty — failure to load the context fails the test.
    }
}

137
kafka-producer/pom.xml Normal file
View File

@@ -0,0 +1,137 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- kafka-producer: Spring Boot app exposing a REST endpoint that publishes messages to Kafka. -->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>com.ippon.trainning.kafkaintegrationtest</groupId>
        <artifactId>kafka-integration-test-parent</artifactId>
        <version>0.0.1-SNAPSHOT</version>
        <relativePath>../pom.xml</relativePath>
    </parent>
    <artifactId>kafka-producer</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <properties>
        <java.version>11</java.version>
        <jupiter.version>5.7.0</jupiter.version>
        <build-helper-maven-plugin.version>3.2.0</build-helper-maven-plugin.version>
        <integration-test.source.directory>src/integration-test/java</integration-test.source.directory>
        <integration-test.resources.directory>src/integration-test/resources</integration-test.resources.directory>
        <maven-failsafe-plugin.version>2.22.2</maven-failsafe-plugin.version>
    </properties>
    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.kafka</groupId>
            <artifactId>spring-kafka</artifactId>
        </dependency>
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
        </dependency>
        <dependency>
            <groupId>com.ippon.trainning.kafkaintegrationtest</groupId>
            <artifactId>kafka-common-test</artifactId>
            <version>0.0.1-SNAPSHOT</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
            <!-- JUnit 4 excluded: tests use JUnit 5 (Jupiter) only. -->
            <exclusions>
                <exclusion>
                    <groupId>junit</groupId>
                    <artifactId>junit</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>org.springframework.kafka</groupId>
            <artifactId>spring-kafka-test</artifactId>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.junit.jupiter</groupId>
            <artifactId>junit-jupiter-engine</artifactId>
            <version>${jupiter.version}</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.junit.jupiter</groupId>
            <artifactId>junit-jupiter-api</artifactId>
            <version>${jupiter.version}</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.junit.jupiter</groupId>
            <artifactId>junit-jupiter-params</artifactId>
            <version>${jupiter.version}</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.awaitility</groupId>
            <artifactId>awaitility</artifactId>
            <scope>test</scope>
        </dependency>
    </dependencies>
    <build>
        <plugins>
            <!-- Mandatory for executable jar -->
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
                <configuration>
                    <classifier>exec</classifier>
                </configuration>
            </plugin>
            <!-- Integration tests: add the source folders + execution (translated from French) -->
            <plugin>
                <groupId>org.codehaus.mojo</groupId>
                <artifactId>build-helper-maven-plugin</artifactId>
                <version>${build-helper-maven-plugin.version}</version>
                <executions>
                    <execution>
                        <id>add-integration-test</id>
                        <goals>
                            <goal>add-test-source</goal>
                            <goal>add-test-resource</goal>
                        </goals>
                        <configuration>
                            <sources>
                                <source>${integration-test.source.directory}</source>
                            </sources>
                            <resources>
                                <resource>
                                    <directory>${integration-test.resources.directory}</directory>
                                </resource>
                            </resources>
                        </configuration>
                    </execution>
                </executions>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-failsafe-plugin</artifactId>
                <version>${maven-failsafe-plugin.version}</version>
                <executions>
                    <execution>
                        <goals>
                            <goal>integration-test</goal>
                            <goal>verify</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>
</project>

View File

@@ -0,0 +1,96 @@
package com.ippon.trainning.kafkaintegrationtest.kafkaproducer.producer;
import com.ippon.trainning.kafkaintegrationtest.kafkacommontest.extension.EmbeddedKafkaExtension;
import com.ippon.trainning.kafkaintegrationtest.kafkacommontest.utils.KafkaTopicListener;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import java.util.HashMap;
import java.util.Map;
import static com.ippon.trainning.kafkaintegrationtest.kafkacommontest.assertor.ConsumerRecordAssert.assertThat;
/**
 * Integration test for the producer side: sends a message through the real
 * {@code KafkaProducer} bean and verifies it arrives on the topic, using a
 * {@link KafkaTopicListener} registered against the embedded broker.
 */
@ExtendWith({
        SpringExtension.class,
        EmbeddedKafkaExtension.class
})
@SpringBootTest(classes = {
        ProducerIT.ProducerITConfiguration.class
}, properties = "spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}")
@EmbeddedKafka
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
@ActiveProfiles({"test"})
public class ProducerIT {

    /** Test-only consumer wiring: container factory + queueing topic listener. */
    @TestConfiguration
    public static class ProducerITConfiguration {
        @Bean("message-container-factory")
        public ConcurrentKafkaListenerContainerFactory<String, String> containerFactory(
                @Value("${server.kafka.bootstrapAddress}") String bootstrapAddress
        ) {
            var factory = new ConcurrentKafkaListenerContainerFactory<String, String>();
            factory.setConsumerFactory(consumerFactory(bootstrapAddress));
            factory.setConcurrency(1);
            return factory;
        }

        // String-deserializing consumer factory pointed at the embedded broker.
        private ConsumerFactory<String, String> consumerFactory(String bootstrapAddress) {
            Map<String, Object> props = new HashMap<>();
            props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);
            props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
            props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
            return new DefaultKafkaConsumerFactory<>(props);
        }

        // Anonymous subclass so @KafkaListener can carry the topic/group/factory
        // attributes (see KafkaTopicListener's javadoc for the required pattern).
        @Bean
        public KafkaTopicListener<String, String> messageKafkaTopicListener() {
            return new KafkaTopicListener<>() {
                @KafkaListener(
                        topics = "${server.kafka.topic}",
                        groupId = "${server.kafka.groupId}",
                        containerFactory = "message-container-factory"
                )
                @Override
                public void listen(ConsumerRecord<String, String> message) {
                    super.listen(message);
                }
            };
        }
    }

    @Autowired
    private KafkaProducer producer;

    @Autowired
    @Qualifier("messageKafkaTopicListener")
    private KafkaTopicListener<String, String> messageKafkaTopicListener;

    @Test
    void should_send_a_message_into_kafka_topic() {
        // given
        String message = "A test message to send into topic";
        // when
        producer.sendMessage(message);
        // then — getMessage() blocks up to 5s; returns null (failing hasValue) on timeout
        ConsumerRecord<String, String> messageConsumed = messageKafkaTopicListener.getMessage();
        assertThat(messageConsumed)
                .hasValue(message);
    }
}

View File

@@ -0,0 +1,5 @@
server:
kafka:
bootstrapAddress: "${spring.embedded.kafka.brokers:}"
topic: topic-test
groupId: groupId-test

View File

@@ -0,0 +1,11 @@
package com.ippon.trainning.kafkaintegrationtest.kafkaproducer;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
/** Spring Boot entry point for the kafka-producer module. */
@SpringBootApplication
public class KafkaProducerApplication {

    /** Bootstraps the Spring application context. */
    public static void main(String[] args) {
        SpringApplication.run(KafkaProducerApplication.class, args);
    }
}

View File

@@ -0,0 +1,37 @@
package com.ippon.trainning.kafkaintegrationtest.kafkaproducer.config;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
import java.util.Map;
import static org.apache.kafka.clients.producer.ProducerConfig.*;
/**
 * Spring configuration exposing the Kafka producer beans.
 *
 * <p>Both keys and values are serialized as plain strings; the broker
 * address comes from the {@code server.kafka.bootstrapAddress} property.
 */
@Configuration
public class KafkaProducerConfiguration {

    private final String bootstrapAddress;

    /**
     * @param bootstrapAddress Kafka bootstrap servers, injected from
     *                         {@code server.kafka.bootstrapAddress}
     */
    public KafkaProducerConfiguration(@Value("${server.kafka.bootstrapAddress}") String bootstrapAddress) {
        this.bootstrapAddress = bootstrapAddress;
    }

    /**
     * Factory creating String/String Kafka producers pointed at the
     * configured broker.
     */
    @Bean
    public ProducerFactory<String, String> producerFactory() {
        Map<String, Object> settings = Map.of(
                BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress,
                KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class,
                VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class
        );
        return new DefaultKafkaProducerFactory<>(settings);
    }

    /**
     * Template used by services to publish messages to Kafka.
     */
    @Bean
    public KafkaTemplate<String, String> kafkaTemplate() {
        return new KafkaTemplate<>(producerFactory());
    }
}

View File

@@ -0,0 +1,30 @@
package com.ippon.trainning.kafkaintegrationtest.kafkaproducer.controller;
import com.ippon.trainning.kafkaintegrationtest.kafkaproducer.producer.KafkaProducer;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RestController;
import static org.springframework.http.HttpStatus.INTERNAL_SERVER_ERROR;
/**
 * REST endpoint for publishing messages to Kafka.
 */
@RestController
public class MessageController {

    private final KafkaProducer kafkaProducer;

    public MessageController(KafkaProducer kafkaProducer) {
        this.kafkaProducer = kafkaProducer;
    }

    /**
     * Publishes the request body to the configured Kafka topic.
     *
     * @param message raw message payload
     * @return 200 with a confirmation body on success,
     *         500 if publishing failed
     */
    @PostMapping("/messages")
    public ResponseEntity<String> sendMessage(@RequestBody String message) {
        try {
            kafkaProducer.sendMessage(message);
            return ResponseEntity.ok("Message sent.");
        } catch (Exception ex) {
            // Broad catch is deliberate at the HTTP boundary: any publish
            // failure maps to a 500. Fixed typo in the error body
            // ("Message no sent." -> "Message not sent.").
            return ResponseEntity.status(INTERNAL_SERVER_ERROR).body("Message not sent.");
        }
    }
}

View File

@@ -0,0 +1,23 @@
package com.ippon.trainning.kafkaintegrationtest.kafkaproducer.producer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;
import java.util.UUID;
/**
 * Service publishing string messages onto the configured Kafka topic,
 * keying each record with a freshly generated random UUID.
 */
@Service
public class KafkaProducer {

    private final KafkaTemplate<String, String> kafkaTemplate;
    private final String topic;

    /**
     * @param kafkaTemplate template used to send records
     * @param topic         destination topic, from {@code server.kafka.topic}
     */
    public KafkaProducer(KafkaTemplate<String, String> kafkaTemplate,
                         @Value("${server.kafka.topic}") String topic) {
        this.kafkaTemplate = kafkaTemplate;
        this.topic = topic;
    }

    /**
     * Sends {@code message} to the configured topic.
     *
     * @param message payload to publish
     */
    public void sendMessage(String message) {
        String key = UUID.randomUUID().toString();
        kafkaTemplate.send(topic, key, message);
    }
}

View File

@@ -0,0 +1,5 @@
server:
port: 8082
kafka:
bootstrapAddress: "localhost:9092"
topic: topic-test

View File

@@ -0,0 +1,13 @@
package com.ippon.trainning.kafkaintegrationtest.kafkaproducer;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
// Smoke test: passes when the full Spring application context starts
// without throwing.
@SpringBootTest
class KafkaProducerApplicationTests {

    @Test
    void contextLoads() {
        // Intentionally empty — context startup is the assertion.
    }
}

36
pom.xml Normal file
View File

@@ -0,0 +1,36 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.ippon.trainning.kafkaintegrationtest</groupId>
<artifactId>kafka-integration-test-parent</artifactId>
<version>0.0.1-SNAPSHOT</version>
<packaging>pom</packaging>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<java.version>11</java.version>
<maven.compiler.source>${java.version}</maven.compiler.source>
<maven.compiler.target>${java.version}</maven.compiler.target>
</properties>
<modules>
<module>kafka-common-test</module>
<module>kafka-consumer</module>
<module>kafka-producer</module>
</modules>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-dependencies</artifactId>
<version>2.4.5</version>
<type>pom</type>
<scope>import</scope>
</dependency>
</dependencies>
</dependencyManagement>
</project>