1. Add the dependency
<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka</artifactId>
</dependency>
2. Configure the parameters
spring:
  kafka:
    bootstrap-servers: 101.34.251.168:9092
    producer:
      retries: 3 # With a value greater than 0, the client resends records whose send failed
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
    consumer:
      enable-auto-commit: false
      auto-offset-reset: earliest
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
    listener:
      # RECORD: commit after each record has been processed by the listener (ListenerConsumer)
      # BATCH: commit after each batch returned by poll() has been processed by the listener (ListenerConsumer)
      # TIME: commit after a poll() batch has been processed, once more than TIME has elapsed since the last commit
      # COUNT: commit after a poll() batch has been processed, once at least COUNT records have been processed
      # COUNT_TIME: commit when either the TIME or the COUNT condition is met
      # MANUAL: commit after Acknowledgment.acknowledge() is called manually, once a poll() batch has been processed
      # MANUAL_IMMEDIATE: commit immediately when Acknowledgment.acknowledge() is called manually; this is the mode usually used
      ack-mode: MANUAL_IMMEDIATE
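
If you prefer Java configuration over application.yml, roughly the same consumer settings can be expressed with a listener container factory bean. This is only a sketch and is not needed when the YAML above is used; the class name KafkaConsumerConfig and the bean layout are assumptions.

import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.listener.ContainerProperties;

// Illustrative class name; mirrors the consumer/listener section of application.yml
@Configuration
public class KafkaConsumerConfig {

    @Bean
    public ConsumerFactory<String, String> consumerFactory() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "101.34.251.168:9092");
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        return new DefaultKafkaConsumerFactory<>(props);
    }

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        // Same effect as ack-mode: MANUAL_IMMEDIATE in application.yml
        factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.MANUAL_IMMEDIATE);
        return factory;
    }
}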

3. Build the producer
package com.wen.kafka.controller;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

@RestController
public class ProducerController {
    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;
    // The "/send" path and the return value are illustrative additions
    @RequestMapping("/send")
    public String sendMessage() {
        kafkaTemplate.send("test", "key", "msg2"); // topic "test", key "key", value "msg2"
        return "send success";
    }
}
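
Note that send() is asynchronous: the call returns before the broker has acknowledged the record. If you want to log the outcome, the bare send() inside sendMessage() could be replaced with something like the sketch below, which assumes spring-kafka 3.x, where send() returns a CompletableFuture (in 2.x it returns a ListenableFuture with an addCallback method instead).

// Sketch only: assumes spring-kafka 3.x, where send() returns CompletableFuture<SendResult<K, V>>
kafkaTemplate.send("test", "key", "msg2").whenComplete((result, ex) -> {
    if (ex != null) {
        System.err.println("send failed: " + ex.getMessage());
    } else {
        // RecordMetadata holds the partition and offset assigned by the broker
        System.out.println("sent to partition " + result.getRecordMetadata().partition()
                + " at offset " + result.getRecordMetadata().offset());
    }
});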
4. Build the consumer
package com.wen.kafka.consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

// Class name MyConsumer is illustrative; the original omits the class declaration
@Component
public class MyConsumer {
    @KafkaListener(topics = "test", groupId = "GroupOne")
    public void listenGroup(ConsumerRecord<String, String> record) {
        System.out.println(record);
    }
}
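
Because the configuration above disables auto-commit and sets ack-mode to MANUAL_IMMEDIATE, the listener should acknowledge each record explicitly, otherwise offsets are never committed. A minimal sketch of such a listener follows; the class name ManualAckConsumer is an assumption.

package com.wen.kafka.consumer;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Component;

@Component
public class ManualAckConsumer {
    // With MANUAL_IMMEDIATE, the offset is committed as soon as acknowledge() is called
    @KafkaListener(topics = "test", groupId = "GroupOne")
    public void listenGroup(ConsumerRecord<String, String> record, Acknowledgment ack) {
        System.out.println(record.value());
        ack.acknowledge();
    }
}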