Code
package com.xz.kafka.consumer;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.time.Duration;
import java.util.ArrayList;
import java.util.Properties;

/**
 * Standalone consumer: consumes data from a single topic.
 */
public class CustomConsumer {

    public static void main(String[] args) {
        // Configuration
        Properties properties = new Properties();

        // Cluster connection: bootstrap.servers
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,
                "192.168.136.27:9092,192.168.136.28:9092,192.168.136.29:9092");

        // Key/value deserializers
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());

        // Consumer group id
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, "test5");

        // Disable auto-commit, since offsets are committed manually below
        properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);

        // Partition assignment strategy
        properties.put(ConsumerConfig.PARTITION_ASSIGNMENT_STRATEGY_CONFIG,
                "org.apache.kafka.clients.consumer.StickyAssignor");

        // 1. Create a consumer
        KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(properties);

        // 2. Subscribe to the topic firstTopic
        ArrayList<String> topics = new ArrayList<>();
        topics.add("firstTopic");
        kafkaConsumer.subscribe(topics);

        // 3. Consume data
        while (true) {
            // Poll for new records, waiting at most 1 second
            ConsumerRecords<String, String> consumerRecords = kafkaConsumer.poll(Duration.ofSeconds(1));

            // Print each record
            for (ConsumerRecord<String, String> consumerRecord : consumerRecords) {
                System.out.println(consumerRecord);
            }

            // Commit offsets asynchronously
            kafkaConsumer.commitAsync();
        }
    }
}
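The loop above never exits, so the consumer is never closed and the final batch of offsets is committed only asynchronously. Below is a minimal shutdown sketch, assuming the kafkaConsumer variable from the example above; the wakeup/WakeupException pattern is standard Kafka client usage, not part of the original code:

// Sketch: interrupt a blocked poll() from a shutdown hook, then commit and close cleanly.
final Thread mainThread = Thread.currentThread();
Runtime.getRuntime().addShutdownHook(new Thread(() -> {
    kafkaConsumer.wakeup();        // makes the blocked poll() throw WakeupException
    try {
        mainThread.join();         // wait for the main loop to finish its cleanup
    } catch (InterruptedException e) {
        e.printStackTrace();
    }
}));

try {
    while (true) {
        ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofSeconds(1));
        for (ConsumerRecord<String, String> record : records) {
            System.out.println(record);
        }
        kafkaConsumer.commitAsync();
    }
} catch (org.apache.kafka.common.errors.WakeupException e) {
    // expected during shutdown; ignore
} finally {
    try {
        kafkaConsumer.commitSync();  // synchronous commit so the last offsets are not lost
    } finally {
        kafkaConsumer.close();
    }
}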
On a broker host in the Kafka cluster, create the firstTopic topic:
bin/kafka-topics.sh --bootstrap-server 192.168.136.27:9092 --create --partitions 3 --replication-factor 1 --topic firstTopic
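Optionally, verify that the topic and its three partitions were created with the describe action of the same script:
bin/kafka-topics.sh --bootstrap-server 192.168.136.27:9092 --describe --topic firstTopic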
Start the example code in IDEA.
On a broker host in the Kafka cluster, start a console producer and enter some data:
bin/kafka-console-producer.sh --bootstrap-server 192.168.136.27:9092 --topic firstTopic
Observe the received records in the IDEA console:
ConsumerRecord(topic = firstTopic, partition = 0, leaderEpoch = 0, offset = 0, CreateTime = 1694097579736, serialized key size = -1, serialized value size = 10, headers = RecordHeaders(headers = [], isReadOnly = false), key = null, value = helo kafka)
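The default ConsumerRecord toString output is verbose. If only a few fields matter, the print loop in the example could instead use the record's standard accessors; a small sketch:

for (ConsumerRecord<String, String> record : consumerRecords) {
    // Print only topic, partition, offset, key and value
    System.out.printf("topic=%s partition=%d offset=%d key=%s value=%s%n",
            record.topic(), record.partition(), record.offset(),
            record.key(), record.value());
}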