Kafka原生API使用Java代码-消费者组-消费模式

发布于:2024-06-01 ⋅ 阅读:(125) ⋅ 点赞:(0)

1、消费模式

消费模式

  1. 点对点:一个组消费消息时,每条消息只能由组内的一个消费者消费一次,从而避免重复消费
  2. 发布订阅:多个组消费消息时,每个组都可以消费一次消息

1.1、创建一个3分区1副本的 主题 my_topic1

在这里插入图片描述

1.2、创建生产者 KafkaProducer1

package com.atguigu.kafka.producer;
import org.apache.kafka.clients.producer.*;
import java.util.Properties;
import java.util.concurrent.ExecutionException;
public class KafkaProducer1 {

    /**
     * Sends 20 messages to the 3-partition topic "my_topic1", explicitly
     * targeting partition {@code i % 3}, and prints the partition of each
     * acknowledged record via an asynchronous callback.
     *
     * @param args command-line arguments (unused)
     * @throws ExecutionException   declared for API compatibility (not thrown here)
     * @throws InterruptedException declared for API compatibility (not thrown here)
     */
    public static void main(String[] args) throws ExecutionException, InterruptedException {
        // Producer configuration; ProducerConfig constants avoid typo-prone raw strings.
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.74.148:9092"); // Kafka broker address and port
        props.put(ProducerConfig.ACKS_CONFIG, "all");  // wait for the full ISR to acknowledge each write
        props.put(ProducerConfig.RETRIES_CONFIG, 0);   // do not retry failed sends
        props.put(ProducerConfig.LINGER_MS_CONFIG, 1); // batching wait time in milliseconds
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringSerializer");
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringSerializer");

        // try-with-resources guarantees the producer is closed (flushing any
        // buffered records) even if send() throws.
        try (Producer<String, String> producer = new KafkaProducer<>(props)) {
            for (int i = 0; i < 20; i++) {
                // The partition is chosen explicitly as i % 3, so the key is not
                // used for partitioning.
                // NOTE(review): the key here is the literal string "null", not a
                // null key — kept as-is to preserve behavior, but likely unintended.
                producer.send(
                        new ProducerRecord<>("my_topic1", i % 3, "null", "我是" + i),
                        new Callback() {
                            // Invoked once the broker has acked the record (or the send failed).
                            @Override
                            public void onCompletion(RecordMetadata recordMetadata, Exception e) {
                                // On failure, e is non-null and recordMetadata must not be
                                // dereferenced (it may be null) — the original code would NPE here.
                                if (e != null) {
                                    System.err.println("send failed: " + e);
                                } else {
                                    // recordMetadata describes where the record landed.
                                    System.out.println("partition = " + recordMetadata.partition());
                                }
                            }
                        });
            }
        }
    }
}

partition = 2
partition = 2
partition = 2
partition = 2
partition = 2
partition = 2
partition = 1
partition = 1
partition = 1
partition = 1
partition = 1
partition = 1
partition = 1
partition = 0
partition = 0
partition = 0
partition = 0
partition = 0
partition = 0
partition = 0

1.2、创建消费者

1.2.1、创建消费者 KafkaConsumer1Group1 并指定组 my_group1

package com.atguigu.kafka.consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;
public class KafkaConsumer1Group1 {
    /**
     * Runs a Kafka consumer in group "my_group1" that subscribes to topic
     * "my_topic1" and prints each record's offset, partition and value.
     * (The original javadoc incorrectly referred to topics "foo" and "bar".)
     *
     * @param args command-line arguments (unused)
     */
    public static void main(String[] args) {
        // Consumer configuration; ConsumerConfig constants avoid typo-prone raw strings.
        Properties props = new Properties();
        props.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.74.148:9092"); // Kafka broker address and port
        props.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "my_group1");           // consumer group id
        props.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");      // auto-commit offsets
        props.setProperty(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000"); // auto-commit interval (ms)
        props.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringDeserializer");
        props.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringDeserializer");
        // Start from the earliest offset when the group has no committed offset yet.
        props.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

        // try-with-resources ensures the consumer leaves the group cleanly if
        // the poll loop ever terminates (e.g. via an exception).
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            // Subscribe to the demo topic.
            consumer.subscribe(Arrays.asList("my_topic1"));

            // Poll loop: fetch a batch of records and print each one.
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
                for (ConsumerRecord<String, String> record : records) {
                    System.out.printf("offset = %d,partition: %d,value = %s%n",
                            record.offset(), record.partition(), record.value());
                }
            }
        }
    }

}

offset = 0,partition: 2,value = 我是2
offset = 1,partition: 2,value = 我是5
offset = 2,partition: 2,value = 我是8
offset = 3,partition: 2,value = 我是11
offset = 4,partition: 2,value = 我是14
offset = 5,partition: 2,value = 我是17

1.2.2、创建消费者 KafkaConsumer2Group1 并指定组 my_group1

package com.atguigu.kafka.consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;
public class KafkaConsumer2Group1 {
    /**
     * Runs a second Kafka consumer in group "my_group1" that subscribes to
     * topic "my_topic1" and prints each record's offset, partition and value.
     * (The original javadoc incorrectly referred to topics "foo" and "bar".)
     *
     * @param args command-line arguments (unused)
     */
    public static void main(String[] args) {
        // Consumer configuration; ConsumerConfig constants avoid typo-prone raw strings.
        Properties props = new Properties();
        props.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.74.148:9092"); // Kafka broker address and port
        props.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "my_group1");           // consumer group id
        props.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");      // auto-commit offsets
        props.setProperty(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000"); // auto-commit interval (ms)
        props.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringDeserializer");
        props.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringDeserializer");
        // Start from the earliest offset when the group has no committed offset yet.
        props.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

        // try-with-resources ensures the consumer leaves the group cleanly if
        // the poll loop ever terminates (e.g. via an exception).
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            // Subscribe to the demo topic.
            consumer.subscribe(Arrays.asList("my_topic1"));

            // Poll loop: fetch a batch of records and print each one.
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
                for (ConsumerRecord<String, String> record : records) {
                    System.out.printf("offset = %d,partition: %d,value = %s%n",
                            record.offset(), record.partition(), record.value());
                }
            }
        }
    }

}

offset = 0,partition: 1,value = 我是1
offset = 1,partition: 1,value = 我是4
offset = 2,partition: 1,value = 我是7
offset = 3,partition: 1,value = 我是10
offset = 4,partition: 1,value = 我是13
offset = 5,partition: 1,value = 我是16
offset = 6,partition: 1,value = 我是19

1.2.3、创建消费者 KafkaConsumer3Group1 并指定组 my_group1

package com.atguigu.kafka.consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;
public class KafkaConsumer3Group1 {
    /**
     * Runs a third Kafka consumer in group "my_group1" that subscribes to
     * topic "my_topic1" and prints each record's offset, partition and value.
     * (The original javadoc incorrectly referred to topics "foo" and "bar".)
     *
     * @param args command-line arguments (unused)
     */
    public static void main(String[] args) {
        // Consumer configuration; ConsumerConfig constants avoid typo-prone raw strings.
        Properties props = new Properties();
        props.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.74.148:9092"); // Kafka broker address and port
        props.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "my_group1");           // consumer group id
        props.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");      // auto-commit offsets
        props.setProperty(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000"); // auto-commit interval (ms)
        props.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringDeserializer");
        props.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringDeserializer");
        // Start from the earliest offset when the group has no committed offset yet.
        props.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

        // try-with-resources ensures the consumer leaves the group cleanly if
        // the poll loop ever terminates (e.g. via an exception).
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            // Subscribe to the demo topic.
            consumer.subscribe(Arrays.asList("my_topic1"));

            // Poll loop: fetch a batch of records and print each one.
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
                for (ConsumerRecord<String, String> record : records) {
                    System.out.printf("offset = %d,partition: %d,value = %s%n",
                            record.offset(), record.partition(), record.value());
                }
            }
        }
    }

}

offset = 0,partition: 0,value = 我是0
offset = 1,partition: 0,value = 我是3
offset = 2,partition: 0,value = 我是6
offset = 3,partition: 0,value = 我是9
offset = 4,partition: 0,value = 我是12
offset = 5,partition: 0,value = 我是15
offset = 6,partition: 0,value = 我是18

1.2.4、创建消费者 KafkaConsumer1Group2 并指定组 my_group2

package com.atguigu.kafka.consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;
public class KafkaConsumer1Group2 {
    /**
     * Runs a Kafka consumer in a separate group "my_group2" that subscribes to
     * topic "my_topic1"; being the only member of its group, it receives all
     * three partitions. Prints each record's offset, partition and value.
     * (The original javadoc incorrectly referred to topics "foo" and "bar".)
     *
     * @param args command-line arguments (unused)
     */
    public static void main(String[] args) {
        // Consumer configuration; ConsumerConfig constants avoid typo-prone raw strings.
        Properties props = new Properties();
        props.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.74.148:9092"); // Kafka broker address and port
        props.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "my_group2");           // consumer group id
        props.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");      // auto-commit offsets
        props.setProperty(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000"); // auto-commit interval (ms)
        props.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringDeserializer");
        props.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringDeserializer");
        // Start from the earliest offset when the group has no committed offset yet.
        props.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

        // try-with-resources ensures the consumer leaves the group cleanly if
        // the poll loop ever terminates (e.g. via an exception).
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            // Subscribe to the demo topic.
            consumer.subscribe(Arrays.asList("my_topic1"));

            // Poll loop: fetch a batch of records and print each one.
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
                for (ConsumerRecord<String, String> record : records) {
                    System.out.printf("offset = %d,partition: %d,value = %s%n",
                            record.offset(), record.partition(), record.value());
                }
            }
        }
    }

}

offset = 0,partition: 2,value = 我是2
offset = 1,partition: 2,value = 我是5
offset = 2,partition: 2,value = 我是8
offset = 3,partition: 2,value = 我是11
offset = 4,partition: 2,value = 我是14
offset = 5,partition: 2,value = 我是17
offset = 0,partition: 1,value = 我是1
offset = 1,partition: 1,value = 我是4
offset = 2,partition: 1,value = 我是7
offset = 3,partition: 1,value = 我是10
offset = 4,partition: 1,value = 我是13
offset = 5,partition: 1,value = 我是16
offset = 6,partition: 1,value = 我是19
offset = 0,partition: 0,value = 我是0
offset = 1,partition: 0,value = 我是3
offset = 2,partition: 0,value = 我是6
offset = 3,partition: 0,value = 我是9
offset = 4,partition: 0,value = 我是12
offset = 5,partition: 0,value = 我是15
offset = 6,partition: 0,value = 我是18

在这里插入图片描述

1.3、eagle for apache kafka

在这里插入图片描述
在这里插入图片描述
在这里插入图片描述

1.3.1、查看分区0的数据

在这里插入图片描述

[
  [
    {
      "partition": 0,
      "offset": 0,
      "msg": "我是0",
      "timespan": 1717226677707,
      "date": "2024-06-01 07:24:37"
    },
    {
      "partition": 0,
      "offset": 1,
      "msg": "我是3",
      "timespan": 1717226677720,
      "date": "2024-06-01 07:24:37"
    },
    {
      "partition": 0,
      "offset": 2,
      "msg": "我是6",
      "timespan": 1717226677721,
      "date": "2024-06-01 07:24:37"
    },
    {
      "partition": 0,
      "offset": 3,
      "msg": "我是9",
      "timespan": 1717226677721,
      "date": "2024-06-01 07:24:37"
    },
    {
      "partition": 0,
      "offset": 4,
      "msg": "我是12",
      "timespan": 1717226677721,
      "date": "2024-06-01 07:24:37"
    },
    {
      "partition": 0,
      "offset": 5,
      "msg": "我是15",
      "timespan": 1717226677722,
      "date": "2024-06-01 07:24:37"
    },
    {
      "partition": 0,
      "offset": 6,
      "msg": "我是18",
      "timespan": 1717226677722,
      "date": "2024-06-01 07:24:37"
    }
  ]
]

1.3.2、查看分区1的数据

在这里插入图片描述

[
  [
    {
      "partition": 1,
      "offset": 0,
      "msg": "我是1",
      "timespan": 1717226677720,
      "date": "2024-06-01 07:24:37"
    },
    {
      "partition": 1,
      "offset": 1,
      "msg": "我是4",
      "timespan": 1717226677721,
      "date": "2024-06-01 07:24:37"
    },
    {
      "partition": 1,
      "offset": 2,
      "msg": "我是7",
      "timespan": 1717226677721,
      "date": "2024-06-01 07:24:37"
    },
    {
      "partition": 1,
      "offset": 3,
      "msg": "我是10",
      "timespan": 1717226677721,
      "date": "2024-06-01 07:24:37"
    },
    {
      "partition": 1,
      "offset": 4,
      "msg": "我是13",
      "timespan": 1717226677721,
      "date": "2024-06-01 07:24:37"
    },
    {
      "partition": 1,
      "offset": 5,
      "msg": "我是16",
      "timespan": 1717226677722,
      "date": "2024-06-01 07:24:37"
    },
    {
      "partition": 1,
      "offset": 6,
      "msg": "我是19",
      "timespan": 1717226677722,
      "date": "2024-06-01 07:24:37"
    }
  ]
]

1.3.3、查看分区2的数据

在这里插入图片描述

[
  [
    {
      "partition": 2,
      "offset": 0,
      "msg": "我是2",
      "timespan": 1717226677720,
      "date": "2024-06-01 07:24:37"
    },
    {
      "partition": 2,
      "offset": 1,
      "msg": "我是5",
      "timespan": 1717226677721,
      "date": "2024-06-01 07:24:37"
    },
    {
      "partition": 2,
      "offset": 2,
      "msg": "我是8",
      "timespan": 1717226677721,
      "date": "2024-06-01 07:24:37"
    },
    {
      "partition": 2,
      "offset": 3,
      "msg": "我是11",
      "timespan": 1717226677721,
      "date": "2024-06-01 07:24:37"
    },
    {
      "partition": 2,
      "offset": 4,
      "msg": "我是14",
      "timespan": 1717226677721,
      "date": "2024-06-01 07:24:37"
    },
    {
      "partition": 2,
      "offset": 5,
      "msg": "我是17",
      "timespan": 1717226677722,
      "date": "2024-06-01 07:24:37"
    }
  ]
]