Spring + Kafka: Implementing Multiple Consumer Groups with Annotations

kafka | 2019-09-23 09:56:55

When a message is published to a topic and n separate pieces of business logic need to process it, each piece can be given its own consumer group. A new, independent module then neither needs to know about nor affects the existing business, whereas funneling everything through a single consumer works against decoupling and module growth. Below, Spring is integrated with Kafka so that one topic can be consumed by multiple groups; the core pattern is sketched right below, and the full wiring follows in the numbered sections.
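In miniature, the pattern looks like this (the topic and group names here are placeholders, not from the setup below): Kafka delivers every message on a topic to each subscribed consumer group, so two listeners on the same topic with different groupId values both receive it.

// Sketch only: hypothetical topic/group names, see sections 1-5 for the full setup.
// Both methods receive every message on demo-topic because they belong to different groups.
@KafkaListener(topics = {"demo-topic"}, groupId = "billing-group")
public void handleForBilling(ConsumerRecord<String, String> record) { /* business A */ }

@KafkaListener(topics = {"demo-topic"}, groupId = "audit-group")
public void handleForAudit(ConsumerRecord<String, String> record) { /* business B */ }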
1. Maven dependencies

<dependency>
  <groupId>org.springframework</groupId>
  <artifactId>spring-webmvc</artifactId>
  <version>4.2.3.RELEASE</version>
</dependency>

<dependency>
  <groupId>org.springframework.kafka</groupId>
  <artifactId>spring-kafka</artifactId>
  <version>1.3.1.RELEASE</version>
</dependency>

2. Producer code

/**
 * Kafka producer: a single static KafkaTemplate, so only one producer instance is created
 */
public class KafkaProducer {

    private static KafkaTemplate<String, String> kafkaTemplate = new KafkaTemplate<>(producerFactory());


    public static void send(String topic, String key, String data){
        ListenableFuture<SendResult<String, String>> future = kafkaTemplate.send(topic, key, data);
        future.addCallback(new CallBackSuccess(),new FailCallBack(topic, key, data));
    }

    public static void send(String topic, String data){
        ListenableFuture<SendResult<String, String>> future = kafkaTemplate.send(topic, data);
        future.addCallback(new CallBackSuccess(),new FailCallBack(topic,"",data));
    }

    private static void send(String topic, Integer partition, Long timestamp, String key, String value){
        kafkaTemplate.send(topic, partition, timestamp, key, value);
    }

    /**
     * Description: build the producer configuration
     * Date:        2017-07-11
     * @author      shaqf
     */
    private static Map<String, Object> producerConfigs() {
        Map<String, Object> props = Maps.newHashMap();
        String list = Properties.appProps.getValue("kafka.broker");
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, list);
        props.put(ProducerConfig.RETRIES_CONFIG, 0);  
        props.put(ProducerConfig.BATCH_SIZE_CONFIG, 4096);  
        props.put(ProducerConfig.LINGER_MS_CONFIG, 1);  
        props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 40960);  
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);  
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);  
        return props;  
    }  

    /** Build the producer factory */
    private static ProducerFactory<String, String> producerFactory() {
        return new DefaultKafkaProducerFactory<>(producerConfigs());  
    }

    /**
     * Success callback invoked after a message is sent
     */
    static class CallBackSuccess implements SuccessCallback<SendResult<String, String>> {
        @Override
        public void onSuccess(SendResult<String, String> result) {
            System.out.println("send succeeded");
        }

    }

    /**
     * Failure callback invoked after a message fails to send
     */
   static class FailCallBack implements FailureCallback{
       String topic;
       String key;
       String data;

       FailCallBack(String topic, String key, String data){
           this.data = data;
           this.key = key;
           this.topic = topic;
       }
        @Override
        public void onFailure(Throwable throwable) {
            System.out.println("失败 topid:"+topic+",key:"+key+",data:"+data);
            throwable.printStackTrace();
        }
    }


    public static void main(String[] args) throws Exception{
        KafkaTemplate<String,String> hh = kafkaTemplate;
        System.out.print(hh);
        for (int i=0 ; i< 500;i++){
            ListenableFuture<SendResult<String, String>> r =  hh.send("yyy7","key2",""+i);
            r.addCallback(new CallBackSuccess(),new FailCallBack("","",""));
            hh.flush();
            Thread.sleep(1000);
        }
    }
}
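For reference, business code can then publish through this helper with a single static call. The service class and payload below are illustrative only and not part of the original article (the topic "task1" is one of the topics consumed later):

public class OrderService {
    public void notifyOrderCreated(String orderId) {
        // Fire-and-forget send; success/failure is reported through the callbacks above.
        KafkaProducer.send("task1", orderId, "{\"orderId\":\"" + orderId + "\"}");
    }
}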

3. Consumer code

// Consumes topics via @KafkaListener annotations
@Configuration  
@EnableKafka
public class KafkaConsumer {

    final static String list = Properties.appProps.getValue("kafka.broker");

    /**
     * Description: build the consumer configuration
     * Date:        2017-07-11
     * @author      shaqf
     */
    private Map<String, Object> consumerConfigs() {
        Map<String, Object> props = Maps.newHashMap();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, list);
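        // enable.auto.commit is false, so the spring-kafka listener container commits offsets
        // itself (BATCH ack mode by default) and the auto-commit interval below has no effect.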
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);  
        props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "100");  
        props.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "15000");  
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);  
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);  
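        // Default group.id; individual @KafkaListener methods can override it with the
        // groupId attribute (available since spring-kafka 1.3).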
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "group1");
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        System.out.println("KafkaConsumer consumerConfigs "+ JsonUtil.object2Json(props));
        return props;  
    }
    /** Build the consumer factory */
    private ConsumerFactory<String, String> consumerFactory() {
        return new DefaultKafkaConsumerFactory<>(consumerConfigs());
    }
    /** Build the listener container factory */
    @Bean  
    public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, String>> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory1 = new ConcurrentKafkaListenerContainerFactory<>();
        factory1.setConsumerFactory(consumerFactory());
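        // Two concurrent KafkaMessageListenerContainers per listener; keeping this at or
        // below the topic's partition count avoids idle consumers.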
        factory1.setConcurrency(2);
        factory1.getContainerProperties().setPollTimeout(3000);
        System.out.println("KafkaConsumer kafkaListenerContainerFactory factory"+ JsonUtil.object2Json(factory1));
        return factory1;
    }

    /**
     * Listener bean for consumer group 1
     * @return
     */
    @Bean
    public Group1Listener listener1() {
        return new Group1Listener();
    }

    /**
     * Listener bean for consumer group 2
     * @return
     */
    @Bean
    public Group2Listener listener2() {
        return new Group2Listener();
    }

}
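Note that the groupId attribute used in the listener classes below requires spring-kafka 1.3 or later. On older versions, a common alternative (shown here only as a sketch, with a hypothetical bean name, not code from this setup) is to declare one container factory per group inside the KafkaConsumer class above and select it per listener with the containerFactory attribute:

    /** Sketch: a dedicated container factory whose consumers join group2. */
    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> group2ContainerFactory() {
        Map<String, Object> props = consumerConfigs();        // consumerConfigs() returns a fresh map
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "group2");  // only the group id differs
        ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(new DefaultKafkaConsumerFactory<>(props));
        return factory;
    }

    // On the listener method, reference the factory instead of a groupId:
    // @KafkaListener(topics = {"task1"}, containerFactory = "group2ContainerFactory")
    // public void task1(ConsumerRecord<?, ?> record) { ... }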

4. Consumer group 1

public class Group1Listener {

    @KafkaListener(topics = {"test-topic"})
    public void listen(ConsumerRecord<?, ?> record) {
        Optional<?> kafkaMessage = Optional.ofNullable(record.value());
        if (kafkaMessage.isPresent()) {
            Object message = kafkaMessage.get();
            System.out.println("listen1 " + message);
        }
    }

    @KafkaListener(topics = { "task1" },groupId = "group1")
    public void task1(ConsumerRecord<?, ?> record) {
        System.out.println("这是"+" task1 的消费者");
        System.out.println("这是group1 topic task1 KafkaConsumer ---------->>>>>>>>:"+ JsonUtil.object2Json(record));
        Object message = record.value();
        System.out.println("group1 topic task1 "+record.topic());
        System.out.println(message);
        System.out.println(record.key());
        System.out.println(record);
    }


    @KafkaListener(topics = { "gift" },groupId = "group1")
    public void gift(ConsumerRecord<String, String> record) {

        String key = record.key();
        String value = record.value();

        System.out.println("groupId1 kafka gift Consumer value:"+value);

    }

}

5. Consumer group 2

public class Group2Listener {

    @KafkaListener(topics = { "taskCmd" })
    public void taskCmd(ConsumerRecord<?, ?> record) {
        System.out.println("  KafkaConsumer ---------->>>>>>>>:"+ JsonUtil.object2Json(record));
        Object message = record.value();
        System.out.println(" 这是group2 topic taskCmd "+record.topic());
        System.out.println(message);
        System.out.println(record.key());
        System.out.println(record);
    }

    @KafkaListener(topics = { "task" })
    public void task(ConsumerRecord<?, ?> record) {
        System.out.println("这是group2 topic task KafkaConsumer ---------->>>>>>>>:"+ JsonUtil.object2Json(record));
        Object message = record.value();
        System.out.println("这是group2 topic task "+record.topic());
        System.out.println(message);
        System.out.println(record.key());
        System.out.println(record);
    }

    @KafkaListener(topics = { "task1" },groupId = "group2")
    public void task1(ConsumerRecord<?, ?> record) {
        System.out.println("这是group2"+" task1 的消费者");
        System.out.println("这是group2 topic task1 KafkaConsumer ---------->>>>>>>>:"+ JsonUtil.object2Json(record));
        Object message = record.value();
        System.out.println("group2 topic task1 "+record.topic());
        System.out.println(message);
        System.out.println(record.key());
        System.out.println(record);
    }

    @KafkaListener(topics = { "gift" },groupId = "group2")
    public void gift(ConsumerRecord<String, String> record) {

        String key = record.key();
        String value = record.value();

        System.out.println("groupId2 kafka gift Consumer value:"+value);

    }

}
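To verify the behaviour end to end, send one message to a topic both listener classes subscribe to (task1) and watch the logs: the task1 methods of both group1 and group2 print the record, because each group tracks its own offsets. A minimal driver reusing the producer from section 2 (the driver class name is just an example):

public class MultiGroupDemo {
    public static void main(String[] args) throws Exception {
        // One send, two consumers: Group1Listener.task1 and Group2Listener.task1
        // both receive this record because they belong to different consumer groups.
        KafkaProducer.send("task1", "demo-key", "hello multi-group");
        Thread.sleep(5000); // give the async send and the listeners time to run
    }
}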
