• Kafka getting started and usage (without Spring integration)


    1. Add the Maven dependency

    <dependency>
        <groupId>org.apache.kafka</groupId>
        <artifactId>kafka_2.12</artifactId>
        <version>2.0.0</version>
    </dependency>
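
    Note: kafka_2.12 is the full broker artifact (built against Scala 2.12) and pulls the client library in transitively. If you only write producers and consumers, the smaller kafka-clients artifact is sufficient:

    <dependency>
        <groupId>org.apache.kafka</groupId>
        <artifactId>kafka-clients</artifactId>
        <version>2.0.0</version>
    </dependency>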

    2. Producer

    package com.search.kafka;
    
    import java.util.ArrayList;
    import java.util.List;
    import java.util.Properties;
    
    import org.apache.kafka.clients.producer.KafkaProducer;
    import org.apache.kafka.clients.producer.Producer;
    import org.apache.kafka.clients.producer.ProducerRecord;
    
    import com.searchclient.bean.HotelTimePriceBean;
    
    /**
     * Kafka producer.
     * Start the producer first; it sends a single message to the broker containing
     * a list of 10 beans with product IDs 0-9. Then start the consumer to read it.
     */
    public class SimpleKafkaProducer {
    
        public static void main(String[] args) {
    
            Properties props = new Properties();
    
            // Broker address
            props.put("bootstrap.servers", "localhost:9092");
    
            // acks=all: wait until all in-sync replicas have acknowledged the record
            props.put("acks", "all");
    
            // Number of retries on send failure (0 = fail immediately, no retries)
            props.put("retries", 0);
    
            // Total memory (in bytes) the producer may use to buffer unsent records
            props.put("buffer.memory", 33554432);
    
            // Serializer for message keys
            props.put("key.serializer",
                    "org.apache.kafka.common.serialization.StringSerializer");
    
            // Serializer for message values (the custom DataSerializer from step 4)
            props.put("value.serializer",
                    "com.search.kafka.DataSerializer");
    
            Producer<String, List<HotelTimePriceBean>> producer = new KafkaProducer<>(props);
            List<HotelTimePriceBean> list = new ArrayList<HotelTimePriceBean>();
            for (int i = 0; i < 10; i++) {
                // Build 10 beans with product IDs 0-9
                HotelTimePriceBean bean = new HotelTimePriceBean();
                bean.setProductId(Long.valueOf(i));
                list.add(bean);
            }
            producer.send(new ProducerRecord<>("HOTEL_PRICE_HOTSEARCH_TOPIC", list));
            System.out.println("Message sent successfully");
            producer.close();
        }
    
    }
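
    Note that producer.send() is asynchronous: it only queues the record in the buffer, so the println above does not prove delivery (although close() does flush pending records). A minimal sketch of confirming delivery with a callback; the topic name is the one used above, everything else is illustrative:

    producer.send(new ProducerRecord<>("HOTEL_PRICE_HOTSEARCH_TOPIC", list),
            (metadata, exception) -> {
                if (exception != null) {
                    // The record could not be sent or was rejected by the broker
                    exception.printStackTrace();
                } else {
                    System.out.println("Sent to partition " + metadata.partition()
                            + " at offset " + metadata.offset());
                }
            });
    producer.flush(); // block until all buffered records have been delivered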

    3. Consumer

    package com.search.kafka;
    
    import java.time.Duration;
    import java.util.Collections;
    import java.util.List;
    import java.util.Properties;
    import org.apache.commons.collections.CollectionUtils;
    import org.apache.kafka.clients.consumer.ConsumerRecord;
    import org.apache.kafka.clients.consumer.ConsumerRecords;
    import org.apache.kafka.clients.consumer.KafkaConsumer;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;
    import com.searchclient.bean.HotelTimePriceBean;
    import com.vst.search.beans.RealTimePriceBean;
    import com.vst.search.common.mapping.PropertiesDataMapping;
    import com.vst.search.common.util.CommonSearchUtils;
    import com.vst.search.common.util.RealTimePriceUtils;
    
    public enum SimpleKafkaConsumer implements Runnable {
    
        INSTANCE;
    
        private static final Logger logger = LoggerFactory.getLogger(SimpleKafkaConsumer.class);
    
        // Read a value from kafka.properties
        public static String getKafkaProperty(String key) {
            return PropertiesDataMapping.INSTANCE.getPropsMap("kafka.properties").get(key);
        }

        private static final String BOOTSTRAP_SERVERS = getKafkaProperty("kafka.bootstrap.servers");
        private static final String TOPIC = getKafkaProperty("kafka.hotsearch.topic");
        private static final String GROUP_ID = getKafkaProperty("kafka.hotsearch.group.id");
    
        @Override
        public void run() {
    
            Properties props = new Properties();
    
            props.put("bootstrap.servers", BOOTSTRAP_SERVERS); // "localhost:9092"
            // 每个消费者分配独立的组号
            props.put("group.id", GROUP_ID);
    
            // Commit offsets automatically in the background
            props.put("enable.auto.commit", "true");
    
            // How often (ms) to commit the offsets of messages already consumed
            props.put("auto.commit.interval.ms", "1000");
    
            // Session timeout: if no heartbeat reaches the broker within this window,
            // the consumer is considered dead and its partitions are rebalanced
            props.put("session.timeout.ms", "30000");
    
            // props.put("auto.offset.reset", "earliest");
    
            props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
            props.put("value.deserializer", "com.vst.search.kafka.DataDeserializer");
    
            KafkaConsumer<String, List<HotelTimePriceBean>> consumer = new KafkaConsumer<>(props);
    
            consumer.subscribe(Collections.singletonList(TOPIC)); // core call 1: subscribe to the topic
    
            while (true) {
                if (!"true".equals(CommonSearchUtils.getConstValue("kafka.enable"))) {
                    logger.info("kafka is disable...");
                    try {
                        Thread.sleep(1000);
                    } catch (InterruptedException e) {
                        logger.error("kafka thread is interrupted...");
                    }
                } else {
                    // Core call 2: poll fetches a batch of records, waiting up to 100 ms for data
                    ConsumerRecords<String, List<HotelTimePriceBean>> records = consumer.poll(Duration.ofMillis(100));
                    for (ConsumerRecord<String, List<HotelTimePriceBean>> record : records) {
                        List<HotelTimePriceBean> value = record.value();
                        if (CollectionUtils.isNotEmpty(value)) {
                            logger.info("HotSearchConsumer:{}", value);
                            List<RealTimePriceBean> realTimePriceBeans = buildRealTimePriceBeans(value); // convert
                            RealTimePriceUtils.buildAndInsertBatch(realTimePriceBeans); // batch insert
                        }
                    }
                }
            }
        }
    }
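
    The loop above never exits. If a clean shutdown is needed, the standard pattern (a sketch, not part of the original code) is to call consumer.wakeup() from another thread — the only thread-safe KafkaConsumer method — which makes a blocked poll() throw WakeupException:

    // Somewhere during shutdown (e.g. a JVM shutdown hook):
    // consumer.wakeup();

    try {
        while (true) {
            ConsumerRecords<String, List<HotelTimePriceBean>> records = consumer.poll(Duration.ofMillis(100));
            // ... process records as above ...
        }
    } catch (org.apache.kafka.common.errors.WakeupException e) {
        // expected during shutdown, nothing to do
    } finally {
        consumer.close(); // leaves the group and, with auto-commit on, commits final offsets
    }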

    4. Object serialization

    package com.search.kafka;
    
    import java.util.List;
    import java.util.Map;
    
    import org.apache.kafka.common.serialization.Serializer;
    
    import com.alibaba.fastjson.JSON;
    import com.searchclient.bean.HotelTimePriceBean;
    
    public class DataSerializer implements Serializer<List<HotelTimePriceBean>> {
    
        @Override
        public void configure(Map<String, ?> configs, boolean isKey) {}
    
        @Override
        public byte[] serialize(String topic, List<HotelTimePriceBean> data) {
            return JSON.toJSONBytes(data);
        }
    
        @Override
        public void close() {}
    
    }
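
    As an alternative to passing class names through Properties (which is only checked at runtime), the KafkaProducer and KafkaConsumer constructors also accept serializer instances directly, so a type mismatch fails at compile time. A sketch using the classes above:

    Producer<String, List<HotelTimePriceBean>> producer = new KafkaProducer<>(props,
            new org.apache.kafka.common.serialization.StringSerializer(),
            new DataSerializer());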

    5. Object deserialization

    package com.search.kafka;
    
    import java.nio.charset.StandardCharsets;
    import java.util.List;
    import java.util.Map;
    
    import org.apache.kafka.common.serialization.Deserializer;
    
    import com.alibaba.fastjson.JSONArray;
    import com.searchclient.bean.HotelTimePriceBean;
    
    public class DataDeserializer implements Deserializer<List<HotelTimePriceBean>> {
    
        @Override
        public void configure(Map<String, ?> configs, boolean isKey) {}
    
        @Override
        public List<HotelTimePriceBean> deserialize(String topic, byte[] data) {
            if (data == null) {
                return null;
            }
            // Decode explicitly as UTF-8 rather than relying on the platform default charset
            return JSONArray.parseArray(new String(data, StandardCharsets.UTF_8), HotelTimePriceBean.class);
        }
    
        @Override
        public void close() {}
    
    }
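
    A quick way to sanity-check that the serializer and deserializer agree is a round trip through byte[], without any broker. A minimal sketch (the getProductId getter is assumed to exist alongside setProductId):

    package com.search.kafka;

    import java.util.Collections;
    import java.util.List;

    import com.searchclient.bean.HotelTimePriceBean;

    public class SerdeRoundTrip {

        public static void main(String[] args) {
            HotelTimePriceBean bean = new HotelTimePriceBean();
            bean.setProductId(42L);

            // Serialize to bytes exactly as the producer would, then read them back
            byte[] bytes = new DataSerializer()
                    .serialize("HOTEL_PRICE_HOTSEARCH_TOPIC", Collections.singletonList(bean));
            List<HotelTimePriceBean> back = new DataDeserializer()
                    .deserialize("HOTEL_PRICE_HOTSEARCH_TOPIC", bytes);

            System.out.println(back.get(0).getProductId()); // should print 42
        }
    }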

    6. Startup (web listener)

    package com.search.web.listener;
    
    import javax.servlet.ServletContextEvent;
    
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;
    import org.springframework.web.context.ContextLoaderListener;
    
    import com.search.kafka.SimpleKafkaConsumer;
    
    
    public class SearchDataLoader extends ContextLoaderListener {
    
        private static final Logger logger = LoggerFactory.getLogger(SearchDataLoader.class);
    
        @Override
        public void contextInitialized(ServletContextEvent event) {
            super.contextInitialized(event); // let Spring initialize its root context first

            // Start the Kafka consumer on a daemon thread so it cannot block container shutdown
            Thread simpleKafkaConsumerThread = new Thread(SimpleKafkaConsumer.INSTANCE);
            simpleKafkaConsumerThread.setDaemon(true);
            simpleKafkaConsumerThread.start();
        }
    }

    7. Configure the listener in web.xml

    <listener>
        <description>Used to initialize Search Data.</description>
        <listener-class>com.search.web.listener.SearchDataLoader</listener-class>
    </listener>

    Note: in a web project, this listener is what starts the consumer thread.
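
    Outside a web container, the same consumer can be started from a plain main method instead of the listener (a sketch; the class name is illustrative):

    public class ConsumerBootstrap {
        public static void main(String[] args) {
            // A non-daemon thread keeps the JVM alive while the consumer runs
            new Thread(SimpleKafkaConsumer.INSTANCE, "kafka-consumer").start();
        }
    }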

  • Original post: https://www.cnblogs.com/zhanh247/p/11579022.html