• Kafka Config


    KafkaProducerConfig

    package com.example.springboot.config;
     
    import lombok.extern.slf4j.Slf4j;
    import org.apache.commons.io.FileUtils;
    import org.apache.kafka.clients.producer.ProducerConfig;
    import org.apache.kafka.common.config.SaslConfigs;
    import org.apache.kafka.common.config.SslConfigs;
    import org.springframework.boot.system.ApplicationHome;
    import org.springframework.context.annotation.Bean;
    import org.springframework.context.annotation.Configuration;
    import org.springframework.kafka.core.DefaultKafkaProducerFactory;
    import org.springframework.kafka.core.KafkaTemplate;
    import org.springframework.kafka.core.ProducerFactory;
     
    import java.io.File;
    import java.io.IOException;
    import java.io.InputStream;
    import java.util.HashMap;
    import java.util.Map;
    import java.util.Objects;
     
    /**
     * Kafka producer configuration
     */
    @Slf4j
    @Configuration
    public class KafkaProducerConfig {
     
        @Bean
        public KafkaTemplate<String, String> kafkaTemplate() {
            return new KafkaTemplate<>(producerFactory());
        }
     
        /**
         * Producer factory configuration
         */
        @Bean
        public ProducerFactory<String, String> producerFactory() {
            Map<String, Object> props = new HashMap<>();
     
            props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9002");
            props.put(ProducerConfig.ACKS_CONFIG, "all");
            props.put(ProducerConfig.RETRIES_CONFIG, 3);
            props.put(ProducerConfig.BATCH_SIZE_CONFIG, 106384);
            props.put(ProducerConfig.LINGER_MS_CONFIG, 1);
            props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
            props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
            props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.springframework.kafka.support.serializer.JsonSerializer");
     
            props.put("security.protocol", "SASL_SSL");
            props.put(SaslConfigs.SASL_MECHANISM, "SCRAM-SHA-512");
            //Files inside a jar cannot be read via a File path; as a workaround, copy the bundled truststore out of the jar into the jar's directory
            ApplicationHome applicationHome = new ApplicationHome(KafkaProducerConfig.class);
            //Directory containing the packaged jar
            String rootPath = applicationHome.getSource().getParentFile().toString();
            String configFilePath = rootPath + File.separator + "client_truststore.jks";
            log.info("Truststore file path: " + configFilePath);
            File configFile = new File(configFilePath);
            if (!configFile.exists()) {
                //Read the file from the classpath (the project's /resources/client_truststore.jks)
                try (InputStream in = this.getClass().getClassLoader().getResourceAsStream("client_truststore.jks")) {
                    FileUtils.copyInputStreamToFile(Objects.requireNonNull(in, "client_truststore.jks not found"), configFile);
                } catch (IOException e) {
                    throw new IllegalArgumentException("client_truststore.jks not found -> " + e.getMessage(), e);
                }
            }
            props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, configFilePath);
            props.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, "Abc123");
            //Note: the trailing semicolon at the end of the JAAS string must not be omitted
            props.put(SaslConfigs.SASL_JAAS_CONFIG, "org.apache.kafka.common.security.scram.ScramLoginModule required username='YouName' password='YouPass';");
 
            return new DefaultKafkaProducerFactory<>(props);
        }
     
    }
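 
    A minimal usage sketch (not part of the original post): with the template bean above, a service can inject KafkaTemplate and publish messages directly. The topic name "test-topic" and the MessageProducer class are illustrative assumptions.
 
    package com.example.springboot.service;
 
    import lombok.RequiredArgsConstructor;
    import org.springframework.kafka.core.KafkaTemplate;
    import org.springframework.stereotype.Service;
 
    @Service
    @RequiredArgsConstructor
    public class MessageProducer {
 
        private final KafkaTemplate<String, String> kafkaTemplate;
 
        //Publish a record; the key determines the target partition, the value is the payload
        public void send(String key, String value) {
            //"test-topic" is a hypothetical topic name for illustration
            kafkaTemplate.send("test-topic", key, value);
        }
    }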

    KafkaConsumerConfig

    package com.example.springboot.config;
     
    import com.example.springboot.listener.KafkaMessListener;
    import lombok.extern.slf4j.Slf4j;
    import org.apache.commons.io.FileUtils;
    import org.apache.kafka.clients.consumer.ConsumerConfig;
    import org.apache.kafka.common.config.SaslConfigs;
    import org.apache.kafka.common.config.SslConfigs;
    import org.springframework.boot.ApplicationRunner;
    import org.springframework.boot.system.ApplicationHome;
    import org.springframework.context.annotation.Bean;
    import org.springframework.context.annotation.Configuration;
    import org.springframework.kafka.annotation.EnableKafka;
    import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
    import org.springframework.kafka.config.KafkaListenerEndpointRegistry;
    import org.springframework.kafka.core.ConsumerFactory;
    import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
    import org.springframework.kafka.listener.MessageListenerContainer;
    import org.springframework.kafka.support.serializer.JsonDeserializer;
     
    import java.io.File;
    import java.io.IOException;
    import java.io.InputStream;
    import java.util.HashMap;
    import java.util.Map;
    import java.util.Objects;
     
    /**
     * Kafka consumer configuration
     */
    @Configuration
    @EnableKafka
    @Slf4j
    public class KafkaConsumerConfig {
        @Bean
        public ApplicationRunner runner(KafkaListenerEndpointRegistry registry) {
            return args -> {
                //Manually start the listener container registered under the id "KafkaMessListener"
                //(typically used when the listener is declared with autoStartup = "false")
                MessageListenerContainer kafkaMessListener = registry.getListenerContainer("KafkaMessListener");
                kafkaMessListener.start();
            };
        }
     
        /**
         * Configure the listener container factory, wiring in the consumer factory
         */
        @Bean
        public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory() {
            ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
            factory.setConsumerFactory(consumerFactory());
            return factory;
        }
     
        /**
         * Consumer factory
         */
        @Bean
        public ConsumerFactory<String, String> consumerFactory() {
            return new DefaultKafkaConsumerFactory<>(consumerConfigs());
        }
     
        /**
         * Consumer configuration properties
         */
        public Map<String, Object> consumerConfigs() {
            Map<String, Object> props = new HashMap<>();
     
            props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9002");
            props.put(ConsumerConfig.GROUP_ID_CONFIG, "YouGroup_ID");
            props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
            // earliest: if a partition has a committed offset, resume from it; otherwise consume from the beginning
            // latest: if a partition has a committed offset, resume from it; otherwise consume only records produced after the consumer joins
            // none: if every partition has a committed offset, resume after it; if any partition lacks one, throw an exception
            props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
            // Commit interval; only takes effect when enable.auto.commit is true
            props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000");
            props.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "30000");
            props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
            props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.springframework.kafka.support.serializer.JsonDeserializer");
            props.put(JsonDeserializer.TRUSTED_PACKAGES, "*"); // trust all packages for JSON deserialization
     
            props.put("security.protocol", "SASL_SSL");
            props.put(SaslConfigs.SASL_MECHANISM, "SCRAM-SHA-512");
            //Files inside a jar cannot be read via a File path; as a workaround, copy the bundled truststore out of the jar into the jar's directory
            ApplicationHome applicationHome = new ApplicationHome(KafkaConsumerConfig.class);
            //Directory containing the packaged jar
            String rootPath = applicationHome.getSource().getParentFile().toString();
            String configFilePath = rootPath + File.separator + "client_truststore.jks";
            log.info("Truststore file path: " + configFilePath);
            File configFile = new File(configFilePath);
            if (!configFile.exists()) {
                //Read the file from the classpath (the project's /resources/client_truststore.jks)
                try (InputStream in = this.getClass().getClassLoader().getResourceAsStream("client_truststore.jks")) {
                    FileUtils.copyInputStreamToFile(Objects.requireNonNull(in, "client_truststore.jks not found"), configFile);
                } catch (IOException e) {
                    throw new IllegalArgumentException("client_truststore.jks not found -> " + e.getMessage(), e);
                }
            }
            props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, configFilePath);
            props.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, "Abc123");
            //Note: the trailing semicolon at the end of the JAAS string must not be omitted
            props.put(SaslConfigs.SASL_JAAS_CONFIG, "org.apache.kafka.common.security.scram.ScramLoginModule required username='YouName' password='YouPass';");
     
            return props;
        }
    }
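 
    The runner above looks up a container with id "KafkaMessListener", which implies a listener bean declared under that id with autoStartup = "false". A hedged sketch of what that class might look like (the topic name "test-topic" and the method body are assumptions, not from the original post):
 
    package com.example.springboot.listener;
 
    import lombok.extern.slf4j.Slf4j;
    import org.springframework.kafka.annotation.KafkaListener;
    import org.springframework.stereotype.Component;
 
    @Slf4j
    @Component
    public class KafkaMessListener {
 
        //id must match the name passed to registry.getListenerContainer(...);
        //autoStartup = "false" so the ApplicationRunner decides when consumption begins
        @KafkaListener(id = "KafkaMessListener", topics = "test-topic", autoStartup = "false",
                containerFactory = "kafkaListenerContainerFactory")
        public void listen(String message) {
            //"test-topic" is a hypothetical topic name for illustration
            log.info("Received message: {}", message);
        }
    }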
  • Source: https://www.cnblogs.com/vipsoft/p/16427188.html