• Kafka中使用Avro编码、解码消息


    1. 生产者代码

    import com.twitter.bijection.Injection;
    import com.twitter.bijection.avro.GenericAvroCodecs;
    import org.apache.avro.Schema;
    import org.apache.avro.generic.GenericData;
    import org.apache.avro.generic.GenericRecord;
    import org.apache.kafka.clients.producer.KafkaProducer;
    import org.apache.kafka.clients.producer.ProducerRecord;
    
    import java.util.Properties;
    
    /**
     * Created by p on 2018/10/8.
     */
    public class AvroKafkaProducer {
        public static final String USER_SCHEMA = "{
    " +
                "    "type":"record",
    " +
                "    "name":"Customer",
    " +
                "    "fields":[
    " +
                "        {"name":"id","type":"int"},
    " +
                "        {"name":"name","type":"string"},
    " +
                "        {"name":"email","type":["null","string"],"default":"null"}
    " +
                "    ]
    " +
                "}";
    
    
        public static void main(String[] args){
    
            Properties kafkaProps = new Properties();
            kafkaProps.put("bootstrap.servers","ip:9092");
            kafkaProps.put("key.serializer","org.apache.kafka.common.serialization.StringSerializer");
            kafkaProps.put("value.serializer","org.apache.kafka.common.serialization.ByteArraySerializer");
            kafkaProps.put("partitioner.class","MyPartitioner");
    
            Schema.Parser parser = new Schema.Parser();
            Schema schema = parser.parse(USER_SCHEMA);
    
            Injection<GenericRecord,byte[]> injection = GenericAvroCodecs.toBinary(schema);
            KafkaProducer producer = new KafkaProducer<String,byte[]>(kafkaProps);
            for(int i = 0;i < 1000;i++){
                GenericData.Record record = new GenericData.Record(schema);
                record.put("id",i);
                record.put("name","name-"+i);
                record.put("email","email-"+i);
                byte[] bytes = injection.apply(record);
                ProducerRecord<String,byte[]> record1 = new ProducerRecord<String, byte[]>("Customer","customer-"+i,bytes);
                producer.send(record1);
            }
            producer.close();
            System.out.println(USER_SCHEMA);
        }
    }

    2. 消费者代码

    import com.twitter.bijection.Injection;
    import com.twitter.bijection.avro.GenericAvroCodecs;
    import org.apache.avro.Schema;
    import org.apache.avro.generic.GenericRecord;
    import org.apache.kafka.clients.consumer.ConsumerRecord;
    import org.apache.kafka.clients.consumer.ConsumerRecords;
    import org.apache.kafka.clients.consumer.KafkaConsumer;
    
    import java.util.Collections;
    import java.util.Properties;
    
    /**
     * Created by p on 2018/10/14.
     */
    public class AvroKafkaConsumer {
    
        public static final String USER_SCHEMA = "{
    " +
                "    "type":"record",
    " +
                "    "name":"Customer",
    " +
                "    "fields":[
    " +
                "        {"name":"id","type":"int"},
    " +
                "        {"name":"name","type":"string"},
    " +
                "        {"name":"email","type":["null","string"],"default":"null"}
    " +
                "    ]
    " +
                "}";
    
        public static void main(String[] args){
            Properties kafkaProps = new Properties();
            kafkaProps.put("bootstrap.servers","ip:9092");
    
            kafkaProps.put("key.deserializer","org.apache.kafka.common.serialization.StringDeserializer");
            kafkaProps.put("value.deserializer","org.apache.kafka.common.serialization.ByteArrayDeserializer");
    
            kafkaProps.put("group.id","DemoAvroKafkaConsumer");
    
            kafkaProps.put("auto.offset.reset","earliest");
    
            KafkaConsumer<String ,byte[]> consumer = new KafkaConsumer<String, byte[]>(kafkaProps);
    
            consumer.subscribe(Collections.singletonList("Customer"));
    
            Schema.Parser parser = new Schema.Parser();
            Schema schema = parser.parse(USER_SCHEMA);
    
            Injection<GenericRecord,byte[]> injection = GenericAvroCodecs.toBinary(schema);
    
            try {
                while (true){
                    ConsumerRecords<String,byte[]> records = consumer.poll(10);
                    for(ConsumerRecord<String,byte[]> record : records){
                        GenericRecord record1 = injection.invert(record.value()).get();
                        System.out.println(record.key() + ":" + record1.get("id") + "	" + record1.get("name") + "	" + record1.get("email"));
                    }
                }
            } finally {
                consumer.close();
            }
        }
    }

    3. pom依赖

    <dependency>
                <groupId>org.apache.kafka</groupId>
                <artifactId>kafka_2.11</artifactId>
                <version>1.0.0</version>
            </dependency>
            <dependency>
                <groupId>org.apache.avro</groupId>
                <artifactId>avro</artifactId>
                <version>1.7.6-cdh5.9.1</version>
            </dependency>
            <dependency>
                <groupId>com.twitter</groupId>
                <artifactId>bijection-avro_2.11</artifactId>
                <version>0.9.6</version>
            </dependency>
  • 相关阅读:
    OpenCV_Python —— (6)图像色彩空间
    OpenCV_Python —— (5)图像模糊/平滑/滤波
    Java 14 祭出增强版 switch,真香!!
    推荐 9 个 爱不释手的 JSON 工具!
    从 0 开始手写一个 Mybatis 框架,三步搞定!
    Java常用的几个Json库,性能强势对比!
    Oracle JDK 和 OpenJDK 有什么区别?
    极客时间-左耳听风-程序员攻略-UI/UX设计
    OpenCV导向滤波(引导滤波)实现(Guided Filter)代码,以及使用颜色先验算法去雾
    python面向对象小练习
  • 原文地址:https://www.cnblogs.com/darange/p/9787139.html
Copyright © 2020-2023  润新知