• 基于Kafka消息驱动最终一致事务(二)


    实现用例分析

    上篇《基于Kafka消息驱动最终一致事务(一)》介绍了BASE理论,接着我们引入一个实例看如何实现BASE,我们会用图7显示的算法实现BASE。

    首先介绍使用技术栈

    JDK:1.8

    Spring:spring-boot,spring-data-jpa

    数据库:MySQL

    消息服务器:Kafka

    数据表

    用户库user创建用户表user,更新应用表updates_applied

    -- User account table: one row per user, with running totals of amounts
    -- sold and bought. Updated by the Kafka consumer side of the transaction.
    CREATE TABLE `user` (
        `id` INT(11) NOT NULL AUTO_INCREMENT,
        `name` VARCHAR(50) NOT NULL,
        `amt_sold` INT(11) NOT NULL DEFAULT '0',   -- cumulative amount sold
        `amt_bought` INT(11) NOT NULL DEFAULT '0', -- cumulative amount bought
        PRIMARY KEY (`id`)
    );
    
    -- Idempotency ledger: one row per (transaction, user, role) update that has
    -- already been applied. Kafka delivers at-least-once, so the consumer checks
    -- this table before re-applying a message. The composite primary key enforces
    -- the "applied at most once" guarantee at the database level as well,
    -- matching the lookup key used by the consumer (trans_id, user_id, type).
    CREATE TABLE `updates_applied` (
        `trans_id` INT(11) NOT NULL,    -- transaction.xid that produced this update
        `balance` VARCHAR(50) NOT NULL, -- role applied: 'SELLER' or 'BUYER'
        `user_id` INT(11) NOT NULL,     -- user whose balance was updated
        PRIMARY KEY (`trans_id`, `user_id`, `balance`)
    );

     交易库transaction创建交易库表transaction

    -- Transaction table (in the separate `transaction` database): the source of
    -- truth written first; a matching Kafka message then drives the user updates.
    CREATE TABLE `transaction` (
        `xid` INT(11) NOT NULL AUTO_INCREMENT,
        `seller_id` INT(11) NOT NULL,
        `buyer_id` INT(11) NOT NULL,
        `amount` INT(11) NOT NULL,
        PRIMARY KEY (`xid`)
    );

    配置两个数据源

    使用JavaConfig方式。其它domain类,repository类,service类请看源码github地址:https://github.com/birdstudiocn/spring-sample/tree/master/Message-Driven-Sample

    package cn.birdstudio.user.domain;
    
    import javax.sql.DataSource;
    
    import org.springframework.boot.autoconfigure.jdbc.DataSourceProperties;
    import org.springframework.boot.context.properties.ConfigurationProperties;
    import org.springframework.context.annotation.Bean;
    import org.springframework.context.annotation.Configuration;
    import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
    import org.springframework.orm.jpa.JpaTransactionManager;
    import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
    import org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter;
    import org.springframework.transaction.PlatformTransactionManager;
    
    @Configuration
    @EnableJpaRepositories(basePackageClasses = User.class, entityManagerFactoryRef = "userEntityManagerFactory", transactionManagerRef = "userTransactionManager")
    class UserDataSourceConfiguration {
    
    	/** Binds the app.datasource.user.* connection settings for the user database. */
    	@Bean
    	@ConfigurationProperties("app.datasource.user")
    	DataSourceProperties userDataSourceProperties() {
    		return new DataSourceProperties();
    	}
    
    	/** DataSource for the user database, built from the bound properties. */
    	@Bean
    	@ConfigurationProperties("app.datasource.user")
    	DataSource userDataSource() {
    		DataSourceProperties props = userDataSourceProperties();
    		return props.initializeDataSourceBuilder().build();
    	}
    
    	/**
    	 * EntityManagerFactory for the "user" persistence unit, scanning the
    	 * package that contains {@link User}. DDL generation is off: the schema
    	 * is created by the hand-written SQL above.
    	 */
    	@Bean
    	LocalContainerEntityManagerFactoryBean userEntityManagerFactory() {
    		LocalContainerEntityManagerFactoryBean emf = new LocalContainerEntityManagerFactoryBean();
    		emf.setDataSource(userDataSource());
    		emf.setPackagesToScan(User.class.getPackage().getName());
    		emf.setPersistenceUnitName("user");
    		HibernateJpaVendorAdapter hibernateAdapter = new HibernateJpaVendorAdapter();
    		hibernateAdapter.setGenerateDdl(false);
    		emf.setJpaVendorAdapter(hibernateAdapter);
    		return emf;
    	}
    
    	/** Local JPA transaction manager bound to the user EntityManagerFactory. */
    	@Bean
    	PlatformTransactionManager userTransactionManager() {
    		JpaTransactionManager manager = new JpaTransactionManager();
    		manager.setEntityManagerFactory(userEntityManagerFactory().getObject());
    		return manager;
    	}
    }
    

     TransactionDataSourceConfiguration

    package cn.birdstudio.transaction.domain;
    
    import javax.sql.DataSource;
    
    import org.springframework.boot.autoconfigure.jdbc.DataSourceProperties;
    import org.springframework.boot.context.properties.ConfigurationProperties;
    import org.springframework.context.annotation.Bean;
    import org.springframework.context.annotation.Configuration;
    import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
    import org.springframework.orm.jpa.JpaTransactionManager;
    import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
    import org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter;
    import org.springframework.transaction.PlatformTransactionManager;
    
    @Configuration
    @EnableJpaRepositories(basePackageClasses = Transaction.class, entityManagerFactoryRef = "transactionEntityManagerFactory", transactionManagerRef = "transactionManager")
    class TransactionDataSourceConfiguration {
    
    	/** Binds the app.datasource.transaction.* connection settings. */
    	@Bean
    	@ConfigurationProperties("app.datasource.transaction")
    	DataSourceProperties transactionDataSourceProperties() {
    		return new DataSourceProperties();
    	}
    
    	/** DataSource for the transaction database, built from the bound properties. */
    	@Bean
    	@ConfigurationProperties("app.datasource.transaction")
    	DataSource transactionDataSource() {
    		DataSourceProperties props = transactionDataSourceProperties();
    		return props.initializeDataSourceBuilder().build();
    	}
    
    	/**
    	 * EntityManagerFactory for the "transaction" persistence unit, scanning
    	 * the package that contains {@link Transaction}. DDL generation is off:
    	 * the schema is created by the hand-written SQL above.
    	 */
    	@Bean
    	LocalContainerEntityManagerFactoryBean transactionEntityManagerFactory() {
    		LocalContainerEntityManagerFactoryBean emf = new LocalContainerEntityManagerFactoryBean();
    		emf.setDataSource(transactionDataSource());
    		emf.setPackagesToScan(Transaction.class.getPackage().getName());
    		emf.setPersistenceUnitName("transaction");
    		HibernateJpaVendorAdapter hibernateAdapter = new HibernateJpaVendorAdapter();
    		hibernateAdapter.setGenerateDdl(false);
    		emf.setJpaVendorAdapter(hibernateAdapter);
    		return emf;
    	}
    
    	/** Local JPA transaction manager bound to the transaction EntityManagerFactory. */
    	@Bean
    	PlatformTransactionManager transactionManager() {
    		JpaTransactionManager manager = new JpaTransactionManager();
    		manager.setEntityManagerFactory(transactionEntityManagerFactory().getObject());
    		return manager;
    	}
    }
    

    配置Kafka消息服务

    生产者配置类KafkaProducerConfig.java,配置KafkaTransactionManager必须设置producerFactory.setTransactionIdPrefix("trans");

    Configuration
    public class KafkaProducerConfig {
    	@Bean
    	public ProducerFactory<String, Map<String, Object>> producerFactory() {
    		DefaultKafkaProducerFactory<String, Map<String, Object>> producerFactory = new DefaultKafkaProducerFactory<>(
    				producerConfigs());
    		producerFactory.setTransactionIdPrefix("trans");
    		return producerFactory;
    	}
    
    	@Bean
    	public Map<String, Object> producerConfigs() {
    		Map<String, Object> props = new HashMap<>();
    		props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "172.16.1.168:9092");
    		props.put(ProducerConfig.RETRIES_CONFIG, 2);
    		props.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);
    		props.put(ProducerConfig.LINGER_MS_CONFIG, 1);
    		props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
    		props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    		props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
    
    		return props;
    	}
    
    	@Bean
    	public KafkaTemplate<String, Map<String, Object>> kafkaTemplate() {
    		return new KafkaTemplate<>(producerFactory());
    	}
    }
    

    消费者配置类KafkaConsumerConfig.java,配置KafkaTransactionManager

    @Configuration
    @EnableKafka
    public class KafkaConsumerConfig {
    
    	/**
    	 * Listener container factory wired with a KafkaTransactionManager so that
    	 * offset commits happen via producer.sendOffsetsToTransaction() inside the
    	 * Kafka transaction, instead of via consumer auto-commit.
    	 */
    	@Bean
    	public KafkaListenerContainerFactory<?> kafkaListenerContainerFactory(
    			ProducerFactory<String, Map<String, Object>> producerFactory) {
    		ConcurrentKafkaListenerContainerFactory<String, TransactionMessage> factory = new ConcurrentKafkaListenerContainerFactory<>();
    		factory.setConsumerFactory(consumerFactory());
    		//factory.setMessageConverter(new StringJsonMessageConverter());
    		//factory.setConcurrency(3);
    		factory.getContainerProperties().setPollTimeout(3000);
    		factory.getContainerProperties().setTransactionManager(new KafkaTransactionManager<>(producerFactory));
    		return factory;
    	}
    
    	/** Deserializes keys as String and values as JSON into TransactionMessage. */
    	@Bean
    	public ConsumerFactory<String, TransactionMessage> consumerFactory() {
    		JsonDeserializer<TransactionMessage> jd = new JsonDeserializer<>(TransactionMessage.class);
    		return new DefaultKafkaConsumerFactory<>(consumerConfigs(), new StringDeserializer(), jd);
    	}
    
    	@Bean
    	public Map<String, Object> consumerConfigs() {
    		Map<String, Object> propsMap = new HashMap<>();
    		propsMap.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "172.16.1.168:9092");
    		// Offsets are committed inside the Kafka transaction, never auto-committed.
    		// (The auto-commit interval setting was dropped: it is inert when
    		// enable.auto.commit is false.)
    		propsMap.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
    		propsMap.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "15000");
    		propsMap.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    		propsMap.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JsonDeserializer.class);
    		propsMap.put(ConsumerConfig.GROUP_ID_CONFIG, "group1");
    		propsMap.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
    		// FIX: with a transactional producer upstream, the consumer must only
    		// see committed records; otherwise it may process messages from aborted
    		// transactions, breaking the exactly-once pipeline.
    		propsMap.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, "read_committed");
    		return propsMap;
    	}
    }
    

    Kafka消息监听接口实现UserServiceImpl。@KafkaListener(groupId = "group1", topics = "transaction")注解监听事件接口,@Transactional("userTransactionManager")注解数据库事务。事件接口被调用时KafkaTransactionManager事务开始,然后JpaTransactionManager事务开始,如果事务提交则调用producer.sendOffsetsToTransaction(),最后KafkaTransactionManager事务提交。如果JpaTransactionManager事务有异常则不调用producer.sendOffsetsToTransaction()。如果JpaTransactionManager事务提交后KafkaTransactionManager事务有异常也不调用producer.sendOffsetsToTransaction()。int processed = updatesAppliedRepository.find(trans_id, id, type.toString())语句用来判断是否已经更新了User。producer.sendOffsetsToTransaction()的作用与删除队列消息相当。

    @Component("userService")
    public class UserServiceImpl implements UserService {
    	private static final Logger logger = LoggerFactory.getLogger(UserServiceImpl.class);
    	private final UserRepository userRepository;
    	// Field-injected so the Spring bean constructor signature stays unchanged.
    	@Resource
    	private UpdatesAppliedRepository updatesAppliedRepository;
    
    	public UserServiceImpl(UserRepository userRepository) {
    		this.userRepository = userRepository;
    	}
    
    	/**
    	 * Applies one side (seller or buyer) of a transaction to the user's
    	 * balance. Idempotent under Kafka's at-least-once delivery: the
    	 * updates_applied lookup on (trans_id, user_id, type) skips messages that
    	 * were already processed; the marker row is inserted in the same local DB
    	 * transaction as the balance update.
    	 */
    	private void sold(TransactionMessage msg) {
    		Type type = msg.getType();
    		int id = msg.getId();
    		int amount = msg.getAmount();
    		int trans_id = msg.getXid();
    		// Duplicate-delivery guard: non-zero means this update was already applied.
    		int processed = updatesAppliedRepository.find(trans_id, id, type.toString());
    		if (processed == 0) {
    			switch (type) {
    			case SELLER:
    				userRepository.updateAmtSold(id, amount);
    				break;
    			case BUYER:
    				userRepository.updateAmtBought(id, amount);
    				break;
    			default:
    				// FIX: fail fast. Without this, an unrecognized type would fall
    				// through and be recorded below as applied even though no
    				// balance update ever happened.
    				throw new IllegalStateException("unknown transaction type: " + type);
    			}
    			//throwException();
    			UpdatesApplied updatesApplied = new UpdatesApplied();
    			updatesApplied.setTrans_id(trans_id);
    			updatesApplied.setUser_id(id);
    			updatesApplied.setBalance(type.toString());
    			updatesAppliedRepository.save(updatesApplied);
    		}
    	}
    
    	/**
    	 * Kafka listener entry point. The container opens a Kafka transaction, then
    	 * the @Transactional annotation opens the JPA transaction on the user DB;
    	 * offsets are sent to the Kafka transaction only after the JPA commit
    	 * succeeds, so a failed DB update leads to redelivery.
    	 */
    	@Override
    	@Transactional("userTransactionManager")
    	@KafkaListener(groupId = "group1", topics = "transaction")
    	//@KafkaListener(groupId = "group1", topicPartitions = @TopicPartition(topic = "", partitionOffsets = @PartitionOffset(partition = "0", initialOffset = "5")))
    	public void receivekafka(TransactionMessage msg) {
    		logger.info("receive kafka message {}", msg);
    		sold(msg);
    	}
    
    	// Test hook: uncomment the call in sold() to simulate a mid-transaction failure.
    	private void throwException() {
    		throw new RuntimeException("throw exception in test");
    	}
    }
    

    参考资料

    1,http://queue.acm.org/detail.cfm?id=1394128

    2,Spring Data JPA - Multiple datasources exam

    3,JMS

    4,https://stackoverflow.com/questions/42230797/spring-cloud-stream-kafka-eventual-consistency-does-kafka-auto-retry-unackno

    5,http://www.kennybastani.com/2016/04/event-sourcing-microservices-spring-cloud.html

    6,使用Spring Cloud和Reactor在微服务中实现Event Sourcing

    7,Spring Kafka Tutorial – Getting Started with the Spring for Apache Kafka

    8,碧桂园旺生活平台解决分布式事务方案之tcc开源框架 https://github.com/yu199195/happylifeplat-tcc

  • 相关阅读:
    第二天续
    使用git提交本地仓库每次需要输入账号密码的问题解决
    第二天
    开启远程之路
    第一天
    第一步了解并且安装配置
    6
    Algorithms
    Algorithms
    Algorithms
  • 原文地址:https://www.cnblogs.com/birdstudio/p/7373057.html
Copyright © 2020-2023  润新知