• io流读取txt文本到mysql数据库


    package com.liujin.cms.kafka;
    
    import org.apache.kafka.clients.consumer.ConsumerRecord;
    import org.springframework.beans.factory.annotation.Autowired;
    import org.springframework.kafka.listener.MessageListener;
    
    import com.alibaba.fastjson.JSON;
    import com.liujin.cms.dao.PlanDao;
    import com.liujin.cms.domain.Plan;
    
    /**
     * Kafka message listener that persists incoming Plan records.
     *
     * <p>Each consumed record's value is expected to be a JSON-serialized
     * {@code Plan}; it is deserialized with fastjson and saved through
     * {@code PlanDao}. Registered as a {@code MessageListener} bean in the
     * Spring Kafka container configuration.
     */
    public class ArticleListener implements MessageListener<String, String> {

        @Autowired
        PlanDao planDao;

        /**
         * Handles one consumed Kafka record: parses the JSON payload into a
         * Plan entity and writes it to the database.
         *
         * @param data the consumed record; its value must be Plan JSON
         */
        @Override
        public void onMessage(ConsumerRecord<String, String> data) {
            System.err.println("接收到了消息");
            String value = data.value();
            // Deserialize the JSON payload into a Plan entity.
            Plan plan = JSON.parseObject(value, Plan.class);
            planDao.save(plan);
            System.err.println("保存成功");
        }

    }
    package com.bw.test;
    
    import java.io.File;
    import java.io.FileInputStream;
    import java.io.FileNotFoundException;
    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;
    
    import org.junit.Test;
    import org.junit.runner.RunWith;
    import org.springframework.beans.factory.annotation.Autowired;
    import org.springframework.data.redis.core.RedisTemplate;
    import org.springframework.kafka.core.KafkaTemplate;
    import org.springframework.test.context.ContextConfiguration;
    import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
    
    import com.alibaba.fastjson.JSON;
    import com.bw.bean.Plan;
    import com.bw.utils.StreamUtil;
    
    @RunWith(SpringJUnit4ClassRunner.class)
    @ContextConfiguration("classpath:producer.xml")
    public class MyTest {
    
    	
    	
    	@Autowired
    	KafkaTemplate<String, String> kafkaTemplate;
    	
    	@SuppressWarnings("unchecked")
    	@Test
    	public void readtest() throws FileNotFoundException {
    		ArrayList<Plan> list = new ArrayList<Plan>();
    		
    		File file = new File("E:/a/data.txt");
    		
    		FileInputStream inputStream = new FileInputStream(file);
    		
    		List<String> readLine = StreamUtil.readLine(inputStream);
    		readLine.remove(0);
    		for (String string : readLine) {
    			Plan plan = new Plan();
    			String[] split = string.split("\|\|");
    	
    			//System.out.println(line);
    			//每个|都要转义
    		
    			String name=split[0];
    			double amount=Double.parseDouble(split[1]);
    			String manager=split[3];
    			String content=split[2];
    			
    			Integer dept_id=null;
    			if ("药厂".equals(split[4])) {
    				dept_id=1;
    			}else if ("准能选煤厂".equals(split[4])) {
    				dept_id=2;
    			}else if ("洗选车间".equals(split[4])) {
    				dept_id=3;
    			}else if ("生产服务中心".equals(split[4])) {
    				dept_id=4;
    			}else if ("矸电公司".contains(split[4])) {
    				dept_id=5;
    			}else if ("大准铁路公司".equals(split[4])) {
    				dept_id=6;
    			}
    		
    			plan.setName(name);
    			plan.setAmount(amount);
    			plan.setManager(manager);
    			plan.setContent(content);
    			plan.setDept_id(dept_id);
    			
    			String jsonString = JSON.toJSONString(plan);
    			kafkaTemplate.send("zhunneng",jsonString);
    			
    			list.add(plan);
    		}
    	
    		for (Plan plan : list) {
    			System.out.println(plan);
    		}
    	}
    }
    

      

  • 相关阅读:
    nginx高级玩法之根据来源ip分流
    ubuntu上的 /dev/loop0 到 /dev/loop18占到100%的处理
    nginx: [warn] conflicting server name "aaa.bbbb.com" on 0.0.0.0:80, ignored
    nginx报警:nginx: [warn] could not build optimal server_names_hash, you should increase either server_names_hash_max_size: 512 or server_names_hash_bucket_size: 64; ignoring server_names_hash_bucket_size
    nginx配置socket连接
    Syntax error: "(" unexpected shell里面的报错解决
    docker批量操作容器
    ubuntu18.04安装docker和开通对外2375端口(方便portainer管理)
    Cadence OrCad Allegro SPB 16.6 下载及安装破解指南
    关于XILINX芯片IO管脚的上拉电阻的疑问
  • 原文地址:https://www.cnblogs.com/tang0125/p/12693679.html
Copyright © 2020-2023  润新知