# NOTE(review): urllib3 / lxml(etree) / requests / queue / threading / re are
# imported but never used in this file; kept as-is in case of external reliance.
import pymysql
import urllib3
from lxml import etree
import logging
import requests
import queue
import time
import threading
from threading import RLock
import re
import multiprocessing

thread_num = 0
lock = RLock()

# Logging setup: INFO and above appended to druginfoError.log.
logging.basicConfig(
    level=logging.INFO,                                            # minimum level written to the file
    format='%(asctime)s %(filename)s %(levelname)s : %(message)s',  # log line layout
    datefmt='%Y-%m-%d %H:%M:%S',                                    # timestamp format
    filename='druginfoError.log',                                   # log file name
    filemode='a')                                                   # append mode


class yaoyuan(object):
    """Load-test helper: fans 1000 single-row INSERTs into the `druginfo`
    database through a pool of 10 worker processes and prints the elapsed time.

    NOTE(review): database credentials are hard-coded below — move them to
    environment variables or a config file before sharing this code. They are
    left unchanged here because altering them would change runtime behavior.
    """

    def __init__(self):
        self.strat_record = 1       # NOTE(review): likely a typo for "start_record"; kept for compatibility
        self.end_record = 10000001
        # self.db = pymysql.connect(host='localhost', port=3306, database='druginfo', user='root', password='mysql', charset='utf8')
        self.db = pymysql.connect(host='rm-bp195i4u0w1066u709o.mysql.rds.aliyuncs.com', port=3306,
                                  database='druginfo', user='qygwroot',
                                  password='kangcenet@123', charset='utf8')
        self.cursor = self.db.cursor()
        # Constructor immediately kicks off the whole run (side effect by design).
        self.parse_page()

    def parse_page(self):
        """Dispatch 1000 insert tasks onto a 10-process pool, wait for them,
        surface any worker errors, and print the wall-clock duration."""
        star_time = time.time()
        mypool = multiprocessing.Pool(10)  # 10 concurrent worker processes
        # Fix: keep the AsyncResult handles. The original discarded them, so
        # any exception raised inside a worker was silently swallowed.
        results = [mypool.apply_async(self.parse_page_data, (i,))
                   for i in range(1000)]
        # Close the pool to new tasks, then block until all tasks finish.
        mypool.close()
        mypool.join()
        for r in results:
            try:
                r.get()  # re-raises the worker's exception here, if any
            except Exception:
                logging.exception('insert task failed')
        times = time.time() - star_time
        print(times)

    def parse_page_data(self, a):
        """Worker body: open a private DB connection (connections cannot be
        shared across processes), insert one test row, commit, and clean up.

        :param a: task index, printed for progress tracing only.
        """
        print(a)
        # self.db = pymysql.connect(host='localhost', port=3306, database='druginfo', user='root', password='mysql', charset='utf8')
        db = pymysql.connect(host='rm-bp195i4u0w1066u709o.mysql.rds.aliyuncs.com', port=3306,
                             database='druginfo', user='qygwroot',
                             password='kangcenet@123', charset='utf8')
        try:
            cursor = db.cursor()
            # Fix: parameterized query instead of str.format-built SQL —
            # avoids SQL injection and quoting bugs; inserted value unchanged.
            cursor.execute("insert into text(name) values(%s)",
                           ('这是一行mysql的测试数据',))
            db.commit()
        finally:
            # Fix: the original leaked one open connection per task.
            db.close()


if __name__ == '__main__':
    a = yaoyuan()