• python-day37 (formal study)


    Recap

    Optimizing the ticket-grabbing system code by using the Lock class:

    from multiprocessing import Process, Lock
    import time, json
    
    # initialise the ticket file with 2 tickets available
    with open('user', 'w', encoding='utf-8') as f:
        dic = {'count': 2}
        json.dump(dic, f)
    
    def search():
        # query the remaining ticket count (read-only, no lock needed)
        with open('user', 'r', encoding='utf-8') as f:
            data = json.load(f)
            print(data.get('count'))
    
    def get():
        # buy a ticket: read, check, then write back the decremented count
        with open('user', 'r', encoding='utf-8') as f:
            data = json.load(f)
        if data['count'] > 0:
            data['count'] -= 1
            with open('user', 'w', encoding='utf-8') as f:
                json.dump(data, f)
                print('success')
                time.sleep(1)
        else:
            print('Tickets sold out')
    
    def piao(lock):
        search()
        # only one process at a time may run the read-modify-write in get()
        lock.acquire()
        get()
        lock.release()
    
    if __name__ == '__main__':
        lock = Lock()
        for i in range(5):
            p = Process(target=piao, args=(lock,))
            p.start()
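
    Lock also supports the context-manager protocol, so the acquire/release pair in piao() can be written with a with block that releases the lock even if get() raises. A minimal sketch of that variant:

    def piao(lock):
        search()
        with lock:      # acquired here, released automatically on exit
            get()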
    

    Queues

    1

    from multiprocessing import Queue
    
    q = Queue()
    q.put(1)
    print(q)                # prints the Queue object itself, not its contents
    data = q.get()
    print(data)             # 1
    data = q.get()          # the queue is now empty: get() blocks here forever by default
    print(data)             # never reached
    q.put(5)                # never reached either
    

    2

    from multiprocessing import Queue
    
    q = Queue(4)            # maxsize of 4
    q.put(1)
    q.put(5)
    q.put(5)
    q.put(5)
    q.put(5)                # the queue is full, so this put() blocks here
    q.get()                 # never reached
    q.get()
    

    3

    from multiprocessing import Queue
    
    q = Queue(4)
    q.put(1)
    q.put(5)
    q.put(5)
    q.put(5)
    # the queue is full: block=True means wait, timeout=3 means wait at most
    # 3 seconds, after which queue.Full is raised
    q.put(5, block=True, timeout=3)
    q.get()                 # never reached if the put above raises
    q.get()
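
    The error raised when the timeout expires is queue.Full from the standard queue module, so the blocking put can be wrapped in a try/except instead of crashing the program. A minimal sketch (the maxsize and timeout values are illustrative):

    import queue
    from multiprocessing import Queue
    
    q = Queue(1)
    q.put(1)
    try:
        q.put(2, block=True, timeout=3)
    except queue.Full:
        print('queue is still full after waiting 3 seconds')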
    

    4

    from multiprocessing import Queue
    
    q = Queue(4)
    q.put(1)
    q.get()
    # the queue is now empty: block=True means wait, timeout=3 means wait at most
    # 3 seconds, after which queue.Empty is raised
    q.get(block=True, timeout=3)
    

    5

    from multiprocessing import Queue
    
    q = Queue(1)
    q.put(1)
    # the queue is full: put_nowait() does not block, it raises queue.Full
    # immediately (equivalent to put(5, block=False))
    q.put_nowait(5)
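
    get() has the same non-blocking counterpart, get_nowait(), which raises queue.Empty instead of waiting. A minimal sketch showing both exceptions being handled:

    import queue
    from multiprocessing import Queue
    
    q = Queue(1)
    try:
        q.put_nowait(1)
        q.put_nowait(2)          # maxsize=1, so the queue is already full
    except queue.Full:
        print('full, not waiting')
    print(q.get_nowait())        # 1
    try:
        q.get_nowait()           # the queue is now empty
    except queue.Empty:
        print('empty, not waiting')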
    

    Producer-Consumer Model

    Version 1

    from multiprocessing import Queue, Process
    import time
    
    def produce(q, name, msg):
        for i in range(3):
            q.put(msg + str(i))
            print(f'{name} produced {msg + str(i)}')
            time.sleep(1)
        q.put(None)             # None is the sentinel that tells a consumer to stop
    
    def cost(q, name):
        while True:
            msg = q.get()
            if msg is None:     # sentinel received: stop consuming
                break
            print(f'{name} ate {msg}')
            time.sleep(1)
    
    if __name__ == '__main__':
        q = Queue()
        p1 = Process(target=produce, args=(q, 'wind', 'card'))
        p2 = Process(target=produce, args=(q, 'nick', 'niunai'))
        c1 = Process(target=cost, args=(q, 'chanyuli'))
        c2 = Process(target=cost, args=(q, 'zhongshifu'))
        p1.start()
        p2.start()
        c1.start()
        c2.start()
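
    This version only terminates cleanly because there happen to be as many producers as consumers, so each consumer receives exactly one None sentinel. A more general pattern (a sketch, assuming produce() no longer puts the sentinel itself) is to let the main process wait for the producers and then put one None per consumer:

    if __name__ == '__main__':
        q = Queue()
        producers = [Process(target=produce, args=(q, 'wind', 'card')),
                     Process(target=produce, args=(q, 'nick', 'niunai'))]
        consumers = [Process(target=cost, args=(q, 'chanyuli')),
                     Process(target=cost, args=(q, 'zhongshifu'))]
        for p in producers + consumers:
            p.start()
        for p in producers:
            p.join()            # wait until all producers have finished putting
        for _ in consumers:
            q.put(None)         # one sentinel per consumer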
    

    Version 2

    from multiprocessing import Process, JoinableQueue
    import time
    
    def produce(q, name, msg):
        for i in range(3):
            q.put(msg + str(i))
            print(f'{name} produced {msg + str(i)}')
            time.sleep(1)
    
    def cost(q, name):
        while True:
            msg = q.get()
            print(f'{name} ate {msg}')
            time.sleep(1)
            q.task_done()       # mark the item done only after it has been processed
    
    if __name__ == '__main__':
        q = JoinableQueue()
        p1 = Process(target=produce, args=(q, 'wind', 'card'))
        p2 = Process(target=produce, args=(q, 'nick', 'niunai'))
        c1 = Process(target=cost, args=(q, 'chanyuli'))
        c2 = Process(target=cost, args=(q, 'zhongshifu'))
        c1.daemon = True        # daemon consumers are terminated when the main process exits
        c2.daemon = True
        p1.start()
        p2.start()
        c1.start()
        c2.start()
        p1.join()               # wait until both producers have put everything
        p2.join()
        q.join()                # wait until task_done() has been called for every item