• asyncio Lock, Queue


    #
    # total = 0
    #
    # async def add():
    #     # 1. do something 1
    #     # 2. IO operation
    #     # 3. do something 3
    #     global total
    #     for i in range(1000000):
    #         total += 1
    #
    # async def desc():
    #     global total
    #     for i in range(1000000):
    #         total -= 1
    #
    # if __name__ == "__main__":
    #     import asyncio
    #     tasks = [add(), desc()]
    #     loop = asyncio.get_event_loop()
    #     loop.run_until_complete(asyncio.wait(tasks))
    #     print(total)
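
    In the commented-out example above there is no await inside either loop, so each coroutine runs to completion before the other gets a turn and total ends up 0 even without a lock; an asyncio.Lock only starts to matter once the read-modify-write spans an await point. A minimal sketch of that case (the add_locked/desc_locked names and the sleep(0) yield are my own, not from the original post):

    import asyncio

    total = 0

    async def add_locked(lock):
        global total
        for i in range(10000):
            async with lock:
                tmp = total
                await asyncio.sleep(0)   # yield to the event loop in the middle of the update
                total = tmp + 1

    async def desc_locked(lock):
        global total
        for i in range(10000):
            async with lock:
                tmp = total
                await asyncio.sleep(0)
                total = tmp - 1

    async def main():
        lock = asyncio.Lock()            # create the lock inside the running loop
        await asyncio.gather(add_locked(lock), desc_locked(lock))
        print(total)                     # 0 with the lock; drop the lock and updates can be lost

    if __name__ == "__main__":
        asyncio.run(main())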
    
    import asyncio
    import aiohttp
    from asyncio import Lock, Queue

    cache = {}
    lock = Lock()
    queue = Queue()   # e.g. url = await queue.get() to pull work off the queue
    # queue = []      # a plain list would do if no flow control / rate limiting is needed

    async def get_stuff(url):
        # the lock serialises fetches: only one coroutine downloads a given URL,
        # the others wait and then find the result already in the cache
        async with lock:
            if url in cache:
                return cache[url]
            async with aiohttp.ClientSession() as session:
                async with session.get(url) as resp:
                    stuff = await resp.text()
            cache[url] = stuff
            return stuff


    async def parse_stuff(url):
        stuff = await get_stuff(url)
        # do some parsing


    async def use_stuff(url):
        stuff = await get_stuff(url)
        # use the fetched stuff

    url = "http://example.com"   # any URL works here
    tasks = [parse_stuff(url), use_stuff(url)]
    # asyncio.get_event_loop().run_until_complete(asyncio.gather(*tasks))
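
    The queue created above is never actually consumed in the snippet. A rough sketch of how an asyncio.Queue plus a fixed pool of worker coroutines could cap how many requests run at once (fetch_worker, crawl, and the worker count are illustrative names/values of my own, not from the original post):

    async def fetch_worker(url_queue):
        # each worker pulls URLs off the queue, so at most `workers` requests run at once
        while True:
            url = await url_queue.get()
            try:
                stuff = await get_stuff(url)   # reuses the lock-guarded, cached fetcher above
                # do something with stuff
            finally:
                url_queue.task_done()

    async def crawl(urls, workers=3):
        url_queue = asyncio.Queue()
        for u in urls:
            url_queue.put_nowait(u)
        worker_tasks = [asyncio.ensure_future(fetch_worker(url_queue)) for _ in range(workers)]
        await url_queue.join()                 # block until every queued URL has been processed
        for t in worker_tasks:
            t.cancel()                         # the workers loop forever, so cancel them when done
        await asyncio.gather(*worker_tasks, return_exceptions=True)

    # asyncio.get_event_loop().run_until_complete(crawl([url]))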
• Original post: https://www.cnblogs.com/Erick-L/p/8939213.html