• Python logging: writing to the same log file from multiple processes and threads


    Reference: the official logging cookbook example: https://docs.python.org/zh-cn/3.8/howto/logging-cookbook.html

    import logging
    import logging.config
    import logging.handlers
    from multiprocessing import Process, Queue
    import random
    import threading
    import time
    
    def logger_thread(q):
        while True:
            record = q.get()
            if record is None:
                break
            logger = logging.getLogger(record.name)
            logger.handle(record)
    
    
    def worker_process(q):
        qh = logging.handlers.QueueHandler(q)
        root = logging.getLogger()
        root.setLevel(logging.DEBUG)
        root.addHandler(qh)
        levels = [logging.DEBUG, logging.INFO, logging.WARNING, logging.ERROR,
                  logging.CRITICAL]
        loggers = ['foo', 'foo.bar', 'foo.bar.baz',
                   'spam', 'spam.ham', 'spam.ham.eggs']
        for i in range(100):
            lvl = random.choice(levels)
            logger = logging.getLogger(random.choice(loggers))
            logger.log(lvl, 'Message no. %d', i)
    
    if __name__ == '__main__':
        q = Queue()
        d = {
            'version': 1,
            'formatters': {
                'detailed': {
                    'class': 'logging.Formatter',
                    'format': '%(asctime)s %(name)-15s %(levelname)-8s %(processName)-10s %(message)s'
                }
            },
            'handlers': {
                'console': {
                    'class': 'logging.StreamHandler',
                    'level': 'INFO',
                },
                'file': {
                    'class': 'logging.FileHandler',
                    'filename': 'mplog.log',
                    'mode': 'w',
                    'formatter': 'detailed',
                },
                'foofile': {
                    'class': 'logging.FileHandler',
                    'filename': 'mplog-foo.log',
                    'mode': 'w',
                    'formatter': 'detailed',
                },
                'errors': {
                    'class': 'logging.FileHandler',
                    'filename': 'mplog-errors.log',
                    'mode': 'w',
                    'level': 'ERROR',
                    'formatter': 'detailed',
                },
            },
            'loggers': {
                'foo': {
                    'handlers': ['foofile']
                }
            },
            'root': {
                'level': 'DEBUG',
                'handlers': ['console', 'file', 'errors']
            },
        }
        workers = []
        for i in range(5):
            wp = Process(target=worker_process, name='worker %d' % (i + 1), args=(q,))
            workers.append(wp)
            wp.start()
        logging.config.dictConfig(d)
        lp = threading.Thread(target=logger_thread, args=(q,))
        lp.start()
        # At this point, the main process could do some useful work of its own
        # Once it's done that, it can wait for the workers to terminate...
        for wp in workers:
            wp.join()
        # And now tell the logging thread to finish up, too
        q.put(None)
        lp.join()

    A practical example:

    1. Configure logging with a dictionary

    log_conf_dict = {
        'version': 1,
        'formatters': {
            'my_formatter': {
                'class': 'logging.Formatter',
                'format': '%(asctime)s %(processName)s(%(process)d) %(threadName)s(%(thread)d) %(filename)s[line:%(lineno)d] %(levelname)s %(message)s'
            }
        },
        'handlers': {
            'console': {
                'class': 'logging.StreamHandler',
                'level': 'INFO',
                'formatter': 'my_formatter',
            },
            'file': {
                'class': 'logging.handlers.RotatingFileHandler',
                'filename': '/log/test.log',
                'maxBytes': 5*1024*1024,
                'backupCount': 60,
                'mode': 'w',
                'delay': True,
                'formatter': 'my_formatter',
                'encoding': 'utf-8',
                'level': 'INFO',
            },
        },
        'loggers': {
            'my_logger': {
                'handlers': ['file']
            }
        },
        'root': {
            'level': _level,  # _level is assumed to be defined elsewhere, e.g. 'INFO'
            'handlers': ['console', 'file']
        },
    }
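
    To apply this configuration, pass the dict to logging.config.dictConfig once, early in the main process, and then fetch loggers by name as usual. A minimal sketch, assuming _level (e.g. 'INFO') was assigned before log_conf_dict was defined and that the /log directory used by the file handler exists:

    import logging
    import logging.config

    # _level (e.g. 'INFO') must be assigned before log_conf_dict is defined
    logging.config.dictConfig(log_conf_dict)

    # Any module can now fetch loggers by name; 'my_logger' also writes to the rotating file
    logger = logging.getLogger('my_logger')
    logger.info('logging configured from log_conf_dict')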

    2. Start a dedicated log-writing listener thread in the main process

    queue = Queue(-1)
    logging.config.dictConfig(log_conf_dict)
    log_thread = threading.Thread(target=logger_main, args=(queue,))
    log_thread.start()

    # ... other application logic runs here ...

    # Once the work is done, signal the listener thread to exit and wait for it
    queue.put(None)
    log_thread.join()

    The log-writing function:

    def logger_main(q):
        '''Write queued log records to file via the configured handlers'''
        while True:
            record = q.get()
            if record is None:
                break
            logger = logging.getLogger()
            logger.handle(record)
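
    As an aside, the standard library's logging.handlers.QueueListener runs this same drain loop in a thread of its own, so it could stand in for the hand-written logger_main/threading.Thread pair. A sketch, again assuming log_conf_dict from step 1 is available:

    import logging
    import logging.config
    import logging.handlers
    from multiprocessing import Queue

    queue = Queue(-1)
    logging.config.dictConfig(log_conf_dict)

    # Feed the root logger's handlers to a QueueListener instead of a custom thread
    listener = logging.handlers.QueueListener(
        queue, *logging.getLogger().handlers, respect_handler_level=True)
    listener.start()

    # ... start worker processes that log through QueueHandler(queue) ...

    listener.stop()  # processes any remaining records, then stops the internal thread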

    3. In child processes, route log records into the queue via a QueueHandler

    def child_proc_main(queue):
        lqh = logging.handlers.QueueHandler(queue)
        lqh.set_name("my_queue_handler")

        root = logging.getLogger()
        # A crucial step: clear the existing handlers before adding the queue handler.
        # In a complex multi-process/multi-thread setup the behaviour differs between
        # Windows and Linux: on Linux the child process inherits the main process's
        # logging configuration, which would otherwise make it write to several log
        # files at the same time.
        root.handlers.clear()
        root.addHandler(lqh)
        root.setLevel(level)  # 'level' is assumed to be passed in or defined elsewhere
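
    A minimal end-to-end sketch of how these three pieces could fit together is shown below. It assumes log_conf_dict from step 1 is defined in the same module, that the /log directory referenced by its file handler exists, and that the worker count and log message are purely illustrative.

    import logging
    import logging.config
    import logging.handlers
    import threading
    from multiprocessing import Process, Queue

    def logger_main(q):
        '''Listener thread: drain the queue and dispatch records via the root handlers'''
        while True:
            record = q.get()
            if record is None:
                break
            logging.getLogger().handle(record)

    def child_proc_main(queue, level=logging.INFO):
        '''Child process entry point: replace any inherited handlers with a QueueHandler'''
        root = logging.getLogger()
        root.handlers.clear()  # avoid duplicate output from handlers copied on fork (Linux)
        root.addHandler(logging.handlers.QueueHandler(queue))
        root.setLevel(level)
        logging.getLogger('worker').info('hello from a worker process')

    if __name__ == '__main__':
        queue = Queue(-1)
        logging.config.dictConfig(log_conf_dict)  # the dict from step 1
        log_thread = threading.Thread(target=logger_main, args=(queue,))
        log_thread.start()

        workers = [Process(target=child_proc_main, args=(queue,)) for _ in range(3)]
        for w in workers:
            w.start()
        for w in workers:
            w.join()

        queue.put(None)  # tell the listener thread to finish
        log_thread.join()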
  • Original article: https://www.cnblogs.com/OnlyDreams/p/15923001.html