A practical example of Python logging: multiple processes and threads writing to the same log file

Reference (the official cookbook example): https://docs.python.org/zh-cn/3.8/howto/logging-cookbook.html

import logging
import logging.config
import logging.handlers
from multiprocessing import Process, Queue
import random
import threading
import time

def logger_thread(q):
    while True:
        record = q.get()
        if record is None:
            break
        logger = logging.getLogger(record.name)
        logger.handle(record)


def worker_process(q):
    qh = logging.handlers.QueueHandler(q)
    root = logging.getLogger()
    root.setLevel(logging.DEBUG)
    root.addHandler(qh)
    levels = [logging.DEBUG, logging.INFO, logging.WARNING, logging.ERROR,
              logging.CRITICAL]
    loggers = ['foo', 'foo.bar', 'foo.bar.baz',
               'spam', 'spam.ham', 'spam.ham.eggs']
    for i in range(100):
        lvl = random.choice(levels)
        logger = logging.getLogger(random.choice(loggers))
        logger.log(lvl, 'Message no. %d', i)


if __name__ == '__main__':
    q = Queue()
    d = {
        'version': 1,
        'formatters': {
            'detailed': {
                'class': 'logging.Formatter',
                'format': '%(asctime)s %(name)-15s %(levelname)-8s %(processName)-10s %(message)s'
            }
        },
        'handlers': {
            'console': {
                'class': 'logging.StreamHandler',
                'level': 'INFO',
            },
            'file': {
                'class': 'logging.FileHandler',
                'filename': 'mplog.log',
                'mode': 'w',
                'formatter': 'detailed',
            },
            'foofile': {
                'class': 'logging.FileHandler',
                'filename': 'mplog-foo.log',
                'mode': 'w',
                'formatter': 'detailed',
            },
            'errors': {
                'class': 'logging.FileHandler',
                'filename': 'mplog-errors.log',
                'mode': 'w',
                'level': 'ERROR',
                'formatter': 'detailed',
            },
        },
        'loggers': {
            'foo': {
                'handlers': ['foofile']
            }
        },
        'root': {
            'level': 'DEBUG',
            'handlers': ['console', 'file', 'errors']
        },
    }
    workers = []
    for i in range(5):
        wp = Process(target=worker_process, name='worker %d' % (i + 1), args=(q,))
        workers.append(wp)
        wp.start()
    logging.config.dictConfig(d)
    lp = threading.Thread(target=logger_thread, args=(q,))
    lp.start()
    # At this point, the main process could do some useful work of its own
    # Once it's done that, it can wait for the workers to terminate...
    for wp in workers:
        wp.join()
    # And now tell the logging thread to finish up, too
    q.put(None)
    lp.join()

Practical example:
1. Configure logging via a dictionary (a short sketch of applying it follows the config below)
log_conf_dict = {
    'version': 1,
    'formatters': {
        'my_formatter': {
            'class': 'logging.Formatter',
            'format': '%(asctime)s %(processName)s(%(process)d) %(threadName)s(%(thread)d) %(filename)s[line:%(lineno)d] %(levelname)s %(message)s'
        }
    },
    'handlers': {
        'console': {
            'class': 'logging.StreamHandler',
            'level': 'INFO',
            'formatter': 'my_formatter',
        },
        'file': {
            'class': 'logging.handlers.RotatingFileHandler',
            'filename': '/log/test.log',
            'maxBytes': 5*1024*1024,
            'backupCount': 60,
            'mode': 'w',
            'delay': True,
            'formatter': 'my_formatter',
            'encoding': 'utf-8',
            'level': 'INFO',
        },
    },
    'loggers': {
        'my_logger': {
            'handlers': ['file']
        }
    },
    'root': {
        'level': _level,
        'handlers': ['console', 'file']
    },
}
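The 'root' level above references a _level variable that the original code defines elsewhere. A minimal sketch of applying this configuration, assuming _level is simply a level name such as 'INFO' (the message text below is illustrative only):

import logging
import logging.config

_level = 'INFO'      # assumed value for the _level placeholder used in log_conf_dict
# ... log_conf_dict as defined above ...
logging.config.dictConfig(log_conf_dict)                 # apply the dict-based configuration
logging.getLogger(__name__).info('logging configured')   # handled by the root handlers (console + file)

Note that the 'file' handler writes to /log/test.log, so that directory must already exist; 'delay': True only postpones opening the file until the first record reaches the handler.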

2. Start a dedicated log-writing listener thread in the main process
"""主进程中开启独立的日志写入监听线程"""queue = Queue(-1)logging.config.dictConfig(dict)log_thread = threading.Thread(target=logger_main, args=(queue,))log_thread.start()"""其他逻辑代码段"""queue.put(None)log_thread.join()

The log-writing function:
def logger_main(q):
    """Write log records from the queue to the configured handlers."""
    while True:
        record = q.get()
        if record is None:        # sentinel value: stop listening
            break
        logger = logging.getLogger()
        logger.handle(record)     # dispatch the record through the root logger's handlers

3. In the child process, send log output into the QueueHandler queue
def child_proc_main(queue):
    lqh = logging.handlers.QueueHandler(queue)
    lqh.set_name("my_queue_handler")
    root = logging.getLogger()
    # A critical step: the existing handlers must be cleared before adding the queue
    # handler. Reason: in a complex multi-process, multi-threaded environment the
    # behaviour differs between Windows and Linux. On Linux the child process inherits
    # a copy of the main process's logging configuration, which would cause records to
    # be written to several log files at the same time.
    root.handlers.clear()
    root.addHandler(lqh)
    root.setLevel(level)
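For completeness, here is a minimal sketch of how the three steps above could be wired together. The glue code (the worker function, the process count, the trimmed console-only config and the message text) is an assumption for illustration and is not part of the original article:

import logging
import logging.config
import logging.handlers
import threading
from multiprocessing import Process, Queue

# Trimmed, console-only stand-in for the step-1 dict (assumed, to keep the demo self-contained)
log_conf_dict = {
    'version': 1,
    'formatters': {'my_formatter': {
        'format': '%(asctime)s %(processName)s(%(process)d) %(levelname)s %(message)s'}},
    'handlers': {'console': {'class': 'logging.StreamHandler',
                             'formatter': 'my_formatter', 'level': 'INFO'}},
    'root': {'level': 'INFO', 'handlers': ['console']},
}

level = logging.INFO   # assumed value for the level used in child_proc_main

def child_proc_main(queue):
    # Step 3: route every record produced in this child process into the queue
    lqh = logging.handlers.QueueHandler(queue)
    lqh.set_name("my_queue_handler")
    root = logging.getLogger()
    root.handlers.clear()   # drop handlers inherited from the parent (fork on Linux)
    root.addHandler(lqh)
    root.setLevel(level)

def worker(queue):
    # Hypothetical worker: set up queue-based logging, then do some work and log
    child_proc_main(queue)
    logging.getLogger('my_logger').info('hello from %s', 'a worker')

def logger_main(q):
    # Step 2: the listener loop that writes queued records via the main-process handlers
    while True:
        record = q.get()
        if record is None:
            break
        logging.getLogger().handle(record)

if __name__ == '__main__':
    queue = Queue(-1)
    logging.config.dictConfig(log_conf_dict)   # step 1: configure the main process only
    log_thread = threading.Thread(target=logger_main, args=(queue,))
    log_thread.start()

    workers = [Process(target=worker, args=(queue,)) for _ in range(3)]
    for p in workers:
        p.start()
    for p in workers:
        p.join()

    queue.put(None)   # sentinel: stop the logging thread
    log_thread.join()

In the full version, log_conf_dict would be the rotating-file configuration from step 1, so every worker's records end up in the same /log/test.log through the single listener thread in the main process.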

This concludes the article on using Python logging to write output from multiple processes and threads to the same log file. For more on Python logging and log files, search 脚本之家's earlier articles or continue browsing the related articles below, and please keep supporting 脚本之家!
