概述
在 FastAPI 服务中统一日志配置:通过 uvicorn 启动时传入 log_config 加载日志配置后,其他文件内的 logger 也能按同一套配置(格式、文件、按天轮转)输出日志。
做法是在 server.py 之类的入口文件中定义好日志配置并在启动时加载,然后在入口文件和其他被引用文件的文件头写:
import logging
logger = logging.getLogger()
上代码:
server.py
重点在于
uvicorn.run("regulation_standardization_server:app",
            host="0.0.0.0", port=8057,
            log_config=log_config, log_level='debug')
调用中的 log_config=log_config 和 log_level='debug' 这两个参数。
server.py
# -*- coding: UTF-8 -*-
"""=========================================================
@Project -> File: regulation_standardization -> regulation_standardization_server
@IDE: PyCharm
@author: lxc
@date: 2023-10-27 下午 4:34
@Desc:
1-功能描述:
2-实现步骤
1-
"""
from fastapi import FastAPI, File, UploadFile
from text_to_tree import text_to_tree
from utils.log_util import log_config
import os
import uuid
import uvicorn
import traceback
import logging
# Root logger (no name on purpose): uvicorn installs log_config on the root
# logger at startup (see the __main__ block below), so any module that calls
# logging.getLogger() shares the same handlers and log files.
logger = logging.getLogger()
# FastAPI application instance; served by uvicorn.run(...) below.
app = FastAPI()
def parse_file(file_path):
    """Parse the document at *file_path* and return the result dict.

    Thin wrapper around ``text_to_tree``, kept as a seam for adding
    pre/post-processing later.
    """
    return text_to_tree(file_path)
@app.post("/doc_analysis")
async def create_upload_file(file: UploadFile = File(...)):
    """Accept an uploaded document, parse it into a tree and return it.

    Returns a dict:
      errcode: '200' on success, '400' on any failure
      errmsg:  '' on success, error text on failure
      data:    the parse result dict (guaranteed a 'title' key) or '' on failure
    """
    try:
        # Unique file name so concurrent uploads never clash in temp/.
        unique_filename = str(uuid.uuid4())
        file_name, file_extension = os.path.splitext(file.filename)
        file_path = f"temp/{unique_filename}{file_extension}"
        logger.info("当前正在处理文件:【%s】" % file_name)
        # exist_ok avoids the check-then-create race of the original code.
        os.makedirs("temp", exist_ok=True)
        # Persist the upload so the parser can work from a real file path.
        with open(file_path, "wb") as f:
            f.write(await file.read())
        try:
            result = parse_file(file_path)
        finally:
            # BUGFIX: the temp file used to leak when parse_file raised —
            # removal now happens on both success and failure.
            os.remove(file_path)
        # Fall back to the original file name when the parser found no title.
        if not result.get("title"):
            result['title'] = file_name
        return {'errcode': '200', 'errmsg': '', 'data': result}
    except Exception:
        # Log the full traceback through the configured logger so it lands in
        # error-*.log, instead of traceback.print_exc() to stderr only.
        logger.exception("doc_analysis 处理失败")
        return {'errcode': '400', 'errmsg': '解析错误', 'data': ''}
if __name__ == '__main__':
    # The key arguments: log_config installs our dictConfig when uvicorn
    # starts, and log_level='debug' sets uvicorn's own logger verbosity.
    uvicorn.run(
        "regulation_standardization_server:app",
        host="0.0.0.0",
        port=8057,
        log_config=log_config,
        log_level='debug',
    )
utils/log_util.py
# -*- coding: UTF-8 -*-
"""Logging configuration shared by the whole service.

Exposes ``log_config``, a ``logging.config`` dictConfig-schema dict that is
passed to ``uvicorn.run(..., log_config=log_config)``.  The handlers hang off
the root logger and ``disable_existing_loggers`` is False, so a plain
``logging.getLogger()`` in any module writes to the same files.
"""
import os, sys, time

# Directory that receives all log files, relative to the working directory.
log_path = './logs'
# BUGFIX: makedirs(exist_ok=True) replaces the exists()/mkdir() pair — no
# check-then-create race, and missing parent directories are created too.
os.makedirs(log_path, exist_ok=True)

# NOTE(review): each filename embeds the process-start date while
# TimedRotatingFileHandler also rotates at midnight; a long-running process
# therefore keeps writing to the start-date file after rotation. Confirm
# whether one-file-per-day across restarts is the intent.
log_config = {
    "version": 1,
    # Keep loggers created before dictConfig runs (e.g. uvicorn's) alive.
    "disable_existing_loggers": False,
    "formatters": {
        "default": {
            "format": '[%(asctime)s] [%(filename)s:%(lineno)d] [%(module)s:%(funcName)s] '
                      '[%(levelname)s]- %(message)s'
        }
    },
    "handlers": {
        # Everything (DEBUG and up) in one file.
        "all": {
            "level": "DEBUG",
            "class": "logging.handlers.TimedRotatingFileHandler",
            "formatter": "default",
            "filename": os.path.join(log_path, 'all-{}.log'.format(time.strftime('%Y-%m-%d'))),
            "when": "midnight",
            "backupCount": 1,  # keep only one rotated backup
            'encoding': 'utf-8',  # default encoding for log files
        },
        # Errors only, for quick triage.
        'error': {
            'level': 'ERROR',
            'class': 'logging.handlers.TimedRotatingFileHandler',
            'filename': os.path.join(log_path, 'error-{}.log'.format(time.strftime('%Y-%m-%d'))),
            'backupCount': 1,
            'formatter': 'default',  # output format
            'encoding': 'utf-8',  # default encoding for log files
            "when": "midnight",
        },
        # INFO and up, without the DEBUG noise.
        'info': {
            'level': 'INFO',
            'class': 'logging.handlers.TimedRotatingFileHandler',
            'filename': os.path.join(log_path, 'info-{}.log'.format(time.strftime('%Y-%m-%d'))),
            'backupCount': 1,
            'formatter': 'default',
            'encoding': 'utf-8',  # default encoding for log files
            "when": "midnight",
        },
        # Mirror INFO and up to stdout for interactive runs.
        "console_handler": {
            "class": "logging.StreamHandler",
            "level": "INFO",
            "formatter": "default",
            "stream": "ext://sys.stdout"
        }
    },
    # All handlers attach to the root logger so every module's logger inherits.
    "root": {
        "level": "DEBUG",
        "handlers": ["all", "error", "info", "console_handler"],
        'propagate': True
    }
}
其他文件
import logging
logger = logging.getLogger()
...
logger.info(11111111111)
这样配置就完成了,运行后 logs 目录内容如下:
[rhino@localhost]$ ll logs
total 20
-rw-rw-r-- 1 rhino rhino 14974 Dec 21 11:14 all-2023-12-21.log
-rw-rw-r-- 1 rhino rhino 0 Dec 21 11:11 error-2023-12-21.log
-rw-rw-r-- 1 rhino rhino 3392 Dec 21 11:14 info-2023-12-21.log