# logger.py — application logging configuration module
# (header reconstructed; the original lines were extraction residue:
# a filename/size banner and a run of concatenated line numbers)
  1. """
  2. 日志配置和功能模块
  3. 提供应用程序的日志记录功能
  4. """
  5. import os
  6. import logging
  7. import gzip
  8. import shutil
  9. import glob
  10. from logging.handlers import RotatingFileHandler, TimedRotatingFileHandler
  11. import time
  12. from typing import Optional, IO, Any
  13. # 添加处理ID过滤器
  14. class ProcessIdFilter(logging.Filter):
  15. def __init__(self):
  16. super().__init__()
  17. self.process_id = None
  18. def filter(self, record):
  19. if not hasattr(record, 'process_id'):
  20. if self.process_id is None:
  21. # 生成唯一的处理ID
  22. self.process_id = int(time.time() * 1000) % 10000
  23. record.process_id = f"PROC-{self.process_id:04d}"
  24. return True
  25. # 自定义日志处理器,支持压缩
  26. class CompressedRotatingFileHandler(RotatingFileHandler):
  27. def __init__(self, filename, mode='a', maxBytes=0, backupCount=0, encoding=None, delay=False):
  28. super().__init__(filename, mode, maxBytes, backupCount, encoding, delay)
  29. def doRollover(self):
  30. """
  31. 执行日志滚动时,压缩旧的日志文件
  32. """
  33. # 关闭流
  34. if self.stream:
  35. self.stream.close()
  36. self.stream = None # type: ignore
  37. if self.backupCount > 0:
  38. # 移动旧的日志文件
  39. for i in range(self.backupCount - 1, 0, -1):
  40. sfn = f"{self.baseFilename}.{i}"
  41. dfn = f"{self.baseFilename}.{i + 1}"
  42. if os.path.exists(sfn):
  43. if os.path.exists(dfn):
  44. os.remove(dfn)
  45. os.rename(sfn, dfn)
  46. dfn = f"{self.baseFilename}.1"
  47. if os.path.exists(dfn):
  48. os.remove(dfn)
  49. # 压缩当前日志文件
  50. try:
  51. with open(self.baseFilename, 'rb') as f_in:
  52. with gzip.open(f"{dfn}.gz", 'wb') as f_out:
  53. shutil.copyfileobj(f_in, f_out)
  54. except Exception:
  55. # 如果压缩失败,回退到普通复制
  56. shutil.copy2(self.baseFilename, dfn)
  57. # 重新打开流
  58. if not self.delay:
  59. self.stream = self._open()
  60. # 自定义TimedRotatingFileHandler,支持压缩
  61. class CompressedTimedRotatingFileHandler(TimedRotatingFileHandler):
  62. def __init__(self, filename, when='h', interval=1, backupCount=0, encoding=None, delay=False, utc=False, atTime=None):
  63. super().__init__(filename, when, interval, backupCount, encoding, delay, utc, atTime)
  64. def doRollover(self):
  65. """
  66. 执行日志滚动时,压缩旧的日志文件
  67. """
  68. # 执行标准的滚动
  69. super().doRollover()
  70. # 查找最新创建的备份文件并压缩
  71. backup_files = glob.glob(f"{self.baseFilename}.*")
  72. for backup_file in backup_files:
  73. if not backup_file.endswith('.gz') and os.path.isfile(backup_file):
  74. try:
  75. with open(backup_file, 'rb') as f_in:
  76. with gzip.open(f"{backup_file}.gz", 'wb') as f_out:
  77. shutil.copyfileobj(f_in, f_out)
  78. os.remove(backup_file) # 删除原始未压缩文件
  79. except Exception:
  80. # 压缩失败时不做处理,保留原始文件
  81. pass
  82. def clean_old_logs(log_folder, max_days=30):
  83. """
  84. 清理超过指定天数的日志文件
  85. 参数:
  86. log_folder: 日志目录
  87. max_days: 保留的最大天数
  88. """
  89. try:
  90. current_time = time.time()
  91. max_age = max_days * 86400 # 转换为秒
  92. for file in os.listdir(log_folder):
  93. file_path = os.path.join(log_folder, file)
  94. if os.path.isfile(file_path) and file.startswith('word_processor.log.'):
  95. file_age = current_time - os.path.getmtime(file_path)
  96. if file_age > max_age:
  97. os.remove(file_path)
  98. except Exception as e:
  99. # 清理过程中的错误不应影响主程序
  100. pass
  101. def setup_logger(log_folder='logs', log_level=logging.INFO, max_size_mb=10, backup_count=30):
  102. """
  103. 配置日志记录器
  104. 参数:
  105. log_folder: 日志保存目录
  106. log_level: 日志级别
  107. max_size_mb: 单个日志文件的最大大小(MB)
  108. backup_count: 保留的备份文件数量
  109. 返回:
  110. logger: 配置好的日志记录器
  111. """
  112. # 确保日志目录存在
  113. os.makedirs(log_folder, exist_ok=True)
  114. # 创建日志记录器
  115. logger = logging.getLogger('word_processor')
  116. logger.setLevel(log_level)
  117. # 如果已经有处理器,则不再添加
  118. if logger.handlers:
  119. return logger
  120. # 日志格式 - 优化格式,添加处理ID以便跟踪单次操作
  121. log_format = logging.Formatter(
  122. '%(asctime)s [%(levelname)s] [%(process_id)s] %(message)s',
  123. datefmt='%Y-%m-%d %H:%M:%S'
  124. )
  125. # 创建处理ID过滤器实例
  126. process_id_filter = ProcessIdFilter()
  127. # 文件处理器 - 同时基于大小和时间滚动
  128. log_file = os.path.join(log_folder, 'word_processor.log')
  129. # 基于大小的处理器
  130. size_handler = CompressedRotatingFileHandler(
  131. log_file,
  132. maxBytes=max_size_mb * 1024 * 1024, # 转换为字节
  133. backupCount=5, # 保留5个基于大小的备份
  134. encoding='utf-8'
  135. )
  136. size_handler.setFormatter(log_format)
  137. size_handler.setLevel(log_level)
  138. size_handler.addFilter(process_id_filter)
  139. # 基于时间的处理器
  140. time_handler = CompressedTimedRotatingFileHandler(
  141. log_file,
  142. when='midnight',
  143. interval=1,
  144. backupCount=backup_count, # 保留指定天数的日志
  145. encoding='utf-8'
  146. )
  147. time_handler.setFormatter(log_format)
  148. time_handler.setLevel(log_level)
  149. time_handler.addFilter(process_id_filter)
  150. # 控制台处理器
  151. console_handler = logging.StreamHandler()
  152. console_handler.setFormatter(log_format)
  153. console_handler.setLevel(log_level)
  154. console_handler.addFilter(process_id_filter)
  155. # 添加处理器到记录器
  156. logger.addHandler(size_handler)
  157. logger.addHandler(time_handler)
  158. logger.addHandler(console_handler)
  159. # 清理旧日志
  160. clean_old_logs(log_folder, backup_count)
  161. return logger
  162. # 初始化日志记录器 - 可以根据环境设置不同的级别
  163. # 生产环境推荐 logging.INFO 或 logging.WARNING
  164. # 开发环境可以使用 logging.DEBUG
  165. import os
  166. env = os.environ.get('FLASK_ENV', 'production')
  167. log_level = logging.INFO if env == 'production' else logging.DEBUG
  168. max_size_mb = 10 # 10MB
  169. backup_count = 30 # 30天
  170. logger = setup_logger(log_level=log_level, max_size_mb=max_size_mb, backup_count=backup_count)
  171. # 重置处理ID,用于跟踪新的请求
  172. def reset_process_id():
  173. for handler in logger.handlers:
  174. for filter_obj in handler.filters:
  175. if isinstance(filter_obj, ProcessIdFilter):
  176. filter_obj.process_id = None