apv

- 🐍 advanced python logging 📔
git clone git://git.acid.vegas/apv.git

file.py (3031B)

import logging
import logging.handlers
import json
import os
import gzip

class GZipRotatingFileHandler(logging.handlers.RotatingFileHandler):
    '''RotatingFileHandler that compresses rotated log files using gzip.'''

    def doRollover(self):
        '''Rotate the log files, then compress the newest backup with gzip.'''
        if self.backupCount > 0:
            # Shift existing compressed backups (.1.gz -> .2.gz, ...) first so the
            # newly compressed backup does not overwrite them
            for i in range(self.backupCount - 1, 0, -1):
                sfn = f'{self.baseFilename}.{i}.gz'
                dfn = f'{self.baseFilename}.{i + 1}.gz'
                if os.path.exists(sfn):
                    if os.path.exists(dfn):
                        os.remove(dfn)
                    os.rename(sfn, dfn)
        super().doRollover()
        # Compress the freshly rotated plain-text backup and remove the original
        sfn = f'{self.baseFilename}.1'
        if os.path.exists(sfn):
            with open(sfn, 'rb') as f_in, gzip.open(f'{sfn}.gz', 'wb') as f_out:
                f_out.writelines(f_in)
            os.remove(sfn)

class JsonFormatter(logging.Formatter):
    '''Formatter that serializes each log record as a single JSON object.'''

    def __init__(self, date_format):
        super().__init__()
        self.date_format = date_format

    def format(self, record):
        log_record = {
            'time'        : self.formatTime(record, self.date_format),
            'level'       : record.levelname,
            'module'      : record.module,
            'function'    : record.funcName,
            'line'        : record.lineno,
            'message'     : record.getMessage(),
            'name'        : record.name,
            'filename'    : record.filename,
            'threadName'  : record.threadName,
            'processName' : record.processName,
        }
        return json.dumps(log_record)

def setup_file_handler(level_num: int, log_to_disk: bool, max_log_size: int,
                       max_backups: int, log_file_name: str, json_log: bool,
                       ecs_log: bool, date_format: str, compress_backups: bool):
    '''Set up the file handler for logging to disk.'''
    if not log_to_disk:
        return

    # Create the 'logs' directory if it doesn't exist
    logs_dir = os.path.join(os.getcwd(), 'logs')
    os.makedirs(logs_dir, exist_ok=True)

    # Use the specified log file name and set the extension based on json_log
    file_extension = '.json' if json_log else '.log'
    log_file_path = os.path.join(logs_dir, f'{log_file_name}{file_extension}')

    # Create the rotating file handler, optionally compressing rotated backups
    handler_class = GZipRotatingFileHandler if compress_backups else logging.handlers.RotatingFileHandler
    file_handler = handler_class(log_file_path, maxBytes=max_log_size, backupCount=max_backups)
    file_handler.setLevel(level_num)

    if ecs_log:
        try:
            import ecs_logging
        except ImportError:
            raise ImportError("The 'ecs-logging' library is required for ECS logging. Install it with 'pip install ecs-logging'.")
        file_formatter = ecs_logging.StdlibFormatter()
    elif json_log:
        file_formatter = JsonFormatter(date_format)
    else:
        file_formatter = logging.Formatter(
            fmt='%(asctime)s ┃ %(levelname)-8s ┃ %(module)s ┃ %(funcName)s ┃ %(lineno)d ┃ %(message)s',
            datefmt=date_format
        )

    file_handler.setFormatter(file_formatter)
    logging.getLogger().addHandler(file_handler)
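
For reference, a minimal usage sketch (not part of file.py), assuming the file is importable as a module named apv and using an illustrative log file name and date format:

import logging

import apv  # hypothetical import name for the module above; adjust to your project layout

# The root logger must allow DEBUG records through for the handler level to take effect
logging.getLogger().setLevel(logging.DEBUG)

# Write DEBUG and above to logs/app.json, rotating at 1 MB with 3 gzip-compressed backups
apv.setup_file_handler(
    level_num=logging.DEBUG,
    log_to_disk=True,
    max_log_size=1024 * 1024,
    max_backups=3,
    log_file_name='app',  # illustrative name -> logs/app.json
    json_log=True,
    ecs_log=False,
    date_format='%Y-%m-%d %H:%M:%S',
    compress_backups=True
)

logging.getLogger(__name__).info('File logging configured')

With json_log=True each record lands in logs/app.json as one JSON object per line; with compress_backups=True, rotated backups are kept as app.json.1.gz, app.json.2.gz, and so on, up to max_backups.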