-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathlogging_config.py
More file actions
93 lines (71 loc) · 2.62 KB
/
logging_config.py
File metadata and controls
93 lines (71 loc) · 2.62 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
"""Logging configuration for DTD Pipeline."""
import json
import logging
import sys
from datetime import datetime, timezone
from pathlib import Path
from typing import Any
class JSONFormatter(logging.Formatter):
    """Formatter that renders each log record as one JSON object.

    Emits a single JSON document per record (suitable for ``.jsonl`` files)
    containing timestamp, level, logger name, and message, plus the formatted
    exception when present and any dict attached to the record as
    ``extra_data``.
    """

    def format(self, record: logging.LogRecord) -> str:
        """Serialize *record* to a JSON string."""
        log_data: dict[str, Any] = {
            # Use a timezone-aware UTC timestamp; datetime.utcnow() is naive
            # and deprecated since Python 3.12. Normalize the "+00:00" offset
            # to the "Z" suffix the previous implementation emitted.
            "timestamp": datetime.now(timezone.utc)
            .isoformat()
            .replace("+00:00", "Z"),
            "level": record.levelname,
            "logger": record.name,
            "message": record.getMessage(),
        }
        if record.exc_info:
            log_data["exception"] = self.formatException(record.exc_info)
        # Callers may attach structured context via a record.extra_data dict.
        if hasattr(record, "extra_data"):
            log_data.update(record.extra_data)
        return json.dumps(log_data)
class ContextAdapter(logging.LoggerAdapter):
    """Logger adapter that injects fixed context into every log record.

    The context dict supplied at construction time is merged into each
    call's ``extra`` mapping; on key conflicts the adapter's context wins
    (same precedence as the original implementation).
    """

    def process(self, msg: str, kwargs: dict) -> tuple[str, dict]:
        """Merge the adapter context into ``kwargs['extra']``.

        Builds a fresh dict instead of calling ``.update()`` on the dict the
        caller passed in — the previous version mutated the caller's mapping
        as a side effect.
        """
        kwargs["extra"] = {**kwargs.get("extra", {}), **self.extra}
        return msg, kwargs
def setup_logging(logs_dir: Path, level: int = logging.INFO) -> None:
    """Configure root logging: console, plain-text file, and JSON error file.

    Replaces any handlers already installed on the root logger, so repeated
    calls do not duplicate output.

    Args:
        logs_dir: Directory for log files (created if missing).
        level: Minimum level for the console and ``pipeline.log`` handlers.
    """
    logs_dir.mkdir(parents=True, exist_ok=True)

    # Root logger: all handlers hang off it.
    root_logger = logging.getLogger()
    root_logger.setLevel(level)
    # Clear existing handlers from any previous setup.
    root_logger.handlers.clear()

    # Shared human-readable format for console and pipeline.log.
    console_formatter = logging.Formatter(
        "%(asctime)s | %(levelname)-8s | %(name)s | %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S",
    )

    # Console handler — human readable.
    console_handler = logging.StreamHandler(sys.stdout)
    console_handler.setLevel(level)
    console_handler.setFormatter(console_formatter)
    root_logger.addHandler(console_handler)

    # File handler — same human-readable format. Explicit UTF-8 so log text
    # round-trips regardless of the platform's default encoding.
    file_handler = logging.FileHandler(logs_dir / "pipeline.log", encoding="utf-8")
    file_handler.setLevel(level)
    file_handler.setFormatter(console_formatter)
    root_logger.addHandler(file_handler)

    # Error file handler — one JSON object per line for structured analysis.
    error_handler = logging.FileHandler(logs_dir / "errors.jsonl", encoding="utf-8")
    error_handler.setLevel(logging.ERROR)
    error_handler.setFormatter(JSONFormatter())
    root_logger.addHandler(error_handler)
def get_logger(name: str, **context: Any) -> ContextAdapter:
    """Return a named logger wrapped in a :class:`ContextAdapter`.

    Args:
        name: Logger name (typically ``__name__``).
        **context: Key/value pairs attached to every record this logger emits.

    Returns:
        A ContextAdapter carrying *context*.
    """
    return ContextAdapter(logging.getLogger(name), context)