Skip to content
This repository was archived by the owner on Mar 18, 2026. It is now read-only.

Commit 5fe788e

Browse files
Merge pull request #417 from aibtcdev/logging-json
add logging json
2 parents 6cf9547 + f9d4166 commit 5fe788e

4 files changed

Lines changed: 84 additions & 15 deletions

File tree

app/lib/logger.py

Lines changed: 53 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
1+
import json
12
import logging
23
import os
34
from typing import Optional
@@ -12,6 +13,51 @@
1213
}
1314

1415

16+
# Standard LogRecord attributes that are part of every record rather than
# user-supplied "extra" data.  Hoisted to a module-level frozenset so the
# membership test in format() is O(1) and the list is not rebuilt per key.
# "asctime" and "taskName" (added by asyncio logging on Python 3.12+) are
# included so they are never mistaken for user extras.
_RESERVED_LOG_RECORD_ATTRS = frozenset({
    "name",
    "msg",
    "args",
    "levelname",
    "levelno",
    "pathname",
    "filename",
    "module",
    "exc_info",
    "exc_text",
    "stack_info",
    "lineno",
    "funcName",
    "created",
    "msecs",
    "relativeCreated",
    "thread",
    "threadName",
    "processName",
    "process",
    "message",
    "asctime",
    "taskName",
})


class JSONFormatter(logging.Formatter):
    """JSON formatter that outputs single-line structured logs.

    Each record is rendered as one JSON object containing the timestamp,
    level, logger name and formatted message, plus any user-supplied
    ``extra`` fields and exception/stack text when present.
    """

    def format(self, record):
        """Return *record* serialized as a single-line JSON string."""
        log_entry = {
            "timestamp": self.formatTime(record, self.datefmt),
            "level": record.levelname,
            "logger": record.name,
            "message": record.getMessage(),
        }

        # Add exception info if present
        if record.exc_info:
            log_entry["exception"] = self.formatException(record.exc_info)

        # Add stack info if present (previously excluded from extras but
        # never emitted, so `stack_info=True` log calls lost their traces)
        if record.stack_info:
            log_entry["stack_info"] = self.formatStack(record.stack_info)

        # Add extra fields if present
        for key, value in record.__dict__.items():
            if key not in _RESERVED_LOG_RECORD_ATTRS:
                log_entry[key] = value

        # default=str keeps logging from raising TypeError when an "extra"
        # value is not natively JSON-serializable (datetime, UUID, custom
        # objects, ...) — a crashing formatter would break logging entirely.
        return json.dumps(log_entry, ensure_ascii=False, default=str)
60+
1561
def configure_logger(name: Optional[str] = None) -> logging.Logger:
1662
"""
1763
Configure and return a logger instance with consistent formatting and level.
@@ -34,37 +80,37 @@ def configure_logger(name: Optional[str] = None) -> logging.Logger:
3480
if not logger.handlers:
3581
console_handler = logging.StreamHandler()
3682
console_handler.setLevel(log_level)
37-
formatter = logging.Formatter("%(levelname)s: %(message)s")
83+
formatter = JSONFormatter()
3884
console_handler.setFormatter(formatter)
3985
logger.addHandler(console_handler)
4086

4187
return logger
4288

4389

4490
def setup_uvicorn_logging():
    """Configure uvicorn and other loggers to use JSON formatting.

    Replaces the formatter on every existing uvicorn/fastapi/root handler
    with :class:`JSONFormatter`, and monkey-patches
    ``logging.Logger.addHandler`` so handlers registered later also get the
    JSON formatter.  Safe to call more than once: the patch is applied only
    the first time.
    """
    # Disable uvicorn access logging since we handle it with middleware
    logging.getLogger("uvicorn.access").disabled = True

    # Create a JSON formatter shared by every handler we touch
    json_formatter = JSONFormatter()

    # Get all existing loggers and configure them
    for logger_name in ["uvicorn", "uvicorn.error", "fastapi"]:
        for handler in logging.getLogger(logger_name).handlers:
            handler.setFormatter(json_formatter)

    # Configure root logger
    for handler in logging.getLogger().handlers:
        handler.setFormatter(json_formatter)

    # Set up a hook to catch any new handlers that get added later.
    # Guard against re-patching: a second call would otherwise wrap the
    # already-patched addHandler, growing the call chain on every call.
    if getattr(logging.Logger.addHandler, "_json_formatter_patch", False):
        return

    original_add_handler = logging.Logger.addHandler

    def patched_add_handler(self, hdlr):
        # Force JSON output on any handler added after setup.
        hdlr.setFormatter(json_formatter)
        return original_add_handler(self, hdlr)

    patched_add_handler._json_formatter_patch = True
    logging.Logger.addHandler = patched_add_handler

app/middleware/logging.py

Lines changed: 7 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -37,9 +37,14 @@ async def dispatch(self, request: Request, call_next):
3737
"process_time_ms": round(process_time * 1000, 2),
3838
}
3939

40-
# Log the request/response
40+
# Log the request/response with structured data
4141
logger.info(
42-
f"{request_info['method']} {request_info['path']} - {response_info['status_code']} - {response_info['process_time_ms']}ms"
42+
f"HTTP {request_info['method']} {request_info['path']} - {response_info['status_code']}",
43+
extra={
44+
"request": request_info,
45+
"response": response_info,
46+
"event_type": "http_request",
47+
},
4348
)
4449

4550
return response

app/services/infrastructure/job_management/job_manager.py

Lines changed: 7 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -123,6 +123,7 @@ def _get_job_interval(self, job_type, metadata: JobMetadata) -> int:
123123

124124
async def _execute_job_via_executor(self, job_type: str) -> None:
125125
"""Execute a job through the enhanced executor system with proper concurrency control."""
126+
logger.info(f"🚀 Scheduled execution triggered for job type: {job_type}")
126127
try:
127128
from app.backend.models import QueueMessage, QueueMessageType
128129

@@ -131,6 +132,7 @@ async def _execute_job_via_executor(self, job_type: str) -> None:
131132

132133
# Convert job_type string to JobType enum
133134
job_type_enum = JobType.get_or_create(job_type)
135+
logger.debug(f"Converted job type '{job_type}' to enum: {job_type_enum}")
134136

135137
logger.debug(f"🔍 Checking if {job_type} has work to do...")
136138

@@ -172,21 +174,22 @@ async def _execute_job_via_executor(self, job_type: str) -> None:
172174
"triggered_at": str(datetime.now()),
173175
},
174176
dao_id=None,
175-
tweet_id=None,
176-
conversation_id=None,
177+
wallet_id=None,
177178
is_processed=False,
178179
result=None,
179180
created_at=datetime.now(),
180-
updated_at=datetime.now(),
181181
)
182182

183183
# Enqueue the synthetic message with the job's priority
184+
logger.debug(
185+
f"Enqueuing job {job_type} to executor with priority {metadata.priority}"
186+
)
184187
job_id = await self._executor.priority_queue.enqueue(
185188
synthetic_message, metadata.priority
186189
)
187190

188191
logger.info(
189-
f"✅ Queued {job_type} job {job_id} for execution (priority: {metadata.priority})"
192+
f"✅ Enqueued scheduled job '{job_type}' with ID {job_id} (priority: {metadata.priority})"
190193
)
191194

192195
except Exception as e:

app/services/infrastructure/startup_service.py

Lines changed: 17 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -91,19 +91,34 @@ async def start_enhanced_job_system(self) -> None:
9191
raise RuntimeError("Job system initialization failed")
9292

9393
# Schedule jobs with the scheduler
94+
logger.info("Attempting to schedule jobs with the job manager...")
9495
any_jobs_scheduled = self.job_manager.schedule_jobs(self.scheduler)
9596
if any_jobs_scheduled:
9697
# Start the scheduler
98+
logger.info("Starting APScheduler with scheduled jobs...")
9799
self.scheduler.start()
98100
logger.info("Job scheduler started successfully")
101+
102+
# Log scheduler status
103+
jobs = self.scheduler.get_jobs()
104+
logger.info(
105+
f"APScheduler is running with {len(jobs)} active job schedules:"
106+
)
107+
for job in jobs:
108+
logger.info(f" - Job ID: {job.id}, next run: {job.next_run_time}")
99109
else:
100-
logger.warning("No jobs were scheduled")
110+
logger.warning("No jobs were scheduled - scheduler will not be started")
101111

102112
# Start the job executor
113+
logger.info("Starting job executor for background job processing...")
103114
await self.job_manager.start_executor()
104-
logger.info("Enhanced job manager executor started successfully")
115+
executor_stats = self.job_manager.get_executor_stats()
116+
logger.info(
117+
f"Enhanced job executor started with {executor_stats.get('worker_count', 0)} workers"
118+
)
105119

106120
# Start system metrics collection
121+
logger.info("Starting system metrics monitoring...")
107122
await system_metrics.start_monitoring()
108123
logger.info("System metrics monitoring started")
109124

0 commit comments

Comments (0)