Skip to content

Commit b4e79be

Browse files
committed
Store incoming sqlmap webhook log entries in the database. Requires a dedicated sqlmap instance.
1 parent c5b62c9 commit b4e79be

2 files changed

Lines changed: 43 additions & 9 deletions

File tree

app/apis/sqlmap_api.py

Lines changed: 10 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -107,18 +107,19 @@ async def delete_task(task_id: str):
107107
@router.post("/tasks/webhook/log")
async def receive_sqlmap_log(request: Request):
    """Webhook endpoint: receive sqlmap log entries and persist them.

    Accepts either a single log object (dict) or a batch (list of dicts)
    as the JSON request body, normalizes it to a list, and stores the
    entries via ``sqlmap_task.save_sqlmap_logs``.

    Returns:
        dict: ``{"success": True, "message": "Log stored successfully"}``.

    Raises:
        HTTPException: 400 when the body is not valid JSON, is neither a
            dict nor a list, or storage fails.
    """
    try:
        # Parse the JSON body (raises on malformed JSON).
        log_data = await request.json()

        # Accept both a single log entry and a batch of entries.
        if isinstance(log_data, dict):
            logs = [log_data]
        elif isinstance(log_data, list):
            logs = log_data
        else:
            raise HTTPException(status_code=400, detail="Invalid log payload format")

        await sqlmap_task.save_sqlmap_logs(logs)

        return {"success": True, "message": "Log stored successfully"}
    except HTTPException:
        # Bug fix: HTTPException subclasses Exception, so the deliberate
        # 400 above was previously caught below and its detail message
        # replaced. Re-raise it untouched instead.
        raise
    except Exception as e:
        # Chain the cause so the original traceback is preserved in logs.
        raise HTTPException(status_code=400, detail=f"Invalid log data: {str(e)}") from e

app/core/sqlmap_core.py

Lines changed: 33 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -52,6 +52,39 @@ async def get_task_logs(task_id: str, limit: int = 100, offset: int = 0):
5252
return logs
5353

5454

55+
async def save_sqlmap_logs(logs: list[dict]):
    """Persist sqlmap webhook log entries in a single transaction.

    Each entry is expected to carry a ``taskid``; entries without one are
    skipped. The ``celery_task_id`` column is resolved from the matching
    ``SqlmapScanPayload`` row when one exists and has a value, otherwise
    the sqlmap task id itself is used as a fallback.

    Args:
        logs: Log entry dicts as posted by the sqlmap webhook; keys used
            are ``taskid``, ``level``, ``message`` and ``time``.
    """
    async with AsyncSessionLocal() as session:
        # Cache the resolved celery id per task so a batch containing many
        # entries for the same task issues one SELECT per task instead of
        # one per log line (webhook batches are typically same-task).
        celery_id_cache: dict = {}

        for item in logs:
            task_id = item.get("taskid")
            if not task_id:
                # Cannot associate the entry with a task; skip it.
                continue

            if task_id not in celery_id_cache:
                result = await session.execute(
                    select(SqlmapScanPayload).where(
                        SqlmapScanPayload.task_id == task_id
                    )
                )
                payload = result.scalar_one_or_none()
                celery_id_cache[task_id] = (
                    payload.celery_task_id
                    if payload and payload.celery_task_id
                    else task_id
                )

            session.add(
                SqlmapScanLog(
                    task_id=task_id,
                    level=item.get("level", "INFO"),
                    message=item.get("message", ""),
                    # NOTE(review): "time" is stored as received — presumably
                    # a string/timestamp from sqlmap; confirm against schema.
                    log_time=item.get("time"),
                    celery_task_id=celery_id_cache[task_id],
                )
            )

        # Single commit for the whole batch.
        await session.commit()
5588
# 同步扫描任务写入。防止数据库丢失
5689
def celery_task_add(
5790
*,

0 commit comments

Comments
 (0)