Skip to content

Commit af5feb3

Browse files
committed
添加异步和同步添加数据库函数
1 parent 65f21df commit af5feb3

1 file changed

Lines changed: 37 additions & 11 deletions

File tree

app/core/sqlmap_core.py

Lines changed: 37 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1,26 +1,31 @@
11
from sqlalchemy import select
22

33
from app.database.database import AsyncSessionLocal
4+
from app.middleware.custom_decorators import with_async_session
45
from app.models.sqlmap_result import SqlmapScanPayload, SqlmapScanLog
6+
from app.database.celery_sync_database import SessionLocal
57

68

9+
# Persist a scan task right after it is first created (async path).
@with_async_session
async def task_add(
    *,
    session,
    task_id: str,
    scan_url: str,
    status: str,
    scan_risk: int = 1,
    scan_level: int = 1,
):
    """Store a newly created sqlmap scan task in the database.

    The ``with_async_session`` decorator injects an async SQLAlchemy
    session as the keyword-only ``session`` argument, so callers only
    supply the task fields.

    Args:
        session: Async SQLAlchemy session (injected by the decorator).
        task_id: sqlmap task identifier.
        scan_url: Target URL of the scan.
        status: Initial status string for the task.
        scan_risk: sqlmap risk level (default 1).
        scan_level: sqlmap test level (default 1).
    """
    fields = {
        "task_id": task_id,
        "scan_url": scan_url,
        "status": status,
        "scan_risk": scan_risk,
        "scan_level": scan_level,
    }
    session.add(SqlmapScanPayload(**fields))
    await session.commit()
2429

2530

2631
async def list_tasks():
@@ -45,3 +50,24 @@ async def get_task_logs(task_id: str, limit: int = 100, offset: int = 0):
4550
)
4651
logs = result.scalars().all()
4752
return logs
53+
54+
55+
# Synchronous task write for Celery workers, so the scan record is not
# lost when the async write path is unavailable.
def celery_task_add(
    *,
    session,
    task_id: str,
    scan_url: str,
    status: str,
    scan_risk: int = 1,
    scan_level: int = 1,
):
    """Persist a sqlmap scan task using a synchronous SQLAlchemy session.

    Args:
        session: Synchronous SQLAlchemy ``Session`` supplied by the caller
            (e.g. from ``SessionLocal``).
        task_id: sqlmap task identifier.
        scan_url: Target URL of the scan.
        status: Initial status string for the task.
        scan_risk: sqlmap risk level (default 1).
        scan_level: sqlmap test level (default 1).

    Returns:
        The persisted ``SqlmapScanPayload`` instance.

    Raises:
        Exception: Database errors are re-raised after ``rollback()`` so
            the session is left usable for subsequent work.
    """
    task = SqlmapScanPayload(
        task_id=task_id,
        scan_url=scan_url,
        status=status,
        scan_risk=scan_risk,
        scan_level=scan_level,
    )
    try:
        session.add(task)
        session.commit()
    except Exception:
        # A failed commit leaves a sync Session in a pending-rollback
        # state; roll back so the (often long-lived) Celery worker can
        # keep using it, then surface the original error.
        session.rollback()
        raise
    return task

0 commit comments

Comments
 (0)