Skip to content

Commit fb318c4

Browse files
Intégration de ChromaDB pour le stockage et la recherche : service Docker persistant, sessions utilisateur, UI enrichie, dashboard admin amélioré et theming unifié
1 parent 15c1882 commit fb318c4

19 files changed

Lines changed: 1342 additions & 237 deletions

File tree

.env.example

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,8 @@ ACCESS_TOKEN_EXPIRE_MINUTES=30
1717
# Note : Dans Docker, utilisez le chemin absolu interne /app/data/
1818
# DATABASE_URL="sqlite:////app/data/chatbot_production.db"
1919
DATABASE_URL="sqlite:///./data/chatbot_production.db"
20+
CHROMA_DB_HOST=localhost
21+
CHROMA_DB_PORT=8001
2022

2123
# --- Clés API LLM ---
2224
# Au moins une clé est requise. Groq est prioritaire.

app/core/config.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,11 @@ class Settings(BaseSettings):
1818
EMBEDDING_MODEL: str = "sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2"
1919
FAQ_JSON_PATH: str = "data/faq.json"
2020
CONFIDENCE_THRESHOLD: float = 0.45
21+
DIRECT_ANSWER_THRESHOLD: float = 0.75
22+
# Config Chroma
23+
CHROMA_DB_HOST: str = "chromadb"
24+
CHROMA_DB_PORT: int = 8000
25+
CHROMA_COLLECTION_NAME: str = "faq_collection"
2126
model_config = SettingsConfigDict(
2227
env_file=".env",
2328
case_sensitive=True,

app/main.py

Lines changed: 9 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,8 @@
11
from contextlib import asynccontextmanager
2-
from fastapi import FastAPI
2+
from fastapi import FastAPI, Request
33
from fastapi.staticfiles import StaticFiles
44
from fastapi.responses import FileResponse
5+
from fastapi.templating import Jinja2Templates
56
from sqlmodel import SQLModel
67
import os
78

@@ -10,6 +11,8 @@
1011
from app.services.rag_engine import RAGService
1112
from app.routers import auth, admin, chat
1213

14+
templates = Jinja2Templates(directory="templates")
15+
1316
@asynccontextmanager
1417
async def lifespan(app: FastAPI):
1518
SQLModel.metadata.create_all(engine)
@@ -29,8 +32,10 @@ async def lifespan(app: FastAPI):
2932
app.include_router(chat.router)
3033

3134
@app.get("/")
32-
async def root():
33-
# Vérifie si le fichier existe pour éviter une erreur 500
35+
async def root(request: Request):
3436
if os.path.exists("templates/index.html"):
35-
return FileResponse("templates/index.html")
37+
return templates.TemplateResponse(
38+
"index.html",
39+
{"request": request}
40+
)
3641
return {"message": "Erreur: templates/index.html introuvable. Vérifiez vos dossiers."}

app/routers/admin.py

Lines changed: 73 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
1-
from fastapi import APIRouter, Depends, Request, Form
2-
from fastapi.responses import RedirectResponse
1+
from fastapi import APIRouter, Depends, Request, Form, HTTPException
2+
from fastapi.responses import RedirectResponse, JSONResponse
33
from fastapi.templating import Jinja2Templates
44
from sqlmodel import Session, select, func
55
from app.db.session import get_session
@@ -24,7 +24,7 @@ async def dashboard(
2424
.where(ChatInteraction.confidence < 0.45)
2525
.where(ChatInteraction.provider != "static_rule")
2626
.order_by(ChatInteraction.timestamp.desc())
27-
.limit(10)
27+
.limit(50)
2828
).all()
2929

3030
stats = {
@@ -94,4 +94,73 @@ async def delete_faq(
9494
@router.get("/stats")
9595
async def get_stats_json(db: Session = Depends(get_session), current_user = Depends(get_current_admin_user)):
9696
total = db.exec(select(func.count(ChatInteraction.id))).one()
97-
return {"total_messages": total}
97+
return {"total_messages": total}
98+
99+
@router.post("/questions/convert-to-faq/{interaction_id}")
100+
async def convert_question_to_faq(
101+
interaction_id: int,
102+
question: str = Form(None),
103+
answer: str = Form(None),
104+
category: str = Form("general"),
105+
db: Session = Depends(get_session),
106+
current_user = Depends(get_current_admin_user)
107+
):
108+
interaction = db.get(ChatInteraction, interaction_id)
109+
if not interaction:
110+
raise HTTPException(status_code=404, detail="Interaction not found")
111+
112+
faq_question = question if question else interaction.message
113+
faq_answer = answer if answer else interaction.response
114+
115+
existing = db.exec(
116+
select(FAQItem).where(FAQItem.question == faq_question)
117+
).first()
118+
119+
if existing:
120+
return JSONResponse(
121+
status_code=400,
122+
content={"message": "This question already exists in FAQ", "faq_id": existing.id}
123+
)
124+
125+
new_faq = FAQItem(
126+
question=faq_question,
127+
answer=faq_answer,
128+
category=category
129+
)
130+
db.add(new_faq)
131+
db.commit()
132+
db.refresh(new_faq)
133+
134+
try:
135+
RAGService().reload_from_db(db)
136+
except Exception as e:
137+
print(f"Error reloading ChromaDB: {e}")
138+
139+
return JSONResponse(
140+
status_code=200,
141+
content={
142+
"message": "Question added to FAQ successfully",
143+
"faq_id": new_faq.id,
144+
"question": new_faq.question
145+
}
146+
)
147+
148+
@router.get("/questions/{interaction_id}")
149+
async def get_question_details(
150+
interaction_id: int,
151+
db: Session = Depends(get_session),
152+
current_user = Depends(get_current_admin_user)
153+
):
154+
interaction = db.get(ChatInteraction, interaction_id)
155+
if not interaction:
156+
raise HTTPException(status_code=404, detail="Interaction not found")
157+
158+
return {
159+
"id": interaction.id,
160+
"message": interaction.message,
161+
"response": interaction.response,
162+
"confidence": interaction.confidence,
163+
"provider": interaction.provider,
164+
"timestamp": interaction.timestamp.isoformat(),
165+
"user_session_id": interaction.user_session_id
166+
}

app/routers/chat.py

Lines changed: 48 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
from fastapi import APIRouter, Depends, BackgroundTasks, HTTPException
22
from pydantic import BaseModel
33
from typing import List, Dict, Any
4-
from sqlmodel import Session, select, delete
4+
from sqlmodel import Session, select, delete, desc
55
from app.db.session import get_session
66
from app.db.models import ChatInteraction
77
from app.services.rag_engine import RAGService
@@ -45,61 +45,78 @@ async def chat_endpoint(
4545
background_tasks: BackgroundTasks,
4646
db: Session = Depends(get_session)
4747
):
48-
# Recherche RAG (incluant maintenant les règles statiques Bonjour/Merci)
48+
history_items = db.exec(
49+
select(ChatInteraction)
50+
.where(ChatInteraction.user_session_id == request.user_id)
51+
.order_by(desc(ChatInteraction.timestamp))
52+
.limit(5)
53+
).all()
54+
history_items = history_items[::-1]
55+
history_text = "\n".join(
56+
[f"User: {h.message}\nAssistant: {h.response}" for h in history_items]
57+
) if history_items else "Aucun historique récent."
58+
4959
rag_result = rag_service.search(request.message, threshold=settings.CONFIDENCE_THRESHOLD)
50-
5160
response_text = ""
5261
provider = "retrieval_only"
5362
confidence = rag_result["confidence"]
5463
matched_q = rag_result["matched_question"]
55-
# Si le moteur RAG a trouvé une règle statique (Bonjour), on l'utilise directement
64+
context_faq = rag_result["answer"] if rag_result["answer"] else ""
65+
5666
if rag_result.get("provider") == "static_rule":
57-
response_text = rag_result["answer"]
58-
provider = "static_rule"
59-
# La confiance est déjà à 1.0 grâce à la modif dans rag_engine.py
67+
return ChatResponse(
68+
response=rag_result["answer"],
69+
confidence=1.0,
70+
provider="static_rule",
71+
retrieval_only=True,
72+
is_new_question=False
73+
)
74+
# Cas A : Confiance TRÈS élevée -> FAQ Directe
75+
if context_faq and confidence >= settings.DIRECT_ANSWER_THRESHOLD:
76+
response_text = context_faq
77+
provider = "retrieval_high_confidence"
78+
# Cas B : Passage au LLM
6079
else:
61-
# Décision : RAG ou LLM ?
62-
should_use_llm = request.use_llm and (confidence < 0.65)
63-
if not should_use_llm and rag_result["answer"]:
64-
# Cas : Réponse trouvée dans la FAQ avec une bonne confiance
65-
response_text = rag_result["answer"]
66-
else:
67-
# Cas : Pas de réponse FAQ ou confiance faible -> Appel LLM
68-
context = rag_result["answer"] if rag_result["answer"] else ""
69-
70-
prompt = f"""Tu es un assistant support client expert.
71-
Contexte issu de la base de connaissances : "{context}"
72-
Question utilisateur : "{request.message}"
80+
system_prompt = f"""Tu es un assistant support client utile et précis.
81+
82+
CONTEXTE FAQ (Peut être vide ou peu pertinent, score={confidence:.2f}) :
83+
"{context_faq}"
7384
74-
Instructions :
75-
- Si le contexte répond à la question, reformule-le poliment.
76-
- Si le contexte est vide ou non pertinent, réponds avec tes connaissances générales en restant bref.
77-
- Réponds en français.
85+
HISTORIQUE :
86+
{history_text}
87+
88+
INSTRUCTIONS :
89+
1. Utilise le CONTEXTE FAQ en priorité s'il semble répondre à la question.
90+
2. Si le contexte est vide ou hors-sujet, utilise tes connaissances.
91+
3. Réponds toujours poliment et en français.
7892
"""
79-
llm_result = await llm_orchestrator.generate_response(prompt)
93+
if request.use_llm:
94+
llm_result = await llm_orchestrator.generate_response(
95+
f"{system_prompt}\n\nUser: {request.message}"
96+
)
8097

8198
if llm_result["status"] == "success":
8299
response_text = llm_result["response"]
83-
provider = llm_result["provider"]
84-
confidence = 1.0
85-
100+
provider = f"llm_{llm_result['provider']}"
86101
else:
87-
# Fallback ultime
88-
response_text = rag_result["answer"] or "Désolé, je n'ai pas la réponse et mes services IA sont indisponibles."
102+
response_text = context_faq or "Désolé, mes services d'IA sont indisponibles."
89103
provider = "fallback_error"
104+
else:
105+
response_text = context_faq or "Je n'ai pas trouvé de réponse exacte."
90106

91107
# Sauvegarde
92108
background_tasks.add_task(
93109
save_interaction_task, db, request.user_id, request.message, response_text, confidence, provider
94110
)
111+
is_retrieval = provider in ["retrieval_high_confidence", "static_rule", "retrieval_only"]
95112

96113
return ChatResponse(
97114
response=response_text,
98115
confidence=confidence,
99116
provider=provider,
100117
matched_question=matched_q,
101-
retrieval_only=(provider == "retrieval_only" or provider == "static_rule"),
102-
is_new_question=(rag_result["confidence"] < 0.45 and provider != "static_rule") # On marque comme "new" seulement si ce n'est pas un "Bonjour"
118+
retrieval_only=is_retrieval,
119+
is_new_question=(confidence < settings.CONFIDENCE_THRESHOLD)
103120
)
104121

105122
@router.get("/llm/status")

0 commit comments

Comments (0)