Skip to content

Commit 3b4593a

Browse files
fix(backend): replace decommissioned gemma2-9b-it with llama-3.3-70b-versatile
1 parent 19c83ed commit 3b4593a

5 files changed

Lines changed: 6 additions & 6 deletions

File tree

backend/app/modules/bias_detection/check_bias.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -61,7 +61,7 @@ def check_bias(text):
                 "content": (f"Give bias score to the following article \n\n{text}"),
             },
         ],
-        model="gemma2-9b-it",
+        model="llama-3.3-70b-versatile",
         temperature=0.3,
         max_tokens=512,
     )

backend/app/modules/chat/llm_processing.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -55,7 +55,7 @@ def ask_llm(question, docs):
     """

     response = client.chat.completions.create(
-        model="gemma2-9b-it",
+        model="llama-3.3-70b-versatile",
         messages=[
             {"role": "system", "content": "Use only the context to answer."},
             {"role": "user", "content": prompt},

backend/app/modules/facts_check/llm_processing.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -63,7 +63,7 @@ def run_claim_extractor_sdk(state):
                 ),
             },
         ],
-        model="gemma2-9b-it",
+        model="llama-3.3-70b-versatile",
         temperature=0.3,
         max_tokens=512,
     )
@@ -128,7 +128,7 @@ def run_fact_verifier_sdk(search_results):
                 ),
             },
         ],
-        model="gemma2-9b-it",
+        model="llama-3.3-70b-versatile",
         temperature=0.3,
         max_tokens=256,
     )

backend/app/modules/langgraph_nodes/judge.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@

 # Init once
 groq_llm = ChatGroq(
-    model="gemma2-9b-it",
+    model="llama-3.3-70b-versatile",
     temperature=0.0,
     max_tokens=10,
 )

backend/app/modules/langgraph_nodes/sentiment.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -49,7 +49,7 @@ def run_sentiment_sdk(state):
                 ),
             },
         ],
-        model="gemma2-9b-it",
+        model="llama-3.3-70b-versatile",
         temperature=0.2,
         max_tokens=3,
     )

0 commit comments

Comments (0)