-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathLLM.py
More file actions
22 lines (17 loc) · 850 Bytes
/
LLM.py
File metadata and controls
22 lines (17 loc) · 850 Bytes
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
import anthropic
from LLM_utils import make_system_prompt, make_user_prompt
import constants
class LLMClient:
    """Conversational wrapper around the Anthropic API for discussing a piece of code.

    Keeps a running message history so follow-up queries retain
    conversational context across calls.
    """

    def __init__(self, code_input):
        """Create the Anthropic client and prime Claude with the code to discuss.

        Args:
            code_input: The source code (string) that Claude will answer
                questions about. It is appended to the system prompt.
        """
        self.client = anthropic.Anthropic(api_key=constants.CLAUDE_API_KEY)
        self.code_input = code_input
        # Build the combined system prompt once and reuse it for every
        # request, instead of re-concatenating on each call.
        self.system_prompt = constants.SYSTEM_PROMPT + self.code_input
        # History of user queries and Claude's responses, used to maintain
        # contextual awareness across turns.
        self.messages = []
        # Prime Claude with its role and the code via an initial exchange.
        make_system_prompt(self.client, self.system_prompt,
                           self.messages, constants.INITIAL_USER_QUERY)

    def get_response_to_query(self, query):
        """Send *query* to Claude and return the response text.

        The helper receives self.messages, presumably recording the
        exchange there so later turns keep context — confirm in LLM_utils.

        Args:
            query: The user's question about the code.

        Returns:
            Claude's response text.
        """
        return make_user_prompt(self.client, self.system_prompt,
                                self.messages, query)