11import os
22import json
3- from typing import List
3+ from typing import List , Iterable
44import openai
55from devchat ._cli .utils import init_dir
66from . import Namespace , CommandParser , Command
@@ -95,29 +95,30 @@ def _call_gpt(messages: List[dict], # messages passed to GPT
9595
9696 for try_times in range (3 ):
9797 try :
98- response = client .chat .completions .create (
98+ response : Iterable = client .chat .completions .create (
9999 messages = messages ,
100100 model = model_name ,
101101 stream = True ,
102102 tools = tools
103103 )
104104
105105 response_result = {'content' : None , 'function_name' : None , 'parameters' : "" }
106- for chunk in response :
107- chunk = chunk .dict ()
108- delta = chunk ["choices" ][0 ]["delta" ]
109- if 'tool_calls' in delta and delta ['tool_calls' ]:
110- tool_call = delta ['tool_calls' ][0 ]['function' ]
111- if tool_call .get ('name' , None ):
112- response_result ["function_name" ] = tool_call ["name" ]
113- if tool_call .get ("arguments" , None ):
114- response_result ["parameters" ] += tool_call ["arguments" ]
115- if delta .get ('content' , None ):
116- if response_result ["content" ]:
117- response_result ["content" ] += delta ["content" ]
118- else :
119- response_result ["content" ] = delta ["content" ]
120- print (delta ["content" ], end = '' , flush = True )
106+ if isinstance (response , Iterable ):
107+ for chunk in response :
108+ chunk = chunk .dict ()
109+ delta = chunk ["choices" ][0 ]["delta" ]
110+ if 'tool_calls' in delta and delta ['tool_calls' ]:
111+ tool_call = delta ['tool_calls' ][0 ]['function' ]
112+ if tool_call .get ('name' , None ):
113+ response_result ["function_name" ] = tool_call ["name" ]
114+ if tool_call .get ("arguments" , None ):
115+ response_result ["parameters" ] += tool_call ["arguments" ]
116+ if delta .get ('content' , None ):
117+ if response_result ["content" ]:
118+ response_result ["content" ] += delta ["content" ]
119+ else :
120+ response_result ["content" ] = delta ["content" ]
121+ print (delta ["content" ], end = '' , flush = True )
121122 if response_result ["function_name" ]:
122123 print ("``` command_run" )
123124 function_call = {
@@ -135,6 +136,7 @@ def _call_gpt(messages: List[dict], # messages passed to GPT
135136 except Exception as err :
136137 print ("Exception Error:" , err )
137138 return {'content' : None , 'function_name' : None , 'parameters' : "" }
139+ return {'content' : None , 'function_name' : None , 'parameters' : "" }
138140
139141
140142def _create_messages ():
@@ -193,7 +195,7 @@ def _auto_route(history_messages, model_name:str):
193195 response ['function_name' ],
194196 response ['parameters' ],
195197 model_name )
196- elif not response ['content' ]:
198+ if not response ['content' ]:
197199 return (- 1 , "" )
198200 return (- 1 , "" )
199201
@@ -218,19 +220,18 @@ def run_command(
218220 # response = _auto_function_calling(history_messages, model_name)
219221 # return response['content']
220222 return _auto_route (history_messages , model_name )
221- else :
222- commands = input_text .split ()
223- command = commands [0 ][1 :]
223+ commands = input_text .split ()
224+ command = commands [0 ][1 :]
224225
225- command_obj = _load_command (command )
226- if not command_obj or not command_obj .steps :
227- return None
226+ command_obj = _load_command (command )
227+ if not command_obj or not command_obj .steps :
228+ return None
228229
229- runner = CommandRunner (model_name )
230- return runner .run_command (
231- command ,
232- command_obj ,
233- history_messages ,
234- input_text ,
235- parent_hash ,
236- context_contents )
230+ runner = CommandRunner (model_name )
231+ return runner .run_command (
232+ command ,
233+ command_obj ,
234+ history_messages ,
235+ input_text ,
236+ parent_hash ,
237+ context_contents )