5353import uuid
5454from collections .abc import AsyncGenerator , Callable , Sequence
5555from pathlib import Path
56- from typing import TYPE_CHECKING , Any , cast
56+ from typing import TYPE_CHECKING , Any
5757
5858import google .auth
5959import google .auth .transport .requests
@@ -1292,125 +1292,17 @@ async def _ensure_healthy(self) -> None:
12921292 if logger .isEnabledFor (logging .INFO ):
12931293 logger .info ("Health check passed - backend is ready for use" )
12941294
1295- _LEGACY_CHAT_COMPLETIONS_PARAM_ORDER : tuple [str , ...] = (
1296- "request_data" ,
1297- "processed_messages" ,
1298- "effective_model" ,
1299- "identity" ,
1300- "cancellation_token" ,
1301- "cancellation_coordinator" ,
1302- "context" ,
1303- "options" ,
1304- )
1305-
1306- def _normalize_chat_completions_request (
1307- self ,
1308- request : ConnectorChatCompletionsRequest | None ,
1309- args : tuple [Any , ...],
1310- kwargs : dict [str , Any ],
1311- ) -> ConnectorChatCompletionsRequest :
1312- """Normalize canonical and legacy invocation shapes into one request object."""
1313- if isinstance (request , ConnectorChatCompletionsRequest ):
1314- if args or kwargs :
1315- raise TypeError (
1316- "GeminiCloudProjectConnector.chat_completions() canonical invocation accepts only "
1317- "the `request` argument."
1318- )
1319- return request
1320-
1321- legacy_values : dict [str , Any ] = {}
1322- legacy_kwargs = dict (kwargs )
1323-
1324- positional_values : list [Any ] = []
1325- if request is not None :
1326- positional_values .append (request )
1327- positional_values .extend (args )
1328-
1329- max_positional = len (self ._LEGACY_CHAT_COMPLETIONS_PARAM_ORDER )
1330- if len (positional_values ) > max_positional :
1331- raise TypeError (
1332- "GeminiCloudProjectConnector.chat_completions() takes at most "
1333- f"{ max_positional } legacy positional arguments "
1334- f"({ len (positional_values )} given)."
1335- )
1336-
1337- for index , value in enumerate (positional_values ):
1338- param_name = self ._LEGACY_CHAT_COMPLETIONS_PARAM_ORDER [index ]
1339- if param_name in legacy_kwargs :
1340- raise TypeError (
1341- "GeminiCloudProjectConnector.chat_completions() got multiple values for "
1342- f"argument '{ param_name } '."
1343- )
1344- legacy_values [param_name ] = value
1345-
1346- for param_name in self ._LEGACY_CHAT_COMPLETIONS_PARAM_ORDER :
1347- if param_name in legacy_kwargs :
1348- legacy_values [param_name ] = legacy_kwargs .pop (param_name )
1349-
1350- required_legacy = self ._LEGACY_CHAT_COMPLETIONS_PARAM_ORDER [:3 ]
1351- missing_required = [
1352- name for name in required_legacy if name not in legacy_values
1353- ]
1354- if missing_required :
1355- raise TypeError (
1356- "GeminiCloudProjectConnector.chat_completions() missing required arguments: "
1357- f"{ ', ' .join (missing_required )} . "
1358- "Provide canonical `request` or legacy "
1359- "`request_data`, `processed_messages`, and `effective_model`."
1360- )
1361-
1362- processed_messages_raw = legacy_values ["processed_messages" ]
1363- if processed_messages_raw is None :
1364- processed_messages_seq : Sequence [ChatMessage ] = ()
1365- elif isinstance (processed_messages_raw , Sequence ):
1366- processed_messages_seq = cast (Sequence [ChatMessage ], processed_messages_raw )
1367- else :
1368- raise TypeError (
1369- "GeminiCloudProjectConnector.chat_completions() legacy `processed_messages` "
1370- "must be a sequence."
1371- )
1372-
1373- effective_model = legacy_values ["effective_model" ]
1374- if not isinstance (effective_model , str ):
1375- raise TypeError (
1376- "GeminiCloudProjectConnector.chat_completions() legacy `effective_model` "
1377- "must be a string."
1378- )
1379-
1380- options_from_legacy = legacy_values .get ("options" )
1381- if options_from_legacy is None :
1382- options : dict [str , Any ] = {}
1383- elif isinstance (options_from_legacy , dict ):
1384- options = dict (options_from_legacy )
1385- else :
1386- raise TypeError (
1387- "GeminiCloudProjectConnector.chat_completions() legacy `options` must be a dict."
1388- )
1389- options .update (legacy_kwargs )
1390-
1391- return ConnectorChatCompletionsRequest (
1392- request = legacy_values ["request_data" ],
1393- processed_messages = processed_messages_seq ,
1394- effective_model = effective_model ,
1395- identity = legacy_values .get ("identity" ),
1396- cancellation_token = legacy_values .get ("cancellation_token" ),
1397- cancellation_coordinator = legacy_values .get ("cancellation_coordinator" ),
1398- context = legacy_values .get ("context" ),
1399- options = options ,
1400- )
1401-
1402- async def chat_completions ( # type: ignore[override]
1295+ async def chat_completions (
14031296 self ,
1404- request : ConnectorChatCompletionsRequest | None = None ,
1405- * args : Any ,
1406- ** kwargs : Any ,
1297+ request : ConnectorChatCompletionsRequest ,
14071298 ) -> ResponseEnvelope | StreamingResponseEnvelope :
14081299 """Handle chat completions using Google Code Assist API with user's GCP project."""
1409- connector_request = self ._normalize_chat_completions_request (
1410- request = request ,
1411- args = args ,
1412- kwargs = kwargs ,
1413- )
1300+ if not isinstance (request , ConnectorChatCompletionsRequest ):
1301+ raise TypeError (
1302+ "GeminiCloudProjectConnector.chat_completions() requires a "
1303+ "ConnectorChatCompletionsRequest instance."
1304+ )
1305+ connector_request = request
14141306 # Structural enforcement: check cancellation immediately if coordinator and token provided
14151307 if (
14161308 connector_request .cancellation_coordinator is not None
@@ -1424,8 +1316,7 @@ async def chat_completions( # type: ignore[override]
14241316 processed_messages = connector_request .processed_messages
14251317 effective_model = connector_request .effective_model
14261318
1427- # Options not specifically used here but available in request.options if needed
1428- # kwargs was previously passed down to _chat_completions_streaming / _chat_completions_standard
1319+ # Provider-specific options flow through ``ConnectorChatCompletionsRequest.options``.
14291320 kwargs = dict (connector_request .options ) if connector_request .options else {}
14301321
14311322 # Runtime validation with descriptive errors