Skip to content

Commit 9717806

Browse files
author
Mateusz
committed
fix: align ICodexTransport protocol signature with executor call site
The initiate_streaming_request protocol method's signature was missing the keyword-only parameters (context, backend, model, key_name) that the executor was already passing. This caused runtime TypeErrors for custom transport implementations that conformed only to the declared protocol. Also removed the cast(Any, self._transport) workaround at the call site, which had been suppressing the resulting type error.
1 parent baafced commit 9717806

2 files changed

Lines changed: 50 additions & 38 deletions

File tree

src/connectors/openai_codex/executor.py

Lines changed: 13 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -454,17 +454,17 @@ async def _streaming_iterator() -> AsyncIterator[ProcessedResponse]:
454454
attempt=attempts_used + 1,
455455
)
456456
try:
457-
stream_handle = await cast(
458-
Any, self._transport
459-
).initiate_streaming_request(
460-
url,
461-
current_payload_dict,
462-
current_headers,
463-
context.session_id,
464-
context=request_context,
465-
backend="openai-codex",
466-
model=context.effective_model,
467-
key_name=capture_key_name,
457+
stream_handle = (
458+
await self._transport.initiate_streaming_request(
459+
url,
460+
current_payload_dict,
461+
current_headers,
462+
context.session_id,
463+
context=request_context,
464+
backend="openai-codex",
465+
model=context.effective_model,
466+
key_name=capture_key_name,
467+
)
468468
)
469469
# Fall through to consume the stream iterator below
470470
except (HTTPException, LLMProxyError) as exc:
@@ -1082,6 +1082,8 @@ def _slice_input_for_continuation(
10821082
break
10831083
common_prefix_len += 1
10841084

1085+
if common_prefix_len < len(prior_fingerprints):
1086+
return None
10851087
if common_prefix_len <= 0 or common_prefix_len >= len(current_input):
10861088
return None
10871089
return list(current_input[common_prefix_len:])

src/connectors/openai_codex/interfaces.py

Lines changed: 37 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -7,16 +7,17 @@
77

88
from __future__ import annotations
99

10-
from abc import ABC, abstractmethod
11-
from collections.abc import Mapping
12-
from pathlib import Path
13-
from typing import Any, Protocol
14-
15-
from src.connectors._openai_codex_capabilities import CodexClientCapabilities
16-
from src.connectors.openai_codex.contracts import (
17-
CodexConnectorSettings,
18-
CodexInputItem,
19-
CodexPayload,
10+
from abc import ABC, abstractmethod
11+
from collections.abc import Mapping
12+
from pathlib import Path
13+
from typing import Any, Protocol
14+
15+
from src.connectors._openai_codex_capabilities import CodexClientCapabilities
16+
from src.connectors.contracts import ConnectorRequestContext
17+
from src.connectors.openai_codex.contracts import (
18+
CodexConnectorSettings,
19+
CodexInputItem,
20+
CodexPayload,
2021
CodexRequestContext,
2122
CodexToolSchema,
2223
CompatibilityResult,
@@ -684,23 +685,32 @@ class ICodexTransport(Protocol):
684685
Public boundary for streaming HTTP requests used by response executor.
685686
"""
686687

687-
async def initiate_streaming_request(
688-
self,
689-
url: str,
690-
payload: dict[str, Any],
691-
headers: dict[str, str],
692-
session_id: str,
693-
) -> StreamingResponseHandle:
694-
"""Initiate a streaming request to Codex API.
695-
696-
Args:
697-
url: Codex API endpoint URL
698-
payload: Request payload as dictionary
699-
headers: HTTP headers including Authorization
700-
session_id: Session identifier for logging and cancellation
701-
702-
Returns:
703-
StreamingResponseHandle with iterator and cancel callback
688+
async def initiate_streaming_request(
689+
self,
690+
url: str,
691+
payload: dict[str, Any],
692+
headers: dict[str, str],
693+
session_id: str,
694+
*,
695+
context: ConnectorRequestContext | None = None,
696+
backend: str = "openai-codex",
697+
model: str = "unknown",
698+
key_name: str | None = None,
699+
) -> StreamingResponseHandle:
700+
"""Initiate a streaming request to Codex API.
701+
702+
Args:
703+
url: Codex API endpoint URL
704+
payload: Request payload as dictionary
705+
headers: HTTP headers including Authorization
706+
session_id: Session identifier for logging and cancellation
707+
context: Optional proxy request context for wire capture
708+
backend: Backend key used for websocket capture metadata
709+
model: Effective model name for websocket capture metadata
710+
key_name: Optional capture key name override
711+
712+
Returns:
713+
StreamingResponseHandle with iterator and cancel callback
704714
705715
Raises:
706716
HTTPException: For 4xx/5xx responses

0 commit comments

Comments
 (0)