Skip to content

Commit 82612ee

Browse files
author
Mateusz
committed
fix(config): align codebase with immutable BackendSettings
Freeze BackendSettings and assemble named BackendConfig entries via the model validator. Add AppConfig.mutate_backends for safe updates. Update providers, semantic validation typing, tests, and CLI docs (including the per-output dynamic compression log level). Adjust Codex and hybrid tests for hyphenated backend keys and model_validate construction. Made-with: Cursor
1 parent 660ddd5 commit 82612ee

29 files changed

Lines changed: 717 additions & 431 deletions

docs/user_guide/cli-parameters.md

Lines changed: 1 addition & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -239,6 +239,7 @@ See the [Dynamic Tool Output Compression Guide](features/dynamic-tool-output-com
239239
| `--dynamic-compression-disable-methods CSV` | N/A | Comma-separated compression methods to disable. |
240240
| `--dynamic-compression-disable-tools CSV` | N/A | Comma-separated tool names to bypass. |
241241
| `--dynamic-compression-disable-command-prefixes CSV` | N/A | Comma-separated command prefixes to bypass. |
242+
| `--dynamic-compression-per-output-evaluation-log-level LEVEL` | N/A | Per-output compression evaluation log level (`off`, `debug`, `info`). |
242243

243244
---
244245

src/connectors/openai.py

Lines changed: 13 additions & 3 deletions
Original file line number · Diff line number · Diff line change
@@ -323,6 +323,12 @@ def _raise_upstream_http_error(
323323
details=merged,
324324
)
325325

326+
if status_code == 401 or status_code == 403:
327+
raise AuthenticationError(
328+
message=message,
329+
details=merged,
330+
)
331+
326332
if 400 <= status_code < 500:
327333
raise InvalidRequestError(
328334
message=message,
@@ -2107,9 +2113,13 @@ async def responses(
21072113
if isinstance(use_websocket_raw, bool)
21082114
else self._use_websocket
21092115
)
2110-
connector_context = kwargs.get("context")
2111-
if not isinstance(connector_context, ConnectorRequestContext):
2112-
connector_context = None
2116+
# Start from request.context (canonical path), then let options override
2117+
connector_context: ConnectorRequestContext | None = None
2118+
if isinstance(request.context, ConnectorRequestContext):
2119+
connector_context = request.context
2120+
options_context = kwargs.get("context")
2121+
if isinstance(options_context, ConnectorRequestContext):
2122+
connector_context = options_context
21132123
if use_websocket:
21142124
return await self._handle_websocket_response(
21152125
payload,

src/connectors/openai_codex/settings.py

Lines changed: 16 additions & 4 deletions
Original file line number · Diff line number · Diff line change
@@ -105,6 +105,8 @@ def load(self, app_config: AppConfig) -> CodexConnectorSettings: # noqa: C901
105105
}
106106

107107
backend_config = getattr(app_config.backends, "openai_codex", None)
108+
if backend_config is None and hasattr(app_config.backends, "lookup"):
109+
backend_config = app_config.backends.lookup("openai-codex")
108110
backend_extra: dict[str, Any] = {}
109111
if backend_config and hasattr(backend_config, "extra"):
110112
try:
@@ -434,9 +436,15 @@ def load(self, app_config: AppConfig) -> CodexConnectorSettings: # noqa: C901
434436
if isinstance(parsed, list):
435437
accounts = to_string_list(parsed)
436438
else:
437-
accounts = [part.strip() for part in normalized.split(",") if part.strip()]
439+
accounts = [
440+
part.strip()
441+
for part in normalized.split(",")
442+
if part.strip()
443+
]
438444
else:
439-
accounts = [part.strip() for part in normalized.split(",") if part.strip()]
445+
accounts = [
446+
part.strip() for part in normalized.split(",") if part.strip()
447+
]
440448
elif isinstance(raw_accounts_source, list):
441449
accounts = to_string_list(raw_accounts_source)
442450
elif raw_accounts_source == "all":
@@ -459,7 +467,9 @@ def load(self, app_config: AppConfig) -> CodexConnectorSettings: # noqa: C901
459467
os.getenv("OPENAI_CODEX_MANAGED_OAUTH_REFRESH_BUFFER_SECONDS")
460468
)
461469
if refresh_buffer is None:
462-
refresh_buffer = coerce_positive_int(managed_cfg.get("refresh_buffer_seconds"))
470+
refresh_buffer = coerce_positive_int(
471+
managed_cfg.get("refresh_buffer_seconds")
472+
)
463473
if refresh_buffer is None:
464474
refresh_buffer = settings["managed_oauth"]["refresh_buffer_seconds"]
465475

@@ -484,7 +494,9 @@ def load(self, app_config: AppConfig) -> CodexConnectorSettings: # noqa: C901
484494
affinity_max = settings["managed_oauth"]["session_affinity_max_entries"]
485495

486496
allow_legacy_fallback = managed_cfg.get("allow_legacy_fallback")
487-
env_allow_fallback = os.getenv("OPENAI_CODEX_MANAGED_OAUTH_ALLOW_LEGACY_FALLBACK")
497+
env_allow_fallback = os.getenv(
498+
"OPENAI_CODEX_MANAGED_OAUTH_ALLOW_LEGACY_FALLBACK"
499+
)
488500
if env_allow_fallback is not None:
489501
allow_legacy_fallback = env_allow_fallback.strip().lower() in truthy
490502
elif allow_legacy_fallback is None:

src/core/app/controllers/responses_controller.py

Lines changed: 3 additions & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -16,7 +16,9 @@
1616
from fastapi.responses import StreamingResponse
1717
from pydantic import ValidationError
1818

19-
from src.core.app.controllers.responses_stream_coercion import coerce_stream_chunk_payload
19+
from src.core.app.controllers.responses_stream_coercion import (
20+
coerce_stream_chunk_payload,
21+
)
2022
from src.core.common.exceptions import (
2123
InitializationError,
2224
LLMProxyError,

src/core/app/test_builder.py

Lines changed: 8 additions & 10 deletions
Original file line number · Diff line number · Diff line change
@@ -173,9 +173,7 @@ async def build_test_app_async(config: AppConfig | None = None) -> FastAPI:
173173
and not getattr(config.auth, "disable_auth", False)
174174
and not (list(getattr(config.auth, "api_keys", []) or []))
175175
):
176-
new_auth = config.auth.model_copy(
177-
update={"api_keys": ["test-proxy-key"]}
178-
)
176+
new_auth = config.auth.model_copy(update={"api_keys": ["test-proxy-key"]})
179177
config = config.model_copy(update={"auth": new_auth})
180178
except (AttributeError, TypeError, ValidationError):
181179
logger.warning(
@@ -335,16 +333,16 @@ def create_test_config() -> AppConfig:
335333
default_backend = os.environ.get("LLM_BACKEND", "openai")
336334

337335
# Set up backend config based on the default backend
338-
backend_settings = BackendSettings(default_backend=default_backend)
339-
340-
# Always include openai as a fallback
341-
backend_settings.__dict__["openai"] = BackendConfig(api_key="test_key")
342-
343-
# Add the default backend if it's not openai
336+
extra_backends: dict[str, BackendConfig] = {
337+
"openai": BackendConfig(api_key="test_key"),
338+
}
344339
if default_backend != "openai":
345-
backend_settings.__dict__[default_backend] = BackendConfig(
340+
extra_backends[default_backend] = BackendConfig(
346341
api_key=f"test_key_{default_backend}"
347342
)
343+
backend_payload: dict[str, Any] = {"default_backend": default_backend}
344+
backend_payload.update(extra_backends)
345+
backend_settings = BackendSettings.model_validate(backend_payload)
348346

349347
# Get command prefix from environment if set
350348
command_prefix = os.environ.get("COMMAND_PREFIX", "!/")

src/core/cli.py

Lines changed: 5 additions & 5 deletions
Original file line number · Diff line number · Diff line change
@@ -14,7 +14,7 @@
1414
import os
1515
import sys
1616
from collections.abc import Callable
17-
from typing import Literal, overload
17+
from typing import Any, Literal, overload
1818

1919
from fastapi import FastAPI
2020

@@ -144,9 +144,7 @@ def apply_cli_args(
144144
return final_cfg
145145

146146

147-
def _emit_legacy_compression_deprecation_warnings(
148-
*, config: AppConfig
149-
) -> None:
147+
def _emit_legacy_compression_deprecation_warnings(*, config: AppConfig) -> None:
150148
deprecated_env_controls = {
151149
"GEMINI_TOOL_OUTPUT_TRUNCATE_CHARS": (
152150
"dynamic_compression.methods.compact_acknowledgement + "
@@ -181,7 +179,9 @@ def _emit_legacy_compression_deprecation_warnings(
181179
)
182180
configured_legacy_extras: list[str] = []
183181
backends = getattr(config, "backends", None)
184-
backend_items = getattr(backends, "__dict__", {})
182+
backend_items: dict[str, Any] = {}
183+
if backends is not None and hasattr(backends, "get_named_backend_configs"):
184+
backend_items = backends.get_named_backend_configs()
185185
if isinstance(backend_items, dict):
186186
for backend_name, backend_config in backend_items.items():
187187
if backend_name.startswith("_") or backend_name == "default_backend":

src/core/config/app_config.py

Lines changed: 18 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -204,6 +204,24 @@ def set(self, key: str, value: Any) -> None:
204204
def get_gcp_project_id(self) -> str | None:
205205
return self.gcp_project_id
206206

207+
def mutate_backends(
208+
self,
209+
updates: Mapping[str, Any] | None = None,
210+
**kwargs: Any,
211+
) -> None:
212+
"""Merge *updates* into ``backends`` and assign the new aggregate.
213+
214+
``BackendSettings`` is immutable; tests and migration helpers must not
215+
assign attributes on ``config.backends`` directly. Pass a mapping for
216+
hyphenated backend keys (for example ``{"openai-codex": BackendConfig()}``)
217+
and/or keyword arguments for declared fields such as ``default_backend``.
218+
"""
219+
merged: dict[str, Any] = {}
220+
if updates is not None:
221+
merged.update(dict(updates))
222+
merged.update(kwargs)
223+
self.backends = self.backends.model_copy(update=merged)
224+
207225

208226
def load_config(
209227
config_path: str | Path | None = None,

0 commit comments

Comments (0)