Skip to content

Commit 592fd6f

Browse files
author
Mateusz
committed
fix: suppress per-output compression INFO log spam with configurable log level
Root cause: _log_output_evaluation emitted INFO for every applied=True outcome, flooding logs during long sessions with many tool outputs.

- Add per_output_evaluation_log_level config field (default: debug)
- Wire through CLI arg, env var, schema, and applicator
- failed_open outcomes always emit at INFO (safety signal preserved)
- Add 3 tests: debug default, info override, off suppression
- Fix all pyright warnings in touched files via typed casts
1 parent cd7548c commit 592fd6f

10 files changed

Lines changed: 245 additions & 33 deletions

File tree

config/schemas/app_config.schema.yaml

Lines changed: 22 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -634,6 +634,27 @@ properties:
634634
capability_refresh_backoff_seconds:
635635
type: number
636636
minimum: 0
637+
request_processing_unification:
638+
type: object
639+
additionalProperties: false
640+
properties:
641+
enable_core_canonical_path: { type: boolean }
642+
enable_canonical_features: { type: boolean }
643+
connector_stream_first:
644+
type: object
645+
additionalProperties: { type: boolean }
646+
retire_legacy_dual_path: { type: boolean }
647+
emit_path_selection_metadata: { type: boolean }
648+
promotion_requirements:
649+
type: object
650+
additionalProperties: false
651+
properties:
652+
require_characterization_tests: { type: boolean }
653+
require_equivalence_tests: { type: boolean }
654+
max_non_stream_p95_latency_delta_pct: { type: number, minimum: 0 }
655+
max_stream_ttft_delta_pct: { type: number, minimum: 0 }
656+
max_memory_delta_pct: { type: number, minimum: 0 }
657+
require_cleanup_checks: { type: boolean }
637658
usage_tracking:
638659
type: object
639660
additionalProperties: false
@@ -732,6 +753,7 @@ properties:
732753
level: { type: string, enum: [conservative, balanced, aggressive] }
733754
max_level: { type: string, enum: [conservative, balanced, aggressive] }
734755
min_bytes: { type: integer, minimum: 0 }
756+
per_output_evaluation_log_level: { type: string, enum: [off, debug, info] }
735757
time_budget_ms_per_output: { type: integer, minimum: 1 }
736758
explicit_format_flags:
737759
type: array

src/core/cli_support/applicators/dynamic_compression_applicator.py

Lines changed: 14 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -65,6 +65,20 @@ def apply(
6565
origin="--dynamic-compression-min-bytes",
6666
)
6767

68+
if (
69+
getattr(args, "dynamic_compression_per_output_evaluation_log_level", None)
70+
is not None
71+
):
72+
dynamic_overrides["per_output_evaluation_log_level"] = (
73+
args.dynamic_compression_per_output_evaluation_log_level
74+
)
75+
resolution.record(
76+
"dynamic_compression.per_output_evaluation_log_level",
77+
args.dynamic_compression_per_output_evaluation_log_level,
78+
ParameterSource.CLI,
79+
origin="--dynamic-compression-per-output-evaluation-log-level",
80+
)
81+
6882
if (
6983
getattr(args, "dynamic_compression_file_detail_include_line_numbers", None)
7084
is not None

src/core/cli_support/argument_parser_builder.py

Lines changed: 6 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -761,6 +761,12 @@ def _add_dynamic_compression_arguments(
761761
metavar="BYTES",
762762
help="Minimum output size in bytes required for compression eligibility",
763763
)
764+
dynamic_group.add_argument(
765+
"--dynamic-compression-per-output-evaluation-log-level",
766+
dest="dynamic_compression_per_output_evaluation_log_level",
767+
choices=["off", "debug", "info"],
768+
help="Per-output compression evaluation log level",
769+
)
764770
file_detail_line_numbers_toggle = dynamic_group.add_mutually_exclusive_group()
765771
file_detail_line_numbers_toggle.add_argument(
766772
"--dynamic-compression-file-detail-include-line-numbers",

src/core/config/env/from_env_part2.py

Lines changed: 11 additions & 6 deletions
Original file line number · Diff line number · Diff line change
@@ -3,7 +3,7 @@
33
import json
44
import logging
55
from collections.abc import Mapping
6-
from typing import Any
6+
from typing import Any, cast
77

88
from src.core.config.env.util import (
99
env_to_bool as _env_to_bool,
@@ -172,11 +172,9 @@ def apply_config_part2(
172172
alias_data = json.loads(model_aliases_env)
173173
if isinstance(alias_data, list):
174174
config["model_aliases"] = [
175-
{"pattern": item["pattern"], "replacement": item["replacement"]}
176-
for item in alias_data
177-
if isinstance(item, dict)
178-
and "pattern" in item
179-
and "replacement" in item
175+
{"pattern": entry["pattern"], "replacement": entry["replacement"]}
176+
for entry in cast("list[dict[str, Any]]", alias_data)
177+
if "pattern" in entry and "replacement" in entry
180178
]
181179
if resolution is not None:
182180
resolution.record(
@@ -341,6 +339,13 @@ def apply_config_part2(
341339
path="dynamic_compression.min_bytes",
342340
resolution=resolution,
343341
),
342+
"per_output_evaluation_log_level": _get_env_value(
343+
env,
344+
"DYNAMIC_COMPRESSION_PER_OUTPUT_EVALUATION_LOG_LEVEL",
345+
"debug",
346+
path="dynamic_compression.per_output_evaluation_log_level",
347+
resolution=resolution,
348+
),
344349
"file_detail_include_line_numbers": _env_to_bool(
345350
"DYNAMIC_COMPRESSION_FILE_DETAIL_INCLUDE_LINE_NUMBERS",
346351
False,

src/core/domain/configuration/dynamic_compression_config.py

Lines changed: 18 additions & 13 deletions
Original file line number · Diff line number · Diff line change
@@ -3,7 +3,7 @@
33
from __future__ import annotations
44

55
from enum import Enum
6-
from typing import Literal
6+
from typing import Literal, cast
77

88
from pydantic import ConfigDict, Field, field_validator
99

@@ -89,7 +89,10 @@ def _normalize_content_types(cls, value: object) -> list[str] | None:
8989
return [stripped] if stripped else None
9090
if not isinstance(value, list):
9191
return None
92-
out = [str(item).strip().lower() for item in value if str(item).strip()]
92+
out: list[str] = []
93+
for it in cast("list[object]", value):
94+
if isinstance(it, str) and it.strip():
95+
out.append(it.strip().lower())
9396
return out or None
9497

9598

@@ -111,9 +114,9 @@ def _normalize_pipeline(cls, value: object) -> list[str]:
111114
if not isinstance(value, list):
112115
return []
113116
normalized: list[str] = []
114-
for item in value:
115-
if isinstance(item, str):
116-
stripped = item.strip()
117+
for candidate in cast("list[object]", value):
118+
if isinstance(candidate, str):
119+
stripped = candidate.strip()
117120
if stripped:
118121
normalized.append(stripped)
119122
return normalized
@@ -1046,6 +1049,7 @@ class DynamicCompressionConfig(ValueObject):
10461049
]
10471050
)
10481051
telemetry_include_content_hashes: bool = True
1052+
per_output_evaluation_log_level: Literal["off", "debug", "info"] = "debug"
10491053
marker: CompressionMarkerConfig = Field(default_factory=CompressionMarkerConfig)
10501054
alerts: CompressionAlertsConfig = Field(default_factory=CompressionAlertsConfig)
10511055
recovery: CompressionRecoveryConfig = Field(
@@ -1062,9 +1066,7 @@ class DynamicCompressionConfig(ValueObject):
10621066
"other": True,
10631067
}
10641068
)
1065-
methods: dict[str, bool] = Field(
1066-
default_factory=_default_method_states
1067-
)
1069+
methods: dict[str, bool] = Field(default_factory=_default_method_states)
10681070
pytest_failure_focus_min_lines: int | None = Field(default=None, ge=0)
10691071
noise_directories: list[str] = Field(
10701072
default_factory=lambda: [
@@ -1137,7 +1139,10 @@ def _normalize_lists(cls, value: object) -> list[str]:
11371139
)
11381140
if not isinstance(value, list):
11391141
return []
1140-
normalized = [str(item).strip() for item in value if str(item).strip()]
1142+
normalized: list[str] = []
1143+
for it in cast("list[object]", value):
1144+
if isinstance(it, str) and it.strip():
1145+
normalized.append(it.strip())
11411146
return _dedupe_preserve_order(normalized)
11421147

11431148
@field_validator("pytest_failure_focus_min_lines", mode="before")
@@ -1161,13 +1166,13 @@ def _normalize_declarative_rules(cls, value: object) -> list[dict[str, object]]:
11611166
if value is None:
11621167
return []
11631168
if isinstance(value, dict):
1164-
return [value]
1169+
return [cast("dict[str, object]", value)]
11651170
if not isinstance(value, list):
11661171
return []
11671172
normalized: list[dict[str, object]] = []
1168-
for item in value:
1169-
if isinstance(item, dict):
1170-
normalized.append(item)
1173+
for candidate in cast("list[object]", value):
1174+
if isinstance(candidate, dict):
1175+
normalized.append(cast("dict[str, object]", candidate))
11711176
return normalized
11721177

11731178
def is_category_enabled(self, category: str) -> bool:

src/core/services/tool_output_compression_service.py

Lines changed: 34 additions & 7 deletions
Original file line number · Diff line number · Diff line change
@@ -24,6 +24,7 @@
2424
ToolIdentity,
2525
ToolOutputCompressionBatchResult,
2626
ToolOutputCompressionRecord,
27+
ToolOutputContext,
2728
)
2829
from src.core.interfaces.compression_strategy_registry_interface import (
2930
CompressionStrategy,
@@ -138,6 +139,7 @@ async def compress_messages(
138139
updated_messages: list[ChatMessage] = []
139140
records: list[ToolOutputCompressionRecord] = []
140141
batch_alerts: list[CompressionAlertRecord] = []
142+
per_output_log_level = effective_config.per_output_evaluation_log_level
141143
tool_lookup = self._identity_resolver.build_tool_call_lookup(messages)
142144

143145
for message_index, message in enumerate(messages):
@@ -188,6 +190,7 @@ async def compress_messages(
188190
enabled_pipeline=[],
189191
decision_reason="already_processed_output",
190192
output_started_at=output_started_at,
193+
per_output_evaluation_log_level=per_output_log_level,
191194
)
192195
batch_alerts.extend(
193196
self._record_metrics_and_alerts(
@@ -239,6 +242,7 @@ async def compress_messages(
239242
enabled_pipeline=enabled_pipeline,
240243
decision_reason="compression_disabled",
241244
output_started_at=output_started_at,
245+
per_output_evaluation_log_level=per_output_log_level,
242246
)
243247
batch_alerts.extend(
244248
self._record_metrics_and_alerts(
@@ -261,6 +265,7 @@ async def compress_messages(
261265
enabled_pipeline=enabled_pipeline,
262266
decision_reason="below_min_bytes",
263267
output_started_at=output_started_at,
268+
per_output_evaluation_log_level=per_output_log_level,
264269
)
265270
batch_alerts.extend(
266271
self._record_metrics_and_alerts(
@@ -283,6 +288,7 @@ async def compress_messages(
283288
enabled_pipeline=enabled_pipeline,
284289
decision_reason="category_disabled",
285290
output_started_at=output_started_at,
291+
per_output_evaluation_log_level=per_output_log_level,
286292
)
287293
batch_alerts.extend(
288294
self._record_metrics_and_alerts(
@@ -305,6 +311,7 @@ async def compress_messages(
305311
enabled_pipeline=enabled_pipeline,
306312
decision_reason="tool_disabled",
307313
output_started_at=output_started_at,
314+
per_output_evaluation_log_level=per_output_log_level,
308315
)
309316
batch_alerts.extend(
310317
self._record_metrics_and_alerts(
@@ -330,6 +337,7 @@ async def compress_messages(
330337
enabled_pipeline=enabled_pipeline,
331338
decision_reason="command_prefix_disabled",
332339
output_started_at=output_started_at,
340+
per_output_evaluation_log_level=per_output_log_level,
333341
)
334342
batch_alerts.extend(
335343
self._record_metrics_and_alerts(
@@ -381,6 +389,7 @@ async def compress_messages(
381389
enabled_pipeline=enabled_pipeline,
382390
decision_reason="no_matching_rule",
383391
output_started_at=output_started_at,
392+
per_output_evaluation_log_level=per_output_log_level,
384393
)
385394
batch_alerts.extend(
386395
self._record_metrics_and_alerts(
@@ -425,6 +434,7 @@ async def compress_messages(
425434
enabled_pipeline=enabled_pipeline,
426435
decision_reason="no_enabled_pipeline_methods",
427436
output_started_at=output_started_at,
437+
per_output_evaluation_log_level=per_output_log_level,
428438
)
429439
batch_alerts.extend(
430440
self._record_metrics_and_alerts(
@@ -559,6 +569,7 @@ async def compress_messages(
559569
"failed_open" if record.failed_open else "not_applied"
560570
),
561571
output_started_at=output_started_at,
572+
per_output_evaluation_log_level=per_output_log_level,
562573
)
563574
batch_alerts.extend(
564575
self._record_metrics_and_alerts(
@@ -584,6 +595,7 @@ async def compress_messages(
584595
"applied_failed_open" if record.failed_open else "applied"
585596
),
586597
output_started_at=output_started_at,
598+
per_output_evaluation_log_level=per_output_log_level,
587599
)
588600
batch_alerts.extend(
589601
self._record_metrics_and_alerts(
@@ -903,7 +915,7 @@ async def _run_pipeline_with_escalation(
903915
self,
904916
*,
905917
original_content: str,
906-
context,
918+
context: ToolOutputContext,
907919
pipeline: list[str],
908920
level: CompressionLevel,
909921
max_level: CompressionLevel,
@@ -1035,7 +1047,7 @@ async def _run_single_level_pipeline(
10351047
self,
10361048
*,
10371049
content: str,
1038-
context,
1050+
context: ToolOutputContext,
10391051
pipeline: list[str],
10401052
level: CompressionLevel,
10411053
started_at: float,
@@ -1498,6 +1510,7 @@ def _log_output_evaluation(
14981510
enabled_pipeline: list[str],
14991511
decision_reason: str,
15001512
output_started_at: float,
1513+
per_output_evaluation_log_level: str,
15011514
) -> None:
15021515
record.explicit_format_note = self._explicit_format_diagnostic_note(
15031516
record=record,
@@ -1511,12 +1524,26 @@ def _log_output_evaluation(
15111524
and decision_reason in _NOISY_NOOP_DECISION_REASONS
15121525
):
15131526
return
1514-
should_emit_info = record.applied or record.failed_open
1515-
if should_emit_info:
1516-
if not is_log_level_enabled(logger, logging.INFO):
1527+
should_emit_detail = record.applied or record.failed_open
1528+
if should_emit_detail:
1529+
if record.failed_open:
1530+
target_level = "info"
1531+
else:
1532+
target_level = per_output_evaluation_log_level
1533+
if target_level not in {"off", "debug", "info"}:
1534+
target_level = "debug"
1535+
if target_level == "off":
15171536
return
1518-
log_level = "info"
1519-
log_fn = logger.info
1537+
if target_level == "info":
1538+
if not is_log_level_enabled(logger, logging.INFO):
1539+
return
1540+
log_level = "info"
1541+
log_fn = logger.info
1542+
else:
1543+
if not is_log_level_enabled(logger, logging.DEBUG):
1544+
return
1545+
log_level = "debug"
1546+
log_fn = logger.debug
15201547
else:
15211548
if not is_log_level_enabled(logger, logging.DEBUG):
15221549
return

tests/unit/core/cli_support/applicators/test_dynamic_compression_applicator.py

Lines changed: 12 additions & 6 deletions
Original file line number · Diff line number · Diff line change
@@ -3,6 +3,7 @@
33
from __future__ import annotations
44

55
import argparse
6+
from typing import cast
67

78
from src.core.config.parameter_resolution import ParameterResolution, ParameterSource
89

@@ -13,6 +14,7 @@ def _empty_args() -> argparse.Namespace:
1314
dynamic_compression_level=None,
1415
dynamic_compression_max_level=None,
1516
dynamic_compression_min_bytes=None,
17+
dynamic_compression_per_output_evaluation_log_level=None,
1618
dynamic_compression_file_detail_include_line_numbers=None,
1719
dynamic_compression_disable_categories=None,
1820
dynamic_compression_disable_methods=None,
@@ -31,6 +33,7 @@ def test_dynamic_compression_applicator_sets_overrides_and_resolution() -> None:
3133
args.dynamic_compression_level = "balanced"
3234
args.dynamic_compression_max_level = "aggressive"
3335
args.dynamic_compression_min_bytes = 2048
36+
args.dynamic_compression_per_output_evaluation_log_level = "off"
3437
args.dynamic_compression_file_detail_include_line_numbers = True
3538
args.dynamic_compression_disable_methods = "ansi_normalize,line_dedupe"
3639

@@ -41,18 +44,21 @@ def test_dynamic_compression_applicator_sets_overrides_and_resolution() -> None:
4144

4245
dc = overrides.get("dynamic_compression")
4346
assert isinstance(dc, dict)
44-
assert dc.get("enabled") is True
45-
assert dc.get("level") == "balanced"
46-
assert dc.get("max_level") == "aggressive"
47-
assert dc.get("min_bytes") == 2048
48-
assert dc.get("file_detail_include_line_numbers") is True
49-
assert dc.get("disable_methods") == ["ansi_normalize", "line_dedupe"]
47+
dc_typed = cast("dict[str, object]", dc)
48+
assert dc_typed.get("enabled") is True
49+
assert dc_typed.get("level") == "balanced"
50+
assert dc_typed.get("max_level") == "aggressive"
51+
assert dc_typed.get("min_bytes") == 2048
52+
assert dc_typed.get("per_output_evaluation_log_level") == "off"
53+
assert dc_typed.get("file_detail_include_line_numbers") is True
54+
assert dc_typed.get("disable_methods") == ["ansi_normalize", "line_dedupe"]
5055

5156
cli_paths = set(resolution.latest_by_source(ParameterSource.CLI))
5257
assert "dynamic_compression.enabled" in cli_paths
5358
assert "dynamic_compression.level" in cli_paths
5459
assert "dynamic_compression.max_level" in cli_paths
5560
assert "dynamic_compression.min_bytes" in cli_paths
61+
assert "dynamic_compression.per_output_evaluation_log_level" in cli_paths
5662
assert "dynamic_compression.file_detail_include_line_numbers" in cli_paths
5763
assert "dynamic_compression.disable_methods" in cli_paths
5864

0 commit comments

Comments (0)