@@ -343,7 +343,7 @@ def _restore_llama_stack(context: Context) -> None:
343343def before_feature (context : Context , feature : Feature ) -> None :
344344 """Run before each feature file is exercised.
345345
346- Per-feature setup that is not expressed in Gherkin (e.g. feedback cleanup state) .
346+ Per-feature setup that is not expressed in Gherkin.
347347 Lightspeed YAML is applied in feature Backgrounds via ``configure_service``.
348348
349349 Records monotonic start time on ``feature`` for duration logging in
@@ -369,27 +369,28 @@ def before_feature(context: Context, feature: Feature) -> None:
369369 if _E2E_FLAKY_TAG in scenario .effective_tags :
370370 patch_scenario_with_autoretry (scenario , max_attempts = max_flaky )
371371
372- if "Feedback" in feature . tags :
373- context . hostname = os . getenv ( "E2E_LSC_HOSTNAME " , "localhost" )
374- context . port = os . getenv ( "E2E_LSC_PORT" , "8080" )
375- context . feedback_conversations = []
372+ # Do not inherit feedback teardown state from a previous feature file.
373+ for _attr in ( "feedback_e2e_conversation_cleanup " , "feedback_conversations" ):
374+ if hasattr ( context , _attr ):
375+ delattr ( context , _attr )
376376
377377
378378def after_feature (context : Context , feature : Feature ) -> None :
379379 """Run after each feature file is exercised.
380380
381- Perform feature-level teardown: restore any modified configuration and
382- clean up feedback conversations.
381+ Perform feature-level teardown: restore any modified configuration and,
382+ when ``context.feedback_e2e_conversation_cleanup`` is set by feedback steps,
383+ delete tracked feedback test conversations.
383384 """
384385 # Restore Llama Stack FIRST (before any lightspeed-stack restart)
385386 llama_was_running = getattr (context , "llama_stack_was_running" , False )
386387 if llama_was_running :
387388 _restore_llama_stack (context )
388389 context .llama_stack_was_running = False
389390
390- if "Feedback" in feature . tags :
391+ if getattr ( context , "feedback_e2e_conversation_cleanup" , False ) :
391392 token = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6Ikpva"
392- for conversation_id in context . feedback_conversations :
393+ for conversation_id in getattr ( context , " feedback_conversations" , []) :
393394 url = f"http://{ context .hostname } :{ context .port } /v1/conversations/{ conversation_id } "
394395 headers = {"Authorization" : f"Bearer { token } " }
395396 response = requests .delete (url , headers = headers , timeout = 10 )
# (end of patch — trailing web-page residue removed)