1313"""
1414
1515import asyncio
16- import os
17- import shutil
1816import subprocess
1917import tempfile
2018import threading
2321from typing import Any , Optional
2422
2523import trustme
26- import yaml
2724from behave import given , then # pyright: ignore[reportAttributeAccessIssue]
2825from behave .runner import Context
2926
27+ from tests .e2e .utils .llama_config_utils import (
28+ backup_llama_config ,
29+ load_llama_config ,
30+ restore_llama_config_if_modified ,
31+ write_llama_config ,
32+ )
3033from tests .e2e .utils .utils import (
3134 is_prow_environment ,
3235 restart_container ,
3336 wait_for_lightspeed_stack_http_ready ,
3437)
3538
36- # Llama Stack config — mounted into the container from the host
37- _LLAMA_STACK_CONFIG = "run.yaml"
38- _LLAMA_STACK_CONFIG_BACKUP = "run.yaml.proxy-backup"
39-
4039
4140def _is_docker_mode () -> bool :
4241 """Check if services are running in Docker containers (local e2e)."""
@@ -126,18 +125,6 @@ def _get_proxy_host(is_docker: bool) -> str:
126125 return "172.17.0.1"
127126
128127
129- def _load_llama_config () -> dict [str , Any ]:
130- """Load the base Llama Stack run config."""
131- with open (_LLAMA_STACK_CONFIG , encoding = "utf-8" ) as f :
132- return yaml .safe_load (f )
133-
134-
135- def _write_config (config : dict [str , Any ], path : str ) -> None :
136- """Write a YAML config file."""
137- with open (path , "w" , encoding = "utf-8" ) as f :
138- yaml .dump (config , f , default_flow_style = False )
139-
140-
141128def _find_inference_provider (
142129 context : Context , config : dict [str , Any ]
143130) -> dict [str , Any ]:
@@ -175,12 +162,6 @@ def _find_inference_provider(
175162 )
176163
177164
178- def _backup_llama_config () -> None :
179- """Create a backup of the current run.yaml if not already backed up."""
180- if not os .path .exists (_LLAMA_STACK_CONFIG_BACKUP ):
181- shutil .copy (_LLAMA_STACK_CONFIG , _LLAMA_STACK_CONFIG_BACKUP )
182-
183-
184165# --- Background Steps ---
185166
186167
@@ -214,11 +195,8 @@ def restore_if_modified(context: Context) -> None:
214195 _stop_proxy (context , "tunnel_proxy" , "proxy_loop" )
215196 _stop_proxy (context , "interception_proxy" , "interception_proxy_loop" )
216197
217- if os .path .exists (_LLAMA_STACK_CONFIG_BACKUP ):
218- print (
219- f"Restoring original Llama Stack config from { _LLAMA_STACK_CONFIG_BACKUP } ..."
220- )
221- shutil .move (_LLAMA_STACK_CONFIG_BACKUP , _LLAMA_STACK_CONFIG )
198+ if restore_llama_config_if_modified ():
199+ print ("Restoring original Llama Stack config from backup..." )
222200
223201
224202# --- Service Restart Steps ---
@@ -264,10 +242,10 @@ def run_proxy() -> None:
264242@given ("Llama Stack is configured to route inference through the tunnel proxy" )
265243def configure_llama_tunnel_proxy (context : Context ) -> None :
266244 """Modify run.yaml with proxy config pointing to the tunnel proxy."""
267- _backup_llama_config ()
245+ backup_llama_config ()
268246 proxy = context .tunnel_proxy
269247 proxy_host = _get_proxy_host (context .is_docker_mode )
270- config = _load_llama_config ()
248+ config = load_llama_config ()
271249 provider = _find_inference_provider (context , config )
272250
273251 if "config" not in provider :
@@ -278,14 +256,14 @@ def configure_llama_tunnel_proxy(context: Context) -> None:
278256 }
279257 }
280258
281- _write_config (config , _LLAMA_STACK_CONFIG )
259+ write_llama_config (config )
282260
283261
284262@given ('Llama Stack is configured to route inference through proxy "{proxy_url}"' )
285263def configure_llama_unreachable_proxy (context : Context , proxy_url : str ) -> None :
286264 """Modify run.yaml with a proxy URL (may be unreachable)."""
287- _backup_llama_config ()
288- config = _load_llama_config ()
265+ backup_llama_config ()
266+ config = load_llama_config ()
289267 provider = _find_inference_provider (context , config )
290268
291269 if "config" not in provider :
@@ -296,7 +274,7 @@ def configure_llama_unreachable_proxy(context: Context, proxy_url: str) -> None:
296274 }
297275 }
298276
299- _write_config (config , _LLAMA_STACK_CONFIG )
277+ write_llama_config (config )
300278
301279
302280# --- Interception Proxy Steps ---
@@ -346,10 +324,10 @@ def run_proxy() -> None:
346324)
347325def configure_llama_interception_with_ca (context : Context ) -> None :
348326 """Modify run.yaml with interception proxy and CA cert config."""
349- _backup_llama_config ()
327+ backup_llama_config ()
350328 proxy = context .interception_proxy
351329 proxy_host = _get_proxy_host (context .is_docker_mode )
352- config = _load_llama_config ()
330+ config = load_llama_config ()
353331 provider = _find_inference_provider (context , config )
354332
355333 if "config" not in provider :
@@ -364,7 +342,7 @@ def configure_llama_interception_with_ca(context: Context) -> None:
364342 },
365343 }
366344
367- _write_config (config , _LLAMA_STACK_CONFIG )
345+ write_llama_config (config )
368346
369347
370348@given (
@@ -373,10 +351,10 @@ def configure_llama_interception_with_ca(context: Context) -> None:
373351)
374352def configure_llama_interception_no_ca (context : Context ) -> None :
375353 """Modify run.yaml with interception proxy but NO CA cert."""
376- _backup_llama_config ()
354+ backup_llama_config ()
377355 proxy = context .interception_proxy
378356 proxy_host = _get_proxy_host (context .is_docker_mode )
379- config = _load_llama_config ()
357+ config = load_llama_config ()
380358 provider = _find_inference_provider (context , config )
381359
382360 if "config" not in provider :
@@ -387,7 +365,7 @@ def configure_llama_interception_no_ca(context: Context) -> None:
387365 },
388366 }
389367
390- _write_config (config , _LLAMA_STACK_CONFIG )
368+ write_llama_config (config )
391369
392370
393371# --- TLS Steps ---
@@ -396,8 +374,8 @@ def configure_llama_interception_no_ca(context: Context) -> None:
396374@given ('Llama Stack is configured with minimum TLS version "{version}"' )
397375def configure_llama_tls_version (context : Context , version : str ) -> None :
398376 """Modify run.yaml with TLS version config."""
399- _backup_llama_config ()
400- config = _load_llama_config ()
377+ backup_llama_config ()
378+ config = load_llama_config ()
401379 provider = _find_inference_provider (context , config )
402380
403381 if "config" not in provider :
@@ -408,14 +386,14 @@ def configure_llama_tls_version(context: Context, version: str) -> None:
408386 }
409387 }
410388
411- _write_config (config , _LLAMA_STACK_CONFIG )
389+ write_llama_config (config )
412390
413391
414392@given ('Llama Stack is configured with ciphers "{ciphers}"' )
415393def configure_llama_ciphers (context : Context , ciphers : str ) -> None :
416394 """Modify run.yaml with cipher suite config."""
417- _backup_llama_config ()
418- config = _load_llama_config ()
395+ backup_llama_config ()
396+ config = load_llama_config ()
419397 provider = _find_inference_provider (context , config )
420398
421399 if "config" not in provider :
@@ -426,7 +404,7 @@ def configure_llama_ciphers(context: Context, ciphers: str) -> None:
426404 }
427405 }
428406
429- _write_config (config , _LLAMA_STACK_CONFIG )
407+ write_llama_config (config )
430408
431409
432410# --- Proxy Verification Steps ---