import importlib
import os
import unittest

import httpx


class TestLlmStatusEndpoint(unittest.IsolatedAsyncioTestCase):
    """Exercise /api/llm/status against the in-process ASGI app.

    The backend reads provider settings from the environment at import time,
    so the fixture overrides the environment, reloads ``backend.config`` and
    ``backend.main``, and mounts the reloaded app on an httpx ASGI transport.
    """

    async def asyncSetUp(self):
        # Snapshot the full environment so asyncTearDown can restore it exactly.
        self._old_env = dict(os.environ)
        os.environ["OPENAI_COMPAT_BASE_URL"] = "http://localhost:11434"
        # Clear this so OPENAI_COMPAT_BASE_URL is used
        os.environ.pop("USE_LOCAL_OLLAMA", None)

        import backend.config as config
        import backend.main as main

        # Reload config first (main presumably reads it at import time — its
        # reload alone would keep stale settings), then main, so both pick up
        # the environment overrides above.
        importlib.reload(config)
        self.main = importlib.reload(main)

        self.client = httpx.AsyncClient(
            transport=httpx.ASGITransport(app=self.main.app),
            base_url="http://test",
        )

    async def asyncTearDown(self):
        await self.client.aclose()
        os.environ.clear()
        os.environ.update(self._old_env)

        # FIX: asyncSetUp reloaded backend.config / backend.main under the
        # test-only environment; restoring os.environ alone leaves those
        # modules globally polluted for any later test in the same process.
        # Reload them once more under the restored environment.
        import backend.config as config
        import backend.main as main

        importlib.reload(config)
        importlib.reload(main)

    async def test_status_without_probe(self):
        """GET /api/llm/status reports the configured provider and base URL."""
        resp = await self.client.get("/api/llm/status")
        self.assertEqual(resp.status_code, 200)
        data = resp.json()
        self.assertEqual(data["provider"], "openai_compat")
        self.assertEqual(data["base_url"], "http://localhost:11434")