import httpx

# Default Ollama server endpoint (the local daemon's standard port).
OLLAMA_URL = "http://localhost:11434"


async def ollama_chat(
    model: str,
    messages: list[dict],
    *,
    base_url: str = OLLAMA_URL,
    timeout: float = 120,
) -> str:
    """Send a non-streaming chat request to an Ollama server and return the reply text.

    Args:
        model: Name of the Ollama model to run (e.g. "llama3").
        messages: Chat history as a list of {"role": ..., "content": ...} dicts,
            passed through unchanged to the Ollama /api/chat endpoint.
        base_url: Base URL of the Ollama server; defaults to the local daemon.
        timeout: Timeout in seconds applied to the whole HTTP exchange.

    Returns:
        The assistant message text from the response ("message"."content").

    Raises:
        httpx.HTTPStatusError: If the server responds with a 4xx/5xx status.
        httpx.TimeoutException: If the request exceeds *timeout*.
        KeyError: If the response JSON lacks the expected "message" shape.
    """
    async with httpx.AsyncClient(timeout=timeout) as client:
        r = await client.post(
            f"{base_url}/api/chat",
            json={"model": model, "messages": messages, "stream": False},
        )
        r.raise_for_status()
        data = r.json()
        # With "stream": False the server returns one JSON object containing
        # the complete assistant message, so a single extraction suffices.
        return data["message"]["content"]