1st try of the backend

2026-01-19 22:02:50 +01:00
commit b90621f665
4 changed files with 125 additions and 0 deletions

backend/llm.py (new file, +13)

@@ -0,0 +1,13 @@
import httpx

OLLAMA_URL = "http://localhost:11434"

async def ollama_chat(model: str, messages: list[dict]) -> str:
    async with httpx.AsyncClient(timeout=120) as client:
        r = await client.post(
            f"{OLLAMA_URL}/api/chat",
            json={"model": model, "messages": messages, "stream": False},
        )
        r.raise_for_status()
        data = r.json()
        return data["message"]["content"]
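For reference, a minimal usage sketch of ollama_chat, not part of this commit: it assumes a local Ollama server is running on the default port, that backend/ is importable as a package, and that a model such as "llama3" has already been pulled; those names are illustrative.

import asyncio

from backend.llm import ollama_chat  # assumes backend/ is importable as a package


async def main() -> None:
    # "llama3" is a placeholder; any model already pulled into the local Ollama works.
    reply = await ollama_chat(
        "llama3",
        [{"role": "user", "content": "Say hello in one sentence."}],
    )
    print(reply)


if __name__ == "__main__":
    asyncio.run(main())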