First attempt at the backend
This commit is contained in:
13
backend/llm.py
Normal file
13
backend/llm.py
Normal file
@@ -0,0 +1,13 @@
|
||||
import httpx
|
||||
|
||||
OLLAMA_URL = "http://localhost:11434"
|
||||
|
||||
async def ollama_chat(model: str, messages: list[dict], *, timeout: float = 120) -> str:
    """Send a chat request to a local Ollama server and return the reply text.

    Args:
        model: Name of the Ollama model to run (e.g. "llama3").
        messages: Chat history as a list of role/content dicts, per the
            Ollama ``/api/chat`` request schema.
        timeout: Request timeout in seconds. Defaults to 120, matching the
            previous hard-coded value, so existing callers are unaffected.

    Returns:
        The assistant's reply text, i.e. ``data["message"]["content"]``.

    Raises:
        httpx.HTTPStatusError: If the server returns a 4xx/5xx status.
        httpx.TimeoutException: If the request exceeds *timeout* seconds.
        KeyError: If the response JSON lacks the expected ``message`` shape.
    """
    async with httpx.AsyncClient(timeout=timeout) as client:
        # "stream": False asks Ollama for one complete JSON response
        # instead of a stream of newline-delimited partial chunks.
        response = await client.post(
            f"{OLLAMA_URL}/api/chat",
            json={"model": model, "messages": messages, "stream": False},
        )
        response.raise_for_status()
        return response.json()["message"]["content"]
|
||||
Reference in New Issue
Block a user