1st try of the backend
This commit is contained in:
71
backend/app.py
Normal file
71
backend/app.py
Normal file
@@ -0,0 +1,71 @@
|
||||
from fastapi import FastAPI
|
||||
from pydantic import BaseModel
|
||||
import json
|
||||
|
||||
from llm import ollama_chat
|
||||
from tools import web_search, electronics_ohm, run_command
|
||||
|
||||
# FastAPI application exposing the chat backend.
app = FastAPI()


class ChatIn(BaseModel):
    """Request body for POST /chat."""

    message: str  # the user's message text
    model: str = "llama3.1"  # Ollama model name — change if needed
    mode: str = "AUTO"  # AUTO / DEV / ELEC / INFRA / WEB — not read by the /chat handler in this file
|
||||
|
||||
SYSTEM = """Tu es un assistant personnel pour Nino.
|
||||
Tu dois être pratique, structuré, et orienté action.
|
||||
Si une demande nécessite une recherche web, utilise l'outil web_search.
|
||||
Si c'est de l'électronique, tu peux utiliser electronics_ohm ou demander les valeurs manquantes.
|
||||
Si une commande système est utile, propose run_command mais explique ce que ça fait.
|
||||
Réponds en français.
|
||||
"""
|
||||
|
||||
TOOLS_SPEC = """
|
||||
Outils disponibles (à appeler en JSON strict sur une seule ligne) :
|
||||
|
||||
1) web_search: {"tool":"web_search","query":"...","max_results":5}
|
||||
2) electronics_ohm: {"tool":"electronics_ohm","V":null,"I":0.02,"R":220}
|
||||
3) run_command: {"tool":"run_command","cmd":"docker ps"}
|
||||
|
||||
Si tu appelles un outil, n'écris QUE le JSON.
|
||||
"""
|
||||
|
||||
@app.post("/chat")
async def chat(inp: ChatIn):
    """Single-turn chat with an optional one-shot tool call.

    Flow:
      1. Send the two system prompts plus the user message to the model.
      2. If the reply is a one-line JSON tool call (per TOOLS_SPEC), run
         the tool, feed its result back as a "tool" message, and ask the
         model for a final, grounded answer.
      3. Otherwise return the model's reply directly.

    Returns:
        dict with keys "answer", "tool_used" (None when no tool ran),
        and "tool_result" when a tool ran.
    """
    messages = [
        {"role": "system", "content": SYSTEM},
        {"role": "system", "content": TOOLS_SPEC},
        {"role": "user", "content": inp.message},
    ]

    # 1) The model either answers directly or emits a JSON tool call.
    first = (await ollama_chat(inp.model, messages)).strip()

    # 2) JSON tool-call path.
    if first.startswith("{") and '"tool"' in first:
        try:
            call = json.loads(first)
        except json.JSONDecodeError:
            # The model produced malformed JSON: return it verbatim
            # instead of crashing the request with an unhandled 500.
            return {"answer": first, "tool_used": None}

        # .get() instead of ["tool"]: valid JSON without a "tool" key
        # falls through to the direct answer rather than raising KeyError.
        tool = call.get("tool")

        # Dispatch to the requested tool; unknown tool names fall through
        # to the direct-answer path, as in the original per-branch code.
        if tool == "web_search":
            res = web_search(call["query"], call.get("max_results", 5))
        elif tool == "electronics_ohm":
            res = electronics_ohm(call.get("V"), call.get("I"), call.get("R"))
        elif tool == "run_command":
            res = run_command(call["cmd"])
        else:
            return {"answer": first, "tool_used": None}

        # Shared tail (the original duplicated it verbatim in all three
        # branches): feed the tool output back so the model can compose
        # the final answer from real data.
        messages.append({"role": "assistant", "content": first})
        messages.append(
            {"role": "tool", "content": json.dumps(res, ensure_ascii=False)}
        )
        final = await ollama_chat(inp.model, messages)
        return {"answer": final, "tool_used": tool, "tool_result": res}

    # Direct answer, no tool involved.
    return {"answer": first, "tool_used": None}
|
||||
Reference in New Issue
Block a user