main.py aktualisiert

This commit is contained in:
2026-03-04 16:13:37 +00:00
parent 53e6c4b4a2
commit d9ab24d37a

71
main.py
View File

@@ -4,6 +4,9 @@ import fcntl
import subprocess import subprocess
import sqlite3 import sqlite3
import asyncio import asyncio
import openai
import google.generativeai as genai
import json
from fastapi import FastAPI, WebSocket, BackgroundTasks, Request, Form, WebSocketDisconnect from fastapi import FastAPI, WebSocket, BackgroundTasks, Request, Form, WebSocketDisconnect
from fastapi.responses import RedirectResponse from fastapi.responses import RedirectResponse
from fastapi.templating import Jinja2Templates from fastapi.templating import Jinja2Templates
@@ -17,6 +20,46 @@ templates = Jinja2Templates(directory="templates")
SSH_KEY = os.path.expanduser("~/.ssh/id_rsa") SSH_KEY = os.path.expanduser("~/.ssh/id_rsa")
DB_PATH = "cluster.db" DB_PATH = "cluster.db"
# --- AI CONFIGURATION ---
# Which backend get_ai_response() talks to: "openai", "google" or "ollama".
AI_PROVIDER = "ollama"
# NOTE(review): never commit real secrets — read them from the environment;
# the original placeholder values are kept as fallbacks for compatibility.
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", "dein-key")
GOOGLE_API_KEY = os.environ.get("GOOGLE_API_KEY", "dein-key")
OLLAMA_BASE_URL = os.environ.get("OLLAMA_BASE_URL", "http://x.x.x.x:11434/v1")  # IP of your Ollama server
# System prompt: tells the model who it is and what it can do.
SYSTEM_PROMPT = """Du bist der Pi-Orchestrator KI-Assistent.
Deine Aufgabe ist es, dem Nutzer zu helfen, Raspberry Pis zu verwalten und Docker-Container zu steuern.
Du kannst Linux-Befehle generieren. Antworte präzise und hilfsbereit."""
# --- KI FUNKTIONEN ---
# --- AI FUNCTIONS ---
async def get_ai_response(user_input):
    """Return a chat reply for *user_input* from the configured AI provider.

    Dispatches on the module-level AI_PROVIDER setting ("openai", "ollama"
    or "google") and returns the reply text, or an error string when no
    provider is configured.

    The SDK calls are synchronous/blocking; they are pushed onto a worker
    thread with asyncio.to_thread so the event loop (and all other open
    websockets) stays responsive while the model generates.
    """
    if AI_PROVIDER == "openai":
        client = openai.OpenAI(api_key=OPENAI_API_KEY)
        response = await asyncio.to_thread(
            client.chat.completions.create,
            model="gpt-4o",
            messages=[
                {"role": "system", "content": SYSTEM_PROMPT},
                {"role": "user", "content": user_input},
            ],
        )
        return response.choices[0].message.content
    if AI_PROVIDER == "ollama":
        # Ollama speaks the OpenAI wire format but needs no real API key.
        client = openai.OpenAI(base_url=OLLAMA_BASE_URL, api_key="ollama")
        response = await asyncio.to_thread(
            client.chat.completions.create,
            model="llama3",  # or your preferred model
            messages=[
                {"role": "system", "content": SYSTEM_PROMPT},
                {"role": "user", "content": user_input},
            ],
        )
        return response.choices[0].message.content
    if AI_PROVIDER == "google":
        genai.configure(api_key=GOOGLE_API_KEY)
        model = genai.GenerativeModel('gemini-1.5-flash')
        # Gemini takes the system prompt inline with the user text here.
        response = await asyncio.to_thread(
            model.generate_content, f"{SYSTEM_PROMPT}\n\nNutzer: {user_input}"
        )
        return response.text
    return "Fehler: Kein KI-Provider konfiguriert."
# --- DATENBANK INITIALISIERUNG --- # --- DATENBANK INITIALISIERUNG ---
def init_db(): def init_db():
conn = sqlite3.connect(DB_PATH) conn = sqlite3.connect(DB_PATH)
@@ -161,27 +204,19 @@ async def terminal_websocket(websocket: WebSocket, ip: str):
os.close(master_fd) os.close(master_fd)
os.close(slave_fd) os.close(slave_fd)
# --- WEBSOCKET CHAT ---
@app.websocket("/ws/chat")
async def websocket_chat(websocket: WebSocket):
    """Relay chat messages between the browser and the configured AI.

    Receives one text message at a time, forwards it to get_ai_response(),
    and sends the reply back on the same socket until the client leaves.
    """
    await websocket.accept()
    try:
        while True:
            data = await websocket.receive_text()
            # Ask the selected AI provider for a reply.
            ai_msg = await get_ai_response(data)
            await websocket.send_text(ai_msg)
    except WebSocketDisconnect:
        # Normal client disconnect — not an error, nothing to clean up.
        pass
    except Exception as e:
        print(f"Chat Error: {e}")
# --- Status in DB aktualisieren Helper --- # --- Status in DB aktualisieren Helper ---
def update_node_status(ip, new_status): def update_node_status(ip, new_status):