pi_admin/main.py aktualisiert
This commit is contained in:
@@ -0,0 +1,72 @@
|
|||||||
|
import json
|
||||||
|
import os
|
||||||
|
from fastapi import FastAPI, Request, WebSocket
|
||||||
|
from fastapi.templating import Jinja2Templates
|
||||||
|
from fastapi.staticfiles import StaticFiles
|
||||||
|
import paramiko
|
||||||
|
from python_on_whales import DockerClient
|
||||||
|
|
||||||
|
# FastAPI application instance serving the admin UI and the chat websocket.
app = FastAPI()
# Jinja2 templates for the HTML frontend; expects a ./templates directory
# relative to the process working directory.
templates = Jinja2Templates(directory="templates")
|
||||||
|
|
||||||
|
# Storage for nodes (simple JSON file).
NODES_FILE = "nodes.json"


def load_nodes():
    """Return the node registry as a dict, or {} if the file doesn't exist.

    Keys are node IPs; values are per-node metadata dicts
    (e.g. {"name": ..., "status": ...} as written by add_node).
    """
    if os.path.exists(NODES_FILE):
        # Explicit UTF-8 so non-ASCII node names round-trip on any platform
        # (the default locale encoding on Windows would corrupt them).
        with open(NODES_FILE, "r", encoding="utf-8") as f:
            return json.load(f)
    return {}


def save_nodes(nodes):
    """Overwrite the registry file with *nodes* serialized as JSON."""
    with open(NODES_FILE, "w", encoding="utf-8") as f:
        json.dump(nodes, f)
|
||||||
|
|
||||||
|
@app.get("/")
async def index(request: Request):
    """Render the dashboard page with all currently registered nodes."""
    context = {"request": request, "nodes": load_nodes()}
    return templates.TemplateResponse("index.html", context)
|
||||||
|
|
||||||
|
# --- Node Management ---


@app.post("/add_node")
async def add_node(data: dict):
    """Register a new node keyed by its IP address.

    Expects a JSON body with 'ip' and 'name'. The node starts in the
    'connecting' state; the actual bootstrap would run in the background.

    Returns {"status": "added"} on success, or {"status": "error", ...}
    when required fields are missing (previously an unhandled KeyError
    turned such payloads into an HTTP 500).
    """
    ip = data.get("ip")
    name = data.get("name")
    if not ip or not name:
        # Reject incomplete payloads explicitly instead of crashing.
        return {"status": "error", "detail": "'ip' and 'name' are required"}
    nodes = load_nodes()
    nodes[ip] = {"name": name, "status": "connecting"}
    save_nodes(nodes)
    # The bootstrap process would be started in the background here.
    return {"status": "added"}
|
||||||
|
|
||||||
|
# --- SSH & Command Logic ---


def run_ssh_cmd(ip, user, password, cmd):
    """Run *cmd* with passwordless sudo on *ip* over SSH; return its output.

    On any failure (connect timeout, auth error, ...) the exception text is
    returned as a plain string instead of raising, so callers can display
    it directly in the chat UI.

    NOTE(review): AutoAddPolicy skips host-key verification (MITM risk) —
    confirm this is acceptable for the trusted-LAN use case.
    """
    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        ssh.connect(ip, username=user, password=password, timeout=10)
        # sudo -n: fail immediately instead of hanging on a password prompt.
        stdin, stdout, stderr = ssh.exec_command(f"sudo -n {cmd}")
        output = stdout.read().decode()
        # Surface stderr as well, so failing commands don't look like
        # silent successes with empty output.
        err = stderr.read().decode()
        if err:
            output += err
        return output
    except Exception as e:
        return str(e)
    finally:
        # Always release the connection — the original only closed it on
        # the success path and leaked the socket when an exception fired.
        ssh.close()
|
||||||
|
|
||||||
|
# --- Chat & AI Logic ---


@app.websocket("/ws/chat")
async def chat_endpoint(websocket: WebSocket):
    """Chat websocket: receives user text and replies with (simulated) AI
    actions. Currently only recognizes the German phrase "installiere
    docker" and triggers a Docker install on a hard-coded node; everything
    else gets an echo reply. Loops until the client disconnects (the raised
    WebSocketDisconnect ends the handler).
    """
    await websocket.accept()
    while True:
        user_msg = await websocket.receive_text()

        # SIMULATED AI LOGIC (the real LLM call goes here).
        # The AI would decide e.g.: "I must install Docker on node 192.168.1.10".
        if "installiere docker" in user_msg.lower():
            # Example flow with a fixed target node.
            ip = "192.168.1.10"  # would be extracted by the AI
            await websocket.send_text(f"🤖 Starte Docker-Installation auf {ip}...")
            # NOTE(review): hard-coded default credentials ("pi"/"raspberry")
            # and piping a remote script into sh — replace with per-node
            # stored credentials/keys before real use.
            # NOTE(review): run_ssh_cmd is synchronous and blocks the event
            # loop for the whole install — consider run_in_executor.
            result = run_ssh_cmd(ip, "pi", "raspberry", "curl -sSL https://get.docker.com | sh")
            await websocket.send_text(f"✅ Ergebnis: {result[:100]}...")
        else:
            await websocket.send_text(f"🤖 Ich habe empfangen: '{user_msg}'. Wie kann ich helfen?")
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Dev entry point: import here so the module can be served by an
    # external ASGI runner without requiring uvicorn at import time.
    import uvicorn
    # 0.0.0.0: listen on all interfaces so other LAN hosts reach the UI.
    uvicorn.run(app, host="0.0.0.0", port=8000)
|
||||||
Reference in New Issue
Block a user