Webseite überarbeitet und Telegram Bot funktion hinzugefügt #1

Merged
pi-farm merged 59 commits from dev into main 2026-03-07 23:50:03 +00:00
Showing only changes of commit 3d5504b5e8 - Show all commits

744
main.py
View File

@@ -1,353 +1,435 @@
import asyncio
import fcntl
import json
import os
import pty
import re
import shlex
import sqlite3
import subprocess

import httpx
import openai
from dotenv import load_dotenv, set_key
from fastapi import FastAPI, WebSocket, BackgroundTasks, Request, Form, WebSocketDisconnect
from fastapi.responses import RedirectResponse
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
from google import genai
from google.genai import types
# Load environment variables from .env before reading any config below.
load_dotenv()
app = FastAPI()
static_path = os.path.join(os.path.dirname(__file__), "static")
app.mount("/static", StaticFiles(directory=static_path), name="static")
templates = Jinja2Templates(directory="templates")
# Private key used for all outgoing SSH connections to cluster nodes.
SSH_KEY = os.path.expanduser("~/.ssh/id_rsa")
DB_PATH = "cluster.db"
# Rolling conversation buffer shared by the chat endpoint and remote tasks.
chat_history = []
PROMPT_FILE = "system_prompt.txt"
ENV_FILE = os.path.join(os.path.dirname(__file__), ".env")
# AI CONFIGURATION — provider plus per-provider model defaults; all of these
# can be changed at runtime via /api/settings, which persists to .env.
AI_PROVIDER = os.getenv("AI_PROVIDER", "google").lower()
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", "")
GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY", "")
OLLAMA_BASE_URL = os.getenv("OLLAMA_BASE_URL", "http://127.0.0.1:11434/v1")
GOOGLE_MODEL = os.getenv("GOOGLE_MODEL", "gemini-2.0-flash")
OPENAI_MODEL = os.getenv("OPENAI_MODEL", "gpt-4o")
OLLAMA_MODEL = os.getenv("OLLAMA_MODEL", "llama3")
# --- DATABASE INITIALISATION (extended schema) ---
def init_db():
    """Create the ``nodes`` table if it does not exist yet.

    Schema extended with sudo_password, os, arch and docker_installed
    compared to the original version.
    """
    conn = sqlite3.connect(DB_PATH)
    try:
        conn.execute('''
            CREATE TABLE IF NOT EXISTS nodes (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                name TEXT,
                ip TEXT UNIQUE,
                user TEXT,
                sudo_password TEXT,
                os TEXT DEFAULT 'Unbekannt',
                arch TEXT DEFAULT 'Unbekannt',
                docker_installed INTEGER DEFAULT 0,
                status TEXT
            )
        ''')
        conn.commit()
    finally:
        # Close even if the DDL raises, so the handle never leaks.
        conn.close()
init_db()
def get_db():
    """Open a connection to the cluster DB with dict-like row access."""
    connection = sqlite3.connect(DB_PATH)
    connection.row_factory = sqlite3.Row
    return connection
def get_system_prompt():
    """Build the AI system prompt, embedding the current node inventory.

    Reads the template from PROMPT_FILE when present (falling back to a
    built-in German default) and substitutes the ``{node_info}``
    placeholder with one summary line per registered node.

    NOTE(review): HTML fragments from templates/index.html were interleaved
    into this function in the extracted diff; they have been removed here.
    """
    conn = get_db()
    nodes = conn.execute('SELECT * FROM nodes').fetchall()
    conn.close()
    node_info = ""
    for n in nodes:
        docker_str = "Ja" if n['docker_installed'] else "Nein"
        node_info += f"- Name: {n['name']}, IP: {n['ip']}, User: {n['user']}, OS: {n['os']}, Arch: {n['arch']}, Docker: {docker_str}\n"
    if os.path.exists(PROMPT_FILE):
        with open(PROMPT_FILE, "r", encoding="utf-8") as f:
            template = f.read()
    else:
        template = "Du bist ein Cluster-Orchestrator. Nodes:\n{node_info}\nBefehle via <EXECUTE target=\"IP\">cmd</EXECUTE>"
        print(f"⚠️ Warnung: {PROMPT_FILE} fehlt.")
    return template.replace("{node_info}", node_info)
# --- AI LOGIC ---
async def get_ai_response(user_input, system_prompt):
    """Send *user_input* to the configured AI provider and return its reply.

    Maintains the module-level ``chat_history`` (trimmed to the last 30
    messages).  Fixes: the Google history comprehension referenced the
    undefined name ``m`` (loop variable is ``msg``), and the synchronous
    SDK calls are moved off the event loop via ``asyncio.to_thread`` so
    websocket handlers stay responsive while the model answers.
    """
    global chat_history
    chat_history.append({"role": "user", "content": user_input})
    chat_history = chat_history[-30:]
    ai_msg = ""
    try:
        if AI_PROVIDER in ["openai", "ollama"]:
            url = OLLAMA_BASE_URL if AI_PROVIDER == "ollama" else None
            # Ollama speaks the OpenAI protocol under the /v1 prefix.
            if url and not url.endswith('/v1'):
                url = url.rstrip('/') + '/v1'
            key = "ollama" if AI_PROVIDER == "ollama" else OPENAI_API_KEY
            model_to_use = OLLAMA_MODEL if AI_PROVIDER == "ollama" else OPENAI_MODEL

            def _call_openai():
                client = openai.OpenAI(base_url=url, api_key=key)
                response = client.chat.completions.create(
                    model=model_to_use,
                    messages=[{"role": "system", "content": system_prompt}] + chat_history,
                )
                return response.choices[0].message.content

            ai_msg = await asyncio.to_thread(_call_openai)
        elif AI_PROVIDER == "google":
            def _call_google():
                client = genai.Client(api_key=GOOGLE_API_KEY)
                # All but the newest message become the chat history;
                # the newest one is sent via send_message below.
                google_history = [
                    types.Content(
                        role="user" if msg["role"] == "user" else "model",
                        parts=[types.Part.from_text(text=msg["content"])],
                    )
                    for msg in chat_history[:-1]
                ]
                chat = client.chats.create(
                    model=GOOGLE_MODEL,
                    config=types.GenerateContentConfig(system_instruction=system_prompt),
                    history=google_history,
                )
                return chat.send_message(user_input).text

            ai_msg = await asyncio.to_thread(_call_google)
    except Exception as e:
        ai_msg = f"KI Fehler: {e}"
    chat_history.append({"role": "assistant", "content": ai_msg})
    return ai_msg
# --- WebSocket manager ---
class ConnectionManager:
    """Tracks open log/chat websockets and fans messages out to all of them."""

    def __init__(self):
        self.active_connections = []

    async def connect(self, ws: WebSocket):
        await ws.accept()
        self.active_connections.append(ws)

    def disconnect(self, ws: WebSocket):
        # Guard against double-disconnects: list.remove would raise
        # ValueError if the socket was already removed.
        if ws in self.active_connections:
            self.active_connections.remove(ws)

    async def broadcast(self, msg: str):
        for c in self.active_connections:
            try:
                await c.send_text(msg)
            except Exception:
                # Best effort: one dead client must not break the broadcast.
                pass

manager = ConnectionManager()
# --- EXTENDED NODE BOOTSTRAPPING (inventory) ---
async def bootstrap_node(ip, user, password):
    """Copy the local SSH key to a new node, then collect OS/arch/Docker facts.

    Runs as a FastAPI background task; progress is streamed to all connected
    log websockets via ``manager.broadcast``.

    NOTE(review): CSS fragments from templates/index.html were interleaved
    into this function in the extracted diff; they have been removed here.
    SECURITY: password/user/ip are interpolated into shell commands run with
    ``shell=True``; they come from the local admin form, but shlex.quote-ing
    them would be safer.
    """
    await manager.broadcast(f"🔑 SSH-Handshake für {ip}...")
    # 1. Install our public key on the node (needs sshpass for the first login).
    ssh_copy_cmd = f"sshpass -p '{password}' ssh-copy-id -o StrictHostKeyChecking=no -i {SSH_KEY}.pub {user}@{ip}"
    proc = subprocess.run(ssh_copy_cmd, shell=True, capture_output=True, text=True)
    if proc.returncode != 0:
        await manager.broadcast(f"❌ Fehler beim Key-Copy für {ip}: {proc.stderr}")
        return
    # 2. Inventory: one command chain yields arch, OS name and Docker presence.
    await manager.broadcast(f"🔍 Inventur auf {ip} wird durchgeführt...")
    inspect_cmd = "uname -m && (lsb_release -ds || cat /etc/os-release | grep PRETTY_NAME | cut -d'=' -f2) && (command -v docker >/dev/null 2>&1 && echo '1' || echo '0')"
    ssh_cmd = f"ssh -o StrictHostKeyChecking=no {user}@{ip} \"{inspect_cmd}\""
    try:
        output = subprocess.check_output(ssh_cmd, shell=True).decode().strip().split('\n')
        arch = output[0] if len(output) > 0 else "Unbekannt"
        # Normalise architecture names for display.
        if "aarch64" in arch.lower(): arch = "arm64"
        elif "x86_64" in arch.lower(): arch = "x86-64"
        os_name = output[1].replace('"', '') if len(output) > 1 else "Linux"
        docker_val = int(output[2]) if len(output) > 2 else 0
        status = "Docker Aktiv" if docker_val else "Bereit (Kein Docker)"
        # Persist the collected facts.
        conn = get_db()
        conn.execute('''
            UPDATE nodes SET os = ?, arch = ?, docker_installed = ?, status = ?
            WHERE ip = ?
        ''', (os_name, arch, docker_val, status, ip))
        conn.commit()
        conn.close()
        await manager.broadcast(f"✅ Node {ip} konfiguriert ({os_name}, {arch}).")
    except Exception as e:
        await manager.broadcast(f"⚠️ Inventur auf {ip} unvollständig: {e}")
#install-log {
flex: 1;
font-family: monospace;
font-size: 11px;
color: #4ade80;
padding: 10px;
overflow-y: auto;
background: #0f172a;
}
# --- ROUTES ---
.widget-header {
background: #334155;
padding: 8px 12px;
cursor: move;
font-size: 12px;
font-weight: bold;
display: flex;
justify-content: space-between;
align-items: center;
user-select: none;
}
@app.get("/")
async def index(request: Request):
    """Render the dashboard with the full node list."""
    db = get_db()
    all_nodes = db.execute('SELECT * FROM nodes').fetchall()
    db.close()
    return templates.TemplateResponse("index.html", {"request": request, "nodes": all_nodes})
body {
margin: 0;
padding-top: 60px; /* Platz für die Toolbar */
}
@app.get("/api/node/{node_id}")
async def get_node(node_id: int):
    """Return one node as a plain dict, or an empty dict if unknown."""
    db = get_db()
    row = db.execute('SELECT * FROM nodes WHERE id = ?', (node_id,)).fetchone()
    db.close()
    if row is None:
        return {}
    return dict(row)
.top-toolbar {
position: fixed;
top: 0;
left: 0;
width: 100%;
background-color: #1e293b;
color: white;
height: 55px;
display: flex;
align-items: center;
justify-content: space-between;
padding: 0 20px;
box-sizing: border-box;
box-shadow: 0 4px 6px -1px rgba(0, 0, 0, 0.3);
z-index: 1000;
border-bottom: 1px solid #334155;
}
@app.post("/add_node")
async def add_node(background_tasks: BackgroundTasks, name: str = Form(...), ip: str = Form(...), user: str = Form(...), password: str = Form(...)):
    """Register a node and kick off SSH bootstrapping in the background.

    A duplicate IP (UNIQUE constraint) is silently ignored and just
    redirects back to the dashboard.
    NOTE(review): the sudo/SSH password is stored in plain text in SQLite.
    """
    db = get_db()
    try:
        db.execute('''
            INSERT INTO nodes (name, ip, user, sudo_password, status)
            VALUES (?, ?, ?, ?, ?)
        ''', (name, ip, user, password, "Kopplung..."))
        db.commit()
        background_tasks.add_task(bootstrap_node, ip, user, password)
    except sqlite3.IntegrityError:
        pass
    finally:
        db.close()
    return RedirectResponse(url="/", status_code=303)
.toolbar-title {
font-weight: bold;
font-size: 1.1em;
color: #38bdf8;
}
@app.post("/edit_node/{node_id}")
async def edit_node(node_id: int, name: str = Form(...), ip: str = Form(...), user: str = Form(...)):
    """Update name/IP/user of an existing node, then return to the dashboard."""
    db = get_db()
    db.execute('UPDATE nodes SET name=?, ip=?, user=? WHERE id=?', (name, ip, user, node_id))
    db.commit()
    db.close()
    return RedirectResponse(url="/", status_code=303)
.toolbar-controls {
display: flex;
align-items: center;
gap: 10px;
}
@app.post("/remove_node/{node_id}")
async def remove_node(node_id: int):
    """Delete a node record, then return to the dashboard."""
    db = get_db()
    db.execute('DELETE FROM nodes WHERE id = ?', (node_id,))
    db.commit()
    db.close()
    return RedirectResponse(url="/", status_code=303)
.toolbar-controls label {
white-space: nowrap;
font-size: 12px;
color: #94a3b8;
}
@app.get("/refresh_status/{node_id}")
async def refresh_status(node_id: int):
    """Re-run the inventory probe for one node and persist the result.

    Returns the fresh status/os/arch/docker facts; on SSH failure the node
    is marked "Offline" but its previously stored facts are returned.

    Fixes: the DB connection was leaked on the early "not node" return, and
    CSS/HTML fragments interleaved into this function by the extracted diff
    have been removed.
    """
    conn = get_db()
    node = conn.execute('SELECT * FROM nodes WHERE id = ?', (node_id,)).fetchone()
    if not node:
        conn.close()  # previously leaked on this path
        return {"status": "Offline"}
    inspect_cmd = "uname -m && (lsb_release -ds || cat /etc/os-release | grep PRETTY_NAME | cut -d'=' -f2) && (command -v docker >/dev/null 2>&1 && echo '1' || echo '0')"
    # Short ConnectTimeout keeps the UI responsive for unreachable nodes.
    ssh_cmd = f"ssh -o StrictHostKeyChecking=no -o ConnectTimeout=3 {node['user']}@{node['ip']} \"{inspect_cmd}\""
    try:
        output = subprocess.check_output(ssh_cmd, shell=True).decode().strip().split('\n')
        arch = output[0]; os_name = output[1].replace('"', ''); docker_val = int(output[2])
        if "aarch64" in arch.lower(): arch = "arm64"
        elif "x86_64" in arch.lower(): arch = "x86-64"
        new_status = "Docker Aktiv" if docker_val else "Bereit (Kein Docker)"
        conn.execute('UPDATE nodes SET status=?, os=?, arch=?, docker_installed=? WHERE id=?',
                     (new_status, os_name, arch, docker_val, node_id))
        conn.commit()
        result = {"status": new_status, "os": os_name, "arch": arch, "docker": docker_val}
    except Exception:
        # Any SSH/parse failure marks the node offline; keep the old facts.
        new_status = "Offline"
        conn.execute('UPDATE nodes SET status=? WHERE id=?', (new_status, node_id))
        conn.commit()
        result = {"status": new_status, "os": node['os'], "arch": node['arch'], "docker": node['docker_installed']}
    conn.close()
    return result
<div class="toolbar-controls">
<button onclick="addNode()" class="btn-tool add-node-btn">
<span>+</span> Node hinzufügen
</button>
# --- WebSocket terminal / chat / log integration ---
@app.websocket("/ws/install_logs")
async def log_websocket(websocket: WebSocket):
    """Register a log-stream socket and keep it until the client disconnects."""
    await manager.connect(websocket)
    try:
        while True:
            await websocket.receive_text()
    except WebSocketDisconnect:
        manager.disconnect(websocket)
<div class="h-6 w-px bg-slate-700 mx-2"></div>
@app.websocket("/ws/terminal/{ip}")
async def terminal_websocket(websocket: WebSocket, ip: str):
    # Bridges a browser xterm.js session to an interactive SSH login on the
    # node, using a local PTY so the remote shell behaves like a real TTY.
    await websocket.accept()
    conn = get_db(); node = conn.execute('SELECT * FROM nodes WHERE ip = ?', (ip,)).fetchone(); conn.close()
    if not node: await websocket.close(); return
    master_fd, slave_fd = pty.openpty()
    # "-t" forces TTY allocation on the remote side; the SSH child gets the
    # PTY slave end for stdin/stdout/stderr.
    proc = await asyncio.create_subprocess_exec("ssh", "-o", "StrictHostKeyChecking=no", "-t", f"{node['user']}@{ip}", stdin=slave_fd, stdout=slave_fd, stderr=slave_fd)
    async def pty_to_ws():
        # Non-blocking reads plus short sleeps poll the PTY master without
        # stalling the event loop.
        fl = fcntl.fcntl(master_fd, fcntl.F_GETFL); fcntl.fcntl(master_fd, fcntl.F_SETFL, fl | os.O_NONBLOCK)
        try:
            while True:
                await asyncio.sleep(0.01)
                try:
                    data = os.read(master_fd, 1024).decode(errors='ignore')
                    if data: await websocket.send_text(data)
                except BlockingIOError: continue
        except: pass
    async def ws_to_pty():
        # Browser keystrokes are written straight into the PTY master.
        try:
            while True:
                data = await websocket.receive_text(); os.write(master_fd, data.encode())
        except: pass
    try: await asyncio.gather(pty_to_ws(), ws_to_pty())
    finally:
        # Terminate SSH (if still running) and release both PTY ends.
        if proc.returncode is None: proc.terminate()
        os.close(master_fd); os.close(slave_fd)
<label for="ai-provider">Provider:</label>
<select id="ai-provider" onchange="updateModelDropdown(false)">
<option value="google">Google Gemini</option>
<option value="openai">OpenAI</option>
<option value="ollama">Ollama</option>
</select>
<label for="ai-model">Modell:</label>
<select id="ai-model"></select>
<div id="ollama-url-container">
<label for="ollama-url">URL:</label>
<input type="text" id="ollama-url" onblur="updateModelDropdown(false)" placeholder="http://192.168.x.x:11434/v1">
</div>
<button class="btn-tool save-btn" onclick="saveSettings()">Speichern</button>
<span id="settings-status"></span>
</div>
</div>
@app.websocket("/ws/chat")
async def chat_endpoint(websocket: WebSocket):
    # Chat loop: forwards user messages to the AI, strips <EXECUTE> tags out
    # of the reply, runs the extracted commands on matching nodes, then asks
    # the AI for a summary of the combined results.
    await websocket.accept()
    try:
        while True:
            user_msg = await websocket.receive_text()
            ai_response = await get_ai_response(user_msg, get_system_prompt())
            # <EXECUTE target="ip-or-name">cmd</EXECUTE> blocks are the AI's
            # command channel; the remaining text is shown to the user.
            commands = re.findall(r'<EXECUTE target="(.*?)">(.*?)</EXECUTE>', ai_response, re.I | re.S)
            clean_msg = re.sub(r'<EXECUTE.*?>.*?</EXECUTE>', '', ai_response, flags=re.I | re.S).strip()
            if clean_msg: await websocket.send_text(clean_msg)
            if commands:
                tasks = []
                for target, cmd in commands:
                    # The target may reference a node by IP or by name.
                    conn = get_db(); n = conn.execute('SELECT * FROM nodes WHERE ip=? OR name=?', (target.strip(), target.strip())).fetchone(); conn.close()
                    if n: tasks.append(run_remote_task(n['ip'], n['user'], cmd.strip()))
                if tasks:
                    await websocket.send_text(" *Führe Befehle aus...*")
                    await asyncio.gather(*tasks)
                    summary = await get_ai_response("Zusammenfassung der Ergebnisse?", get_system_prompt())
                    await websocket.send_text(f"--- Info ---\n{summary}")
    # NOTE(review): bare except also hides real errors, not just disconnects.
    except: pass
<div class="flex h-screen">
<div class="w-64 bg-slate-900 border-r border-slate-800 p-4 flex flex-col">
<div id="node-list" class="flex-1 overflow-y-auto space-y-2">
{% for node in nodes %}
<div class="p-3 bg-slate-800 rounded border border-slate-700 relative group" id="node-card-{{ node.id }}">
<div class="flex justify-between items-start">
<div class="text-sm font-bold">{{ node.name }}</div>
<form action="/remove_node/{{ node.id }}" method="post" onsubmit="return confirm('Node wirklich entfernen?')">
<button type="submit" class="text-slate-500 hover:text-red-500 text-xs"></button>
</form>
</div>
<div class="text-[10px] text-slate-500 font-mono flex justify-between items-center mb-1">
{{ node.ip }}
<button onclick="refreshNodeStatus({{ node.id }})" class="hover:text-blue-400">🔄</button>
</div>
async def run_remote_task(ip, user, cmd):
    """Run *cmd* on a node via SSH, streaming output to all log websockets.

    The collected output is appended to ``chat_history`` as a [SYSTEM]
    message so the AI can summarise the result afterwards.

    Fix: the command was previously wrapped in literal single quotes
    (``'{cmd}'``), which broke any AI-generated command containing a quote;
    ``shlex.quote`` quotes it robustly instead.
    """
    await manager.broadcast(f"🚀 Task: {cmd} auf {ip}")
    proc = await asyncio.create_subprocess_shell(f"ssh -o StrictHostKeyChecking=no {user}@{ip} {shlex.quote(cmd)}", stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.STDOUT)
    full_output = ""
    while True:
        line = await proc.stdout.readline()
        if not line: break
        out = line.decode('utf-8', errors='ignore').strip()
        if out: await manager.broadcast(f"🛠️ {out}"); full_output += out + "\n"
    await proc.wait()
    chat_history.append({"role": "user", "content": f"[SYSTEM] Befehl '{cmd}' auf {ip} fertig:\n{full_output or 'Kein Output'}"})
<div class="flex gap-1 mb-2">
<span id="os-{{ node.id }}" class="node-badge truncate max-w-[80px]" title="{{ node.os }}">{{ node.os }}</span>
<span id="arch-{{ node.id }}" class="node-badge">{{ node.arch }}</span>
</div>
# --- Settings API ---
@app.get("/api/settings")
async def get_settings():
    """Expose the current provider/model configuration to the frontend."""
    settings = {
        "provider": AI_PROVIDER,
        "google_model": GOOGLE_MODEL,
        "openai_model": OPENAI_MODEL,
        "ollama_model": OLLAMA_MODEL,
        "ollama_base_url": OLLAMA_BASE_URL,
    }
    return settings
<div class="mt-1 flex items-center gap-2">
<span id="led-{{ node.id }}" class="h-2 w-2 rounded-full {% if 'Aktiv' in node.status %} bg-blue-500 shadow-[0_0_8px_#3b82f6] {% else %} bg-yellow-500 {% endif %}"></span>
<span id="badge-{{ node.id }}" class="text-[9px] uppercase font-mono text-slate-300">{{ node.status }}</span>
</div>
<button onclick="openTerminal('{{ node.ip }}')" class="mt-2 w-full text-[10px] bg-slate-700 hover:bg-slate-600 py-1 rounded transition-colors">Konsole öffnen</button>
</div>
{% endfor %}
</div>
<button onclick="localStorage.removeItem('pi-orch-layout-v2'); location.reload();" class="mt-4 text-[10px] text-slate-500 hover:text-white uppercase text-center w-full">Layout Reset</button>
</div>
@app.post("/api/settings")
async def update_settings(request: Request):
    # Persist provider/model/URL changes both in-process (module globals)
    # and in the .env file so they survive a restart.
    global AI_PROVIDER, GOOGLE_MODEL, OPENAI_MODEL, OLLAMA_MODEL, OLLAMA_BASE_URL
    data = await request.json()
    provider = data.get("provider")
    if provider:
        AI_PROVIDER = provider; set_key(ENV_FILE, "AI_PROVIDER", provider)
    if data.get("model"):
        m = data.get("model")
        # The model is stored under the key of the provider it belongs to;
        # if no provider was sent, the model change is silently dropped.
        if provider == "google": GOOGLE_MODEL = m; set_key(ENV_FILE, "GOOGLE_MODEL", m)
        if provider == "openai": OPENAI_MODEL = m; set_key(ENV_FILE, "OPENAI_MODEL", m)
        if provider == "ollama": OLLAMA_MODEL = m; set_key(ENV_FILE, "OLLAMA_MODEL", m)
    if data.get("ollama_base_url"):
        u = data.get("ollama_base_url"); OLLAMA_BASE_URL = u; set_key(ENV_FILE, "OLLAMA_BASE_URL", u)
    return {"status": "success"}
<div class="flex-1 overflow-y-auto">
<div class="grid-stack">
<div class="grid-stack-item" gs-id="chat-widget" gs-w="6" gs-h="5" gs-x="0" gs-y="0">
<div class="grid-stack-item-content">
<div class="widget-header"><span>💬 KI Chat</span> <span></span></div>
<div id="chat-window" class="flex-1 p-4 overflow-y-auto text-sm space-y-2 bg-[#0f172a]/50"></div>
<div class="p-2 border-t border-slate-700 flex bg-slate-800">
<input id="user-input" type="text" class="flex-1 bg-slate-900 p-2 rounded text-xs outline-none border border-slate-700 focus:border-blue-500" placeholder="Nachricht (Enter zum Senden)..." autocomplete="off">
<button onclick="sendMessage()" class="ml-2 bg-blue-600 hover:bg-blue-500 px-4 py-1 rounded text-xs transition-colors">Senden</button>
</div>
</div>
</div>
@app.get("/api/models")
async def get_models(provider: str, url: str = ""):
    """List selectable model names for *provider*.

    *url* optionally overrides the configured Ollama base URL — the frontend
    already sends it (`/api/models?provider=...&url=...`) but the previous
    implementation silently ignored it.  Unknown providers and any
    network/auth failure degrade to an empty list for the UI.
    """
    try:
        if provider == "ollama":
            base = (url or OLLAMA_BASE_URL).replace("/v1", "").rstrip("/")
            async with httpx.AsyncClient() as client:
                r = await client.get(f"{base}/api/tags", timeout=5.0)
                return {"models": [m["name"] for m in r.json().get("models", [])]}
        elif provider == "openai":
            client = openai.AsyncOpenAI(api_key=OPENAI_API_KEY)
            r = await client.models.list()
            return {"models": sorted([m.id for m in r.data if "gpt" in m.id or "o1" in m.id])}
        elif provider == "google":
            client = genai.Client(api_key=GOOGLE_API_KEY)
            return {"models": sorted([m.name.replace("models/", "") for m in client.models.list() if 'generateContent' in m.supported_actions])}
    except Exception:
        return {"models": []}
    # Unknown provider: previously fell through and returned None (null).
    return {"models": []}
<div class="grid-stack-item" gs-id="logs-widget" gs-w="6" gs-h="5" gs-x="6" gs-y="0">
<div class="grid-stack-item-content">
<div class="widget-header"><span>📜 System Logs</span> <span></span></div>
<div id="install-log">Warte auf Aufgaben...</div>
</div>
</div>
if __name__ == "__main__":
    # Direct execution: serve the dashboard on all interfaces, port 8000.
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8000)
<div class="grid-stack-item" gs-id="term-widget" gs-w="12" gs-h="6" gs-x="0" gs-y="5">
<div class="grid-stack-item-content">
<div class="widget-header"><span>🖥 Live Terminal</span> <span></span></div>
<div id="terminal" class="terminal-container"></div>
</div>
</div>
</div>
</div>
</div>
<div id="add-node-modal" class="hidden fixed inset-0 bg-black/80 flex items-center justify-center z-[2000]">
<div class="bg-slate-800 p-6 rounded-lg border border-slate-600 w-96">
<h3 class="text-lg font-bold mb-4">Neuen Node hinzufügen</h3>
<form action="/add_node" method="post" class="space-y-4">
<input type="text" name="name" placeholder="Name (z.B. Pi-Worker-1)" class="w-full bg-slate-900 border border-slate-700 p-2 rounded text-sm" required>
<input type="text" name="ip" placeholder="IP Adresse" class="w-full bg-slate-900 border border-slate-700 p-2 rounded text-sm" required>
<input type="text" name="user" placeholder="SSH Benutzer" class="w-full bg-slate-900 border border-slate-700 p-2 rounded text-sm" required>
<input type="password" name="password" placeholder="Sudo/SSH Passwort" class="w-full bg-slate-900 border border-slate-700 p-2 rounded text-sm" required>
<div class="flex justify-end gap-2 pt-2">
<button type="button" onclick="closeAddNode()" class="px-4 py-2 text-sm text-slate-400">Abbrechen</button>
<button type="submit" class="bg-blue-600 px-4 py-2 text-sm rounded font-bold">Koppeln & Inventur</button>
</div>
</form>
</div>
</div>
<script>
let currentSettings = {};
let termDataDisposable = null;
// 1. CHAT LOGIC
function initChat(chatWs) {
    // Wire the input box to the chat websocket; sendMessage is exposed on
    // window because the send button uses an inline onclick handler.
    const input = document.getElementById('user-input');
    window.sendMessage = function() {
        const msg = input.value.trim();
        if(!msg) return;
        chatWs.send(msg);
        appendChat("Du", msg, "text-slate-400 font-bold");
        input.value = '';
        input.focus();
    };
    // Enter sends the message without submitting any surrounding form.
    input.addEventListener('keydown', (e) => { if (e.key === 'Enter') { e.preventDefault(); sendMessage(); } });
}
// 2. MODAL LOGIC — show/hide the "add node" dialog.
window.addNode = () => document.getElementById('add-node-modal').classList.remove('hidden');
window.closeAddNode = () => document.getElementById('add-node-modal').classList.add('hidden');
// 3. EXTENDED REFRESH (also updates the OS & arch badges)
window.refreshNodeStatus = async function(nodeId) {
    const badge = document.getElementById(`badge-${nodeId}`);
    const led = document.getElementById(`led-${nodeId}`);
    const osEl = document.getElementById(`os-${nodeId}`);
    const archEl = document.getElementById(`arch-${nodeId}`);
    const card = document.getElementById(`node-card-${nodeId}`);
    badge.textContent = "PRÜFE...";
    // Dim the card while the probe is running.
    card.style.opacity = "0.7";
    try {
        const response = await fetch(`/refresh_status/${nodeId}`);
        const data = await response.json();
        badge.textContent = data.status;
        osEl.textContent = data.os;
        archEl.textContent = data.arch;
        // LED colour: blue = Docker active, red = offline, yellow = ready.
        if (data.status.includes("Aktiv")) {
            led.className = "h-2 w-2 rounded-full bg-blue-500 shadow-[0_0_8px_#3b82f6]";
        } else if (data.status === "Offline") {
            led.className = "h-2 w-2 rounded-full bg-red-500 shadow-[0_0_8px_#ef4444]";
        } else {
            led.className = "h-2 w-2 rounded-full bg-yellow-500";
        }
    } catch (e) {
        badge.textContent = "FEHLER";
        led.className = "h-2 w-2 rounded-full bg-red-500";
    } finally {
        card.style.opacity = "1";
    }
};
// 4. SETTINGS & MODEL LOGIC
async function loadSettings() {
    // Fetch the saved provider config and pre-select it in the toolbar;
    // errors are ignored so a backend hiccup doesn't break the page.
    try {
        const res = await fetch('/api/settings');
        currentSettings = await res.json();
        document.getElementById('ai-provider').value = currentSettings.provider;
        document.getElementById('ollama-url').value = currentSettings.ollama_base_url || "http://127.0.0.1:11434/v1";
        updateModelDropdown(true);
    } catch (e) {}
}
async function updateModelDropdown(isInitialLoad = false) {
    // Re-populate the model <select> for the chosen provider; the Ollama
    // URL field is only visible when provider === "ollama".
    const provider = document.getElementById('ai-provider').value;
    const modelSelect = document.getElementById('ai-model');
    const urlContainer = document.getElementById('ollama-url-container');
    let ollamaUrl = document.getElementById('ollama-url').value;
    urlContainer.style.display = (provider === "ollama") ? "flex" : "none";
    modelSelect.innerHTML = '<option>Lade...</option>';
    try {
        const res = await fetch(`/api/models?provider=${provider}&url=${encodeURIComponent(ollamaUrl)}`);
        const data = await res.json();
        modelSelect.innerHTML = '';
        if (data.models && data.models.length > 0) {
            data.models.forEach(m => {
                const opt = document.createElement('option');
                opt.value = opt.textContent = m;
                modelSelect.appendChild(opt);
            });
            // On first load restore the previously saved model selection.
            const savedModel = currentSettings[`${provider}_model`];
            if (isInitialLoad && savedModel) modelSelect.value = savedModel;
        }
    } catch (e) { modelSelect.innerHTML = '<option>Fehler</option>'; }
}
async function saveSettings() {
    // POST the toolbar selection to the backend, which persists it to .env.
    const provider = document.getElementById('ai-provider').value;
    const model = document.getElementById('ai-model').value;
    const ollama_base_url = document.getElementById('ollama-url').value;
    const statusEl = document.getElementById('settings-status');
    try {
        await fetch('/api/settings', {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify({ provider, model, ollama_base_url })
        });
        // Brief confirmation that clears itself after two seconds.
        statusEl.textContent = "✅ Gespeichert";
        setTimeout(() => statusEl.textContent = "", 2000);
    } catch (e) { statusEl.textContent = "❌ Fehler"; }
}
// 5. INITIALISATION
document.addEventListener('DOMContentLoaded', function() {
    // Grid layout: draggable/resizable widgets, layout persisted per browser
    // in localStorage under 'pi-orch-layout-v2'.
    var grid = GridStack.init({
        cellHeight: 80, margin: 10, float: true,
        handle: '.widget-header',
        resizable: { handles: 'all' }
    });
    const savedLayout = localStorage.getItem('pi-orch-layout-v2');
    if (savedLayout) { try { grid.load(JSON.parse(savedLayout)); } catch(e){} }
    grid.on('resizestop dragstop', () => {
        localStorage.setItem('pi-orch-layout-v2', JSON.stringify(grid.save(false)));
    });
    // xterm.js terminal widget; connected lazily via openTerminal below.
    const term = new Terminal({ theme: { background: '#000' }, fontSize: 13, convertEol: true });
    const fitAddon = new FitAddon.FitAddon();
    term.loadAddon(fitAddon);
    term.open(document.getElementById('terminal'));
    // Live install/system log stream, appended line by line and auto-scrolled.
    const logWs = new WebSocket(`ws://${location.host}/ws/install_logs`);
    logWs.onmessage = (ev) => {
        const div = document.createElement('div');
        div.textContent = `> ${ev.data}`;
        document.getElementById('install-log').appendChild(div);
        document.getElementById('install-log').scrollTop = document.getElementById('install-log').scrollHeight;
    };
    // AI chat stream: replies from the AI are rendered as Markdown.
    const chatWs = new WebSocket(`ws://${location.host}/ws/chat`);
    chatWs.onmessage = (ev) => appendChat("KI", ev.data, "text-blue-400 font-bold");
    initChat(chatWs);
    window.appendChat = function(user, msg, classes) {
        const win = document.getElementById('chat-window');
        let formattedMsg = (user === "KI") ? marked.parse(msg) : msg;
        win.innerHTML += `<div class="mb-4"><span class="${classes} block mb-1 text-[10px] uppercase">${user}</span><div class="markdown-content text-slate-300 text-sm">${formattedMsg}</div></div>`;
        win.scrollTop = win.scrollHeight;
    };
    // Only one node terminal at a time: close any previous socket and
    // dispose the old keystroke listener before reconnecting.
    window.openTerminal = function(ip) {
        if (window.termWs) window.termWs.close();
        if (termDataDisposable) termDataDisposable.dispose();
        term.clear();
        window.termWs = new WebSocket(`ws://${location.host}/ws/terminal/${ip}`);
        window.termWs.onmessage = (ev) => term.write(ev.data);
        termDataDisposable = term.onData(data => {
            if (window.termWs?.readyState === WebSocket.OPEN) window.termWs.send(data);
        });
    };
    loadSettings();
});
</script>
</body>
</html>