import os
import json
import threading
import time

from flask import Flask, render_template, request, jsonify, redirect, url_for
from flask_sqlalchemy import SQLAlchemy
from flask_sock import Sock
import paramiko
import requests

# AI provider SDKs
import openai
import google.generativeai as genai

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///orchestrator.db'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db = SQLAlchemy(app)
sock = Sock(app)


# --- DATABASE MODELS ---

class Node(db.Model):
    """A managed cluster node reachable over SSH."""
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(100), nullable=False)
    ip = db.Column(db.String(50), nullable=False)
    user = db.Column(db.String(50), nullable=False)
    # NOTE(review): stored in plaintext — consider encrypting at rest.
    password = db.Column(db.String(100), nullable=False)
    status = db.Column(db.String(50), default="Unbekannt")
    arch = db.Column(db.String(20), default="N/A")
    os_type = db.Column(db.String(50), default="linux")
    has_docker = db.Column(db.Boolean, default=False)
    has_vnc = db.Column(db.Boolean, default=False)

    def to_dict(self):
        """Serialize every mapped column into a plain dict (for JSON responses)."""
        return {c.name: getattr(self, c.name) for c in self.__table__.columns}


class Settings(db.Model):
    """Singleton row holding the active AI-provider configuration."""
    id = db.Column(db.Integer, primary_key=True)
    provider = db.Column(db.String(50), default="google")
    google_model = db.Column(db.String(100), default="gemini-1.5-flash")
    openai_model = db.Column(db.String(100), default="gpt-4o-mini")
    ollama_model = db.Column(db.String(100), default="llama3")
    ollama_base_url = db.Column(db.String(200), default="http://localhost:11434/v1")


# Create the tables and make sure exactly one Settings row exists.
with app.app_context():
    db.create_all()
    if not Settings.query.first():
        db.session.add(Settings())
        db.session.commit()
# --- HELPER FUNCTIONS ---

def get_ssh_client(node):
    """Open an SSH connection to *node* using its stored credentials.

    Propagates whatever paramiko raises on failure (auth error, timeout, ...);
    callers are expected to handle connection errors themselves.
    """
    client = paramiko.SSHClient()
    # Cluster nodes are provisioned dynamically, so accept unknown host keys.
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect(node.ip, username=node.user, password=node.password, timeout=5)
    return client


def run_ssh_cmd(node, cmd):
    """Run *cmd* on *node*; return its stripped stdout, or None on any error."""
    client = None
    try:
        client = get_ssh_client(node)
        _stdin, stdout, _stderr = client.exec_command(cmd)
        return stdout.read().decode().strip()
    except Exception:
        # Best-effort helper: an unreachable node simply yields None.
        # (Was a bare `except:`, which also swallowed KeyboardInterrupt.)
        return None
    finally:
        # BUG FIX: the old code only closed the client on the success path,
        # leaking the connection when exec_command()/read() raised.
        if client is not None:
            client.close()


# --- ROUTES ---

@app.route('/')
def index():
    """Render the dashboard with all known nodes."""
    nodes = Node.query.all()
    return render_template('index.html', nodes=nodes)
@app.route('/add_node', methods=['POST'])
def add_node():
    """Create a node from the submitted form and return to the dashboard."""
    new_node = Node(
        name=request.form['name'],
        ip=request.form['ip'],
        user=request.form['user'],
        password=request.form['password']
    )
    db.session.add(new_node)
    db.session.commit()
    return redirect(url_for('index'))


# BUG FIX: the rules below lacked the '<int:id>' variable part, so Flask could
# never supply the `id` argument the view functions declare.
@app.route('/edit_node/<int:id>', methods=['POST'])
def edit_node(id):
    """Update name/ip/user of a node; password only when a new one is supplied."""
    node = Node.query.get_or_404(id)
    node.name = request.form.get('name', node.name)
    node.ip = request.form.get('ip', node.ip)
    node.user = request.form.get('user', node.user)
    # Empty password field means "keep the current one".
    if request.form.get('password'):
        node.password = request.form.get('password')
    db.session.commit()
    return redirect(url_for('index'))


@app.route('/remove_node/<int:id>', methods=['POST'])
def remove_node(id):
    """Delete a node and return to the dashboard."""
    node = Node.query.get_or_404(id)
    db.session.delete(node)
    db.session.commit()
    return redirect(url_for('index'))


@app.route('/refresh_status/<int:id>')
def refresh_status(id):
    """Probe a node over SSH and refresh its arch/OS/docker/VNC/status fields."""
    node = Node.query.get_or_404(id)
    try:
        arch = run_ssh_cmd(node, "uname -m")
        if arch is None:
            # BUG FIX: run_ssh_cmd swallows connection errors and returns None,
            # so the old except-branch never fired and dead nodes were reported
            # as "Online". Treat a failed probe as offline explicitly.
            node.status = "Offline"
        else:
            node.arch = arch or "N/A"
            # OS type from the ID= line of /etc/os-release.
            os_info = run_ssh_cmd(node, "cat /etc/os-release | grep ^ID=")
            node.os_type = os_info.split('=')[1].replace('"', '') if os_info else "linux"
            # BUG FIX: 'docker ps' prints at least a header when docker works,
            # and empty output when the binary is missing — but the old
            # `is not None` check was True for any reachable node.
            node.has_docker = bool(run_ssh_cmd(node, "docker ps"))
            # VNC: anything listening on ports 590x.
            node.has_vnc = bool(run_ssh_cmd(node, "netstat -tuln | grep :590"))
            node.status = "Online"
    except Exception:
        node.status = "Offline"

    db.session.commit()
    return jsonify(node.to_dict())
# --- SETTINGS API ---

@app.route('/api/settings', methods=['GET', 'POST'])
def handle_settings():
    """Read (GET) or update (POST) the singleton AI-provider settings row."""
    s = Settings.query.first()
    if request.method == 'POST':
        data = request.json
        s.provider = data.get('provider', s.provider)
        s.ollama_base_url = data.get('ollama_base_url', s.ollama_base_url)
        # Store the chosen model under the column of the *active* provider.
        # BUG FIX: guard against a missing 'model' key — the old code wrote
        # None into the column in that case.
        if data.get('model'):
            setattr(s, f"{s.provider}_model", data['model'])
        db.session.commit()
        return jsonify({"status": "success"})
    return jsonify({
        "provider": s.provider,
        "google_model": s.google_model,
        "openai_model": s.openai_model,
        "ollama_model": s.ollama_model,
        "ollama_base_url": s.ollama_base_url
    })


@app.route('/api/models')
def get_models():
    """List selectable models for a provider; queries the Ollama API live."""
    provider = request.args.get('provider')
    if provider == "google":
        return jsonify({"models": ["gemini-1.5-flash", "gemini-1.5-pro"]})
    elif provider == "openai":
        return jsonify({"models": ["gpt-4o-mini", "gpt-4o", "gpt-3.5-turbo"]})
    elif provider == "ollama":
        url = request.args.get('url', 'http://localhost:11434/v1')
        try:
            # The tag list lives on Ollama's native API, not the
            # OpenAI-compatible /v1 path.
            r = requests.get(url.replace('/v1', '/api/tags'), timeout=2)
            names = [m['name'] for m in r.json().get('models', [])]
            return jsonify({"models": names})
        except Exception:
            # Ollama unreachable — fall back to a sensible default list.
            # (Was a bare `except:`.)
            return jsonify({"models": ["llama3", "mistral", "codellama"]})
    return jsonify({"models": []})
# --- WEBSOCKETS ---

@sock.route('/ws/install_logs')
def install_logs(ws):
    """Placeholder log stream; real background jobs would push lines here."""
    while True:
        # Poll so the connection stays open; receive() raises once the client
        # disconnects, which ends the handler.
        ws.receive(timeout=1)


@sock.route('/ws/chat')
def chat_handler(ws):
    """Relay each chat message to the configured AI provider, send the reply."""
    s = Settings.query.first()
    while True:
        msg = ws.receive()
        if not msg:
            break

        response = "Fehler: Provider nicht konfiguriert"
        try:
            if s.provider == "google":
                genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
                model = genai.GenerativeModel(s.google_model)
                response = model.generate_content(msg).text
            elif s.provider == "openai":
                client = openai.OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
                res = client.chat.completions.create(
                    model=s.openai_model,
                    messages=[{"role": "user", "content": msg}]
                )
                response = res.choices[0].message.content
            elif s.provider == "ollama":
                r = requests.post(f"{s.ollama_base_url}/chat/completions", json={
                    "model": s.ollama_model,
                    "messages": [{"role": "user", "content": msg}],
                    "stream": False
                })
                response = r.json()['choices'][0]['message']['content']
        except Exception as e:
            # Surface provider errors to the user instead of dropping the socket.
            response = f"KI-Fehler: {str(e)}"

        ws.send(response)


# BUG FIX: the rule lacked the '<ip>' variable part, so the handler's `ip`
# argument could never be filled in.
@sock.route('/ws/terminal/<ip>')
def terminal_handler(ws, ip):
    """Bridge a browser terminal to an interactive SSH shell on the node."""
    node = Node.query.filter_by(ip=ip).first()
    if not node:
        return

    try:
        client = get_ssh_client(node)
        chan = client.invoke_shell()
        alive = True  # cleared when the websocket side goes away

        def pump_output():
            # Forward shell output to the websocket until the session ends.
            while alive:
                if chan.recv_ready():
                    try:
                        ws.send(chan.recv(1024).decode())
                    except Exception:
                        break
                time.sleep(0.01)

        threading.Thread(target=pump_output, daemon=True).start()

        try:
            while True:
                cmd = ws.receive()
                if not cmd:
                    break
                chan.send(cmd)
        finally:
            # BUG FIX: stop the reader thread and release the SSH connection —
            # the old code leaked both on every disconnect.
            alive = False
            client.close()

    except Exception as e:
        ws.send(f"\r\n[SSH FEHLER]: {str(e)}\r\n")


if __name__ == '__main__':
    # API keys must be present in the environment
    # (GOOGLE_API_KEY / OPENAI_API_KEY) before starting.
    app.run(host='0.0.0.0', port=5000, debug=True)