From 8d75b87f3babe5d91779bb7162f4bf67d6751c9e Mon Sep 17 00:00:00 2001 From: thomasciesla Date: Mon, 22 Dec 2025 14:12:13 +0100 Subject: [PATCH] jtl-wafi-agent.py aktualisiert --- jtl-wafi-agent.py | 342 +++++++++++++++++++++++----------------------- 1 file changed, 171 insertions(+), 171 deletions(-) diff --git a/jtl-wafi-agent.py b/jtl-wafi-agent.py index ffd2fe6..fdb377e 100644 --- a/jtl-wafi-agent.py +++ b/jtl-wafi-agent.py @@ -50,16 +50,16 @@ LOG_FILE = "/var/log/jtl-wafi.log" # PFADE - SHOPS # ============================================================================= VHOSTS_DIR = "/var/www/vhosts" -ACTIVE_SHOPS_FILE = "/var/lib/geoip/active_shops.json" +ACTIVE_SHOPS_FILE = "/var/lib/jtl-wafi/active_shops.json" # ============================================================================= # SHOP-DATEIEN # ============================================================================= -BACKUP_SUFFIX = ".geoip_backup" -BLOCKING_FILE = "geoip_blocking.php" -CACHE_FILE = "geoip_ip_ranges.cache" -SHOP_LOG_FILE = "geoip_blocked.log" -RATELIMIT_DIR = "geoip_ratelimit" +BACKUP_SUFFIX = ".jtl-wafi_backup" +BLOCKING_FILE = "jtl-wafi.php" +CACHE_FILE = "jtl-wafi_ip_ranges.cache" +SHOP_LOG_FILE = "jtl-wafi_blocked.log" +RATELIMIT_DIR = "jtl-wafi_ratelimit" # ============================================================================= # WEBSOCKET @@ -141,7 +141,7 @@ BOT_IP_RANGES = { '43.0.0.0/9', # 43.0.0.0 - 43.127.255.255 (8.3 Mio IPs) '8.128.0.0/10', # 8.128.0.0 - 8.191.255.255 (4.2 Mio IPs) '8.208.0.0/12', # 8.208.0.0 - 8.223.255.255 (1 Mio IPs) - + # === Alibaba Cloud Singapore - Weitere Blöcke === '47.74.0.0/15', # 47.74.0.0 - 47.75.255.255 '47.76.0.0/16', # 47.76.0.0 - 47.76.255.255 @@ -153,7 +153,7 @@ BOT_IP_RANGES = { '149.129.0.0/16', # 149.129.0.0 - 149.129.255.255 '161.117.0.0/16', # 161.117.0.0 - 161.117.255.255 '170.33.0.0/16', # 170.33.0.0 - 170.33.255.255 - + # === Alibaba Cloud China (Aliyun) === '39.96.0.0/13', # 39.96.0.0 - 39.103.255.255 '39.104.0.0/14', # 39.104.0.0 - 39.107.255.255 @@ -175,7 +175,7 @@ BOT_IP_RANGES = { '139.224.0.0/16', # 139.224.0.0 - 139.224.255.255 '140.205.0.0/16', # 140.205.0.0 - 140.205.255.255 '182.92.0.0/16', # 182.92.0.0 - 182.92.255.255 - + # === Alibaba Sonstige === '203.107.0.0/16', # Alibaba DNS '103.206.40.0/22', # Alibaba Cloud SG @@ -567,21 +567,21 @@ GENERIC_BOT_PATTERNS = [ def setup_logging(debug: bool = False): """Konfiguriert Logging mit Rotation.""" log_level = logging.DEBUG if debug else logging.INFO - + # Formatter formatter = logging.Formatter( '%(asctime)s [%(levelname)s] %(message)s', datefmt='%Y-%m-%d %H:%M:%S' ) - + # Console Handler console_handler = logging.StreamHandler() console_handler.setFormatter(formatter) console_handler.setLevel(log_level) - + # File Handler mit Rotation handlers = [console_handler] - + log_dir = os.path.dirname(LOG_FILE) if log_dir and os.path.exists(log_dir): try: @@ -595,12 +595,12 @@ def setup_logging(debug: bool = False): handlers.append(file_handler) except PermissionError: pass - + # Logger konfigurieren logger = logging.getLogger('jtl_wafi_agent') logger.setLevel(log_level) logger.handlers = handlers - + return logger # Global Logger (wird in main() initialisiert) @@ -640,9 +640,9 @@ def get_most_common_owner(httpdocs_path: str): """ if not os.path.isdir(httpdocs_path): return None, None - + owner_counts = Counter() - + try: for entry in os.listdir(httpdocs_path): entry_path = os.path.join(httpdocs_path, entry) @@ -653,10 +653,10 @@ def get_most_common_owner(httpdocs_path: str): continue 
except (OSError, IOError): return None, None - + if not owner_counts: return None, None - + most_common = owner_counts.most_common(1)[0][0] return most_common @@ -668,10 +668,10 @@ def set_owner(path: str, uid: int, gid: int, recursive: bool = False): """ if uid is None or gid is None: return - + try: os.chown(path, uid, gid) - + if recursive and os.path.isdir(path): for root, dirs, files in os.walk(path): for d in dirs: @@ -713,21 +713,21 @@ def detect_bot(user_agent: str, ip: str = None) -> str: for cidr in ip_ranges: if ip_in_cidr(ip, cidr): return bot_name - + if not user_agent or user_agent == 'Unknown': return 'Unbekannt' - + # SCHRITT 2: Spezifische User-Agent Patterns for bot_name, pattern in BOT_PATTERNS.items(): if re.search(pattern, user_agent, re.IGNORECASE): return bot_name - + # SCHRITT 3: Generische Patterns als Fallback ua_lower = user_agent.lower() for pattern in GENERIC_BOT_PATTERNS: if pattern in ua_lower: return f'Bot ({pattern})' - + return 'Unbekannt' @@ -737,10 +737,10 @@ def detect_bot(user_agent: str, ip: str = None) -> str: def check_link11(domain: str) -> Dict[str, Any]: """Prüft ob eine Domain hinter Link11 ist.""" global DNS_CACHE - + if domain in DNS_CACHE: return DNS_CACHE[domain] - + try: ip = socket.gethostbyname(domain) is_link11 = (ip == LINK11_IP) @@ -1229,7 +1229,7 @@ if (count($ranges) >= {min_expected}) {{ }} ''' - temp_php = os.path.join(httpdocs_path, '_geoip_cache_gen.php') + temp_php = os.path.join(httpdocs_path, '_jtl-wafi_cache_gen.php') try: with open(temp_php, 'w') as f: f.write(php_script) @@ -1280,7 +1280,7 @@ else {{ echo "OK:" . count($data); }} # ============================================================================= # SHOP REGISTRY FUNCTIONS # ============================================================================= -def add_shop_to_active(shop: str, mode: str = "geoip", geo_region: str = "dach", +def add_shop_to_active(shop: str, mode: str = "geoip", geo_region: str = "dach", rate_limit: int = None, ban_duration: int = None, bot_monitor_only: bool = False): """Registriert einen Shop als aktiv.""" @@ -1292,7 +1292,7 @@ def add_shop_to_active(shop: str, mode: str = "geoip", geo_region: str = "dach", shops = json.load(f) except: shops = {} - + shop_data = { "activated": datetime.now().isoformat(), "expiry": (datetime.now() + timedelta(hours=72)).isoformat(), @@ -1305,7 +1305,7 @@ def add_shop_to_active(shop: str, mode: str = "geoip", geo_region: str = "dach", shop_data["ban_duration"] = ban_duration if bot_monitor_only: shop_data["bot_monitor_only"] = True - + shops[shop] = shop_data with open(ACTIVE_SHOPS_FILE, 'w') as f: json.dump(shops, f, indent=2) @@ -1388,7 +1388,7 @@ def get_available_shops() -> List[str]: shops = [] if not os.path.exists(VHOSTS_DIR): return shops - + try: for entry in os.listdir(VHOSTS_DIR): shop_path = os.path.join(VHOSTS_DIR, entry) @@ -1398,7 +1398,7 @@ def get_available_shops() -> List[str]: shops.append(entry) except: pass - + return sorted(shops) @@ -1416,12 +1416,12 @@ def get_active_shops() -> List[str]: # ============================================================================= # ACTIVATE / DEACTIVATE BLOCKING # ============================================================================= -def activate_blocking(shop: str, silent: bool = True, mode: str = "geoip", - geo_region: str = "dach", rate_limit: int = None, +def activate_blocking(shop: str, silent: bool = True, mode: str = "geoip", + geo_region: str = "dach", rate_limit: int = None, ban_duration: int = None, bot_monitor_only: bool = 
False) -> bool: """ Aktiviert Blocking für einen Shop. - + Args: shop: Domain des Shops silent: Keine Konsolenausgabe @@ -1430,7 +1430,7 @@ def activate_blocking(shop: str, silent: bool = True, mode: str = "geoip", rate_limit: Requests pro Minute (nur bei bot-mode) ban_duration: Ban-Dauer in Sekunden (nur bei bot-mode) bot_monitor_only: Nur Monitoring, kein Blocking (nur bei bot-mode) - + Returns: True wenn erfolgreich, False sonst """ @@ -1463,7 +1463,7 @@ def activate_blocking(shop: str, silent: bool = True, mode: str = "geoip", # Ermittle Owner uid, gid = get_most_common_owner(httpdocs) - + if not silent: logger.info(f"Aktiviere {region_info['icon']} {region_info['name']} für: {shop}") @@ -1499,7 +1499,7 @@ def activate_blocking(shop: str, silent: bool = True, mode: str = "geoip", os.makedirs(ratelimit_path, mode=0o777, exist_ok=True) os.chmod(ratelimit_path, 0o777) set_owner(ratelimit_path, uid, gid, recursive=True) - + if bot_monitor_only: # Monitor-Only Modus: Nur Logging, kein Blocking geoip_content = BOT_MONITOR_TEMPLATE.format( @@ -1585,11 +1585,11 @@ def activate_blocking(shop: str, silent: bool = True, mode: str = "geoip", def deactivate_blocking(shop: str, silent: bool = True) -> bool: """ Deaktiviert Blocking für einen Shop. - + Args: shop: Domain des Shops silent: Keine Konsolenausgabe - + Returns: True wenn erfolgreich, False sonst """ @@ -1642,15 +1642,15 @@ def deactivate_blocking(shop: str, silent: bool = True) -> bool: def get_shop_log_stats(shop: str) -> Dict[str, Any]: """ Sammelt Statistiken aus dem Shop-Log. - + Returns: - Dict mit log_entries, total_bans, active_bans, banned_bots, + Dict mit log_entries, total_bans, active_bans, banned_bots, req_per_min, unique_ips, unique_bots, top_bots, top_ips """ httpdocs = os.path.join(VHOSTS_DIR, shop, 'httpdocs') log_file = os.path.join(httpdocs, SHOP_LOG_FILE) ratelimit_path = os.path.join(httpdocs, RATELIMIT_DIR) - + stats = { 'log_entries': 0, 'total_bans': 0, @@ -1662,10 +1662,10 @@ def get_shop_log_stats(shop: str) -> Dict[str, Any]: 'top_bots': {}, 'top_ips': {} } - + ips = {} bots = {} - + # Log-Datei auswerten if os.path.isfile(log_file): try: @@ -1747,22 +1747,22 @@ def get_shop_log_stats(shop: str) -> Dict[str, Any]: # Statistiken berechnen stats['unique_ips'] = len(ips) stats['unique_bots'] = len(bots) - + # Top Bots (max 10) sorted_bots = sorted(bots.items(), key=lambda x: x[1], reverse=True)[:10] stats['top_bots'] = dict(sorted_bots) - + # Top IPs (max 10) sorted_ips = sorted(ips.items(), key=lambda x: x[1]['count'], reverse=True)[:10] stats['top_ips'] = {ip: data['count'] for ip, data in sorted_ips} - + # Req/min berechnen activation_time = get_shop_activation_time(shop) if activation_time and stats['log_entries'] > 0: runtime_minutes = (datetime.now() - activation_time).total_seconds() / 60 if runtime_minutes > 0: stats['req_per_min'] = round(stats['log_entries'] / runtime_minutes, 2) - + return stats @@ -1774,7 +1774,7 @@ class LogWatcher: Überwacht Shop-Log-Dateien für Live-Streaming. Nur aktiv wenn explizit angefordert. 
""" - + def __init__(self, callback: Callable[[str, str], None]): """ Args: @@ -1786,24 +1786,24 @@ class LogWatcher: self.running = False self._thread: Optional[threading.Thread] = None self._lock = threading.Lock() - + def start(self): """Startet den Watcher-Thread.""" if self.running: return - + self.running = True self._thread = threading.Thread(target=self._watch_loop, daemon=True) self._thread.start() logger.debug("LogWatcher gestartet") - + def stop(self): """Stoppt den Watcher-Thread.""" self.running = False if self._thread: self._thread.join(timeout=2) logger.debug("LogWatcher gestoppt") - + def start_watching(self, shop: str): """Startet Überwachung für einen Shop.""" with self._lock: @@ -1815,51 +1815,51 @@ class LogWatcher: else: self.file_positions[shop] = 0 logger.debug(f"LogWatcher: Überwache {shop}") - + def stop_watching(self, shop: str): """Stoppt Überwachung für einen Shop.""" with self._lock: self.watching.discard(shop) self.file_positions.pop(shop, None) logger.debug(f"LogWatcher: Beende Überwachung von {shop}") - + def _watch_loop(self): """Hauptschleife - prüft alle 500ms auf neue Einträge.""" while self.running: with self._lock: shops_to_watch = list(self.watching) - + for shop in shops_to_watch: try: log_file = os.path.join(VHOSTS_DIR, shop, 'httpdocs', SHOP_LOG_FILE) - + if not os.path.isfile(log_file): continue - + current_size = os.path.getsize(log_file) last_pos = self.file_positions.get(shop, 0) - + if current_size > last_pos: # Neue Daten vorhanden with open(log_file, 'r') as f: f.seek(last_pos) new_content = f.read() self.file_positions[shop] = f.tell() - + for line in new_content.strip().split('\n'): if line: try: self.callback(shop, line) except Exception as e: logger.error(f"LogWatcher Callback Fehler: {e}") - + elif current_size < last_pos: # Log wurde rotiert self.file_positions[shop] = 0 - + except Exception as e: logger.error(f"LogWatcher Fehler für {shop}: {e}") - + time.sleep(0.5) @@ -1872,16 +1872,16 @@ def rotate_shop_logs(): try: httpdocs = os.path.join(VHOSTS_DIR, shop, 'httpdocs') log_file = os.path.join(httpdocs, SHOP_LOG_FILE) - + if not os.path.isfile(log_file): continue - + size = os.path.getsize(log_file) if size <= LOG_MAX_SIZE: continue - + logger.info(f"Rotiere Log für {shop} ({size / 1024 / 1024:.1f} MB)") - + # Rotiere: log -> log.1 -> log.2 -> log.3 (delete) for i in range(LOG_BACKUP_COUNT - 1, 0, -1): src = f"{log_file}.{i}" @@ -1891,15 +1891,15 @@ def rotate_shop_logs(): os.remove(src) else: shutil.move(src, dst) - + shutil.move(log_file, f"{log_file}.1") - + # Neues Log mit korrekter Ownership uid, gid = get_most_common_owner(httpdocs) with open(log_file, 'w') as f: f.write(f"# Log rotated at {utc_now_iso()}\n") set_owner(log_file, uid, gid) - + except Exception as e: logger.error(f"Log-Rotation Fehler für {shop}: {e}") @@ -1911,7 +1911,7 @@ class JTLWAFiAgent: """ WebSocket-basierter Agent für Echtzeit-Kommunikation mit dem Dashboard. 
""" - + def __init__(self, dashboard_url: str = DEFAULT_DASHBOARD_URL): self.dashboard_url = dashboard_url self.hostname = socket.getfqdn() @@ -1921,19 +1921,19 @@ class JTLWAFiAgent: self.running = False self.ws = None self.reconnect_delay = RECONNECT_BASE_DELAY - + # Log Watcher für Live-Streaming self.log_watcher = LogWatcher(callback=self._on_log_entry) - + # State Tracking self.last_stats: Dict[str, Dict] = {} self.last_stats_time = 0 self.last_heartbeat_time = 0 self.last_log_rotation_time = 0 - + # Token laden falls vorhanden self._load_token() - + def _load_token(self): """Lädt gespeicherten Token aus Datei.""" if os.path.isfile(TOKEN_FILE): @@ -1944,7 +1944,7 @@ class JTLWAFiAgent: logger.info(f"Token geladen aus {TOKEN_FILE}") except Exception as e: logger.warning(f"Konnte Token nicht laden: {e}") - + def _save_token(self, token: str): """Speichert Token in Datei.""" try: @@ -1956,7 +1956,7 @@ class JTLWAFiAgent: logger.info(f"Token gespeichert in {TOKEN_FILE}") except Exception as e: logger.error(f"Konnte Token nicht speichern: {e}") - + def _get_os_info(self) -> Dict[str, str]: """Sammelt OS-Informationen.""" return { @@ -1964,11 +1964,11 @@ class JTLWAFiAgent: "release": platform.release(), "machine": platform.machine() } - + def _get_system_stats(self) -> Dict[str, Any]: """Sammelt System-Statistiken.""" stats = {} - + try: load = os.getloadavg() stats["load_1m"] = round(load[0], 2) @@ -1976,7 +1976,7 @@ class JTLWAFiAgent: stats["load_15m"] = round(load[2], 2) except: pass - + try: with open('/proc/meminfo', 'r') as f: meminfo = {} @@ -1984,80 +1984,80 @@ class JTLWAFiAgent: parts = line.split(':') if len(parts) == 2: meminfo[parts[0].strip()] = int(parts[1].strip().split()[0]) - + total = meminfo.get('MemTotal', 1) available = meminfo.get('MemAvailable', meminfo.get('MemFree', 0)) stats["memory_percent"] = round((1 - available / total) * 100, 1) except: pass - + try: with open('/proc/uptime', 'r') as f: stats["uptime_seconds"] = int(float(f.read().split()[0])) except: pass - + return stats - + def _get_shops_summary(self) -> Dict[str, int]: """Gibt Shop-Zusammenfassung zurück.""" available = get_available_shops() active = get_active_shops() return {"total": len(available), "active": len(active)} - + def _get_all_shops_data(self) -> List[Dict[str, Any]]: """Sammelt Daten aller Shops.""" shops_data = [] available = get_available_shops() active = get_active_shops() - + for shop in available: is_active = shop in active link11_info = check_link11(shop) - + shop_data = { "domain": shop, "status": "active" if is_active else "inactive", "link11": link11_info['is_link11'], "link11_ip": link11_info['ip'] } - + if is_active: shop_mode = get_shop_mode(shop) shop_geo = get_shop_geo_region(shop) rate_limit, ban_duration = get_shop_rate_limit_config(shop) activation_time = get_shop_activation_time(shop) monitor_only = get_shop_monitor_mode(shop) - + shop_data["mode"] = shop_mode shop_data["geo_region"] = shop_geo shop_data["rate_limit"] = rate_limit shop_data["ban_duration"] = ban_duration shop_data["bot_monitor_only"] = monitor_only - + if activation_time: shop_data["activated"] = activation_time.isoformat() runtime = (datetime.now() - activation_time).total_seconds() / 60 shop_data["runtime_minutes"] = round(runtime, 1) - + # Stats sammeln shop_data["stats"] = get_shop_log_stats(shop) - + shops_data.append(shop_data) - + return shops_data - + def _on_log_entry(self, shop: str, line: str): """Callback für neue Log-Einträge.""" if not self.ws or not self.approved: return - + # Event 
senden asyncio.run_coroutine_threadsafe( self._send_event('log.entry', {'shop': shop, 'line': line}), self._loop ) - + # Prüfe auf Ban-Events if 'BANNED: ' in line: try: @@ -2072,12 +2072,12 @@ class JTLWAFiAgent: ) except: pass - + async def _send_event(self, event_type: str, data: Dict[str, Any]): """Sendet ein Event an das Dashboard.""" if not self.ws: return - + try: message = json.dumps({ 'type': event_type, @@ -2087,7 +2087,7 @@ class JTLWAFiAgent: logger.debug(f"Gesendet: {event_type}") except Exception as e: logger.error(f"Fehler beim Senden von {event_type}: {e}") - + async def _send_connect(self): """Sendet agent.connect Event.""" await self._send_event('agent.connect', { @@ -2098,7 +2098,7 @@ class JTLWAFiAgent: 'os_info': self._get_os_info(), 'shops_summary': self._get_shops_summary() }) - + async def _send_heartbeat(self): """Sendet agent.heartbeat Event.""" await self._send_event('agent.heartbeat', { @@ -2107,7 +2107,7 @@ class JTLWAFiAgent: 'shops_summary': self._get_shops_summary() }) self.last_heartbeat_time = time.time() - + async def _send_full_update(self): """Sendet shop.full_update Event.""" await self._send_event('shop.full_update', { @@ -2116,12 +2116,12 @@ class JTLWAFiAgent: 'shops': self._get_all_shops_data(), 'system': self._get_system_stats() }) - + async def _send_stats_update(self): """Sendet Stats-Updates für aktive Shops.""" for shop in get_active_shops(): stats = get_shop_log_stats(shop) - + # Nur senden wenn sich etwas geändert hat last = self.last_stats.get(shop, {}) if stats != last: @@ -2130,18 +2130,18 @@ class JTLWAFiAgent: 'stats': stats }) self.last_stats[shop] = stats - + self.last_stats_time = time.time() - + async def _handle_message(self, message: str): """Verarbeitet eingehende Nachrichten vom Dashboard.""" try: data = json.loads(message) event_type = data.get('type') event_data = data.get('data', {}) - + logger.debug(f"Empfangen: {event_type}") - + if event_type == 'auth.approved': # Token speichern token = event_data.get('token') @@ -2151,31 +2151,31 @@ class JTLWAFiAgent: logger.info("✅ Agent wurde im Dashboard freigegeben!") # Full Update senden await self._send_full_update() - + elif event_type == 'command.activate': await self._handle_activate_command(event_data) - + elif event_type == 'command.deactivate': await self._handle_deactivate_command(event_data) - + elif event_type == 'log.subscribe': shop = event_data.get('shop') if shop: self.log_watcher.start_watching(shop) - + elif event_type == 'log.unsubscribe': shop = event_data.get('shop') if shop: self.log_watcher.stop_watching(shop) - + elif event_type == 'ping': await self._send_event('pong', {'agent_id': self.agent_id}) - + except json.JSONDecodeError: logger.error(f"Ungültiges JSON empfangen: {message[:100]}") except Exception as e: logger.error(f"Fehler bei Nachrichtenverarbeitung: {e}") - + async def _handle_activate_command(self, data: Dict[str, Any]): """Verarbeitet activate-Command.""" command_id = data.get('command_id', 'unknown') @@ -2185,7 +2185,7 @@ class JTLWAFiAgent: rate_limit = data.get('rate_limit') ban_duration = data.get('ban_duration') bot_monitor_only = data.get('bot_monitor_only', False) - + # Korrektes Logging je nach Modus if mode == 'bot': if bot_monitor_only: @@ -2194,18 +2194,18 @@ class JTLWAFiAgent: logger.info(f"Aktiviere {shop} (mode=bot, rate_limit={rate_limit}/min, ban={ban_duration}s)") else: logger.info(f"Aktiviere {shop} (mode=geoip, region={geo_region})") - + try: success = activate_blocking( - shop, - silent=True, - mode=mode, + shop, + 
silent=True, + mode=mode, geo_region=geo_region, - rate_limit=rate_limit, + rate_limit=rate_limit, ban_duration=ban_duration, bot_monitor_only=bot_monitor_only ) - + if success: mode_desc = 'monitor' if bot_monitor_only else mode await self._send_event('command.result', { @@ -2230,17 +2230,17 @@ class JTLWAFiAgent: 'message': str(e), 'shop': shop }) - + async def _handle_deactivate_command(self, data: Dict[str, Any]): """Verarbeitet deactivate-Command.""" command_id = data.get('command_id', 'unknown') shop = data.get('shop') - + logger.info(f"Deaktiviere {shop}") - + try: success = deactivate_blocking(shop, silent=True) - + if success: await self._send_event('command.result', { 'command_id': command_id, @@ -2264,49 +2264,49 @@ class JTLWAFiAgent: 'message': str(e), 'shop': shop }) - + async def _periodic_tasks(self): """Führt periodische Tasks aus.""" while self.running: try: now = time.time() - + # Heartbeat (alle 60 Sekunden) if now - self.last_heartbeat_time >= HEARTBEAT_INTERVAL: if self.approved: await self._send_heartbeat() - + # Stats Update (alle 10 Sekunden) if now - self.last_stats_time >= STATS_UPDATE_INTERVAL: if self.approved: await self._send_stats_update() - + # Log Rotation (alle 5 Minuten prüfen) if now - self.last_log_rotation_time >= 300: rotate_shop_logs() self.last_log_rotation_time = now - + await asyncio.sleep(1) - + except asyncio.CancelledError: break except Exception as e: logger.error(f"Fehler in periodic_tasks: {e}") await asyncio.sleep(5) - + async def connect(self): """Stellt WebSocket-Verbindung her.""" # SSL Context der Self-Signed Certificates akzeptiert ssl_context = ssl.create_default_context() ssl_context.check_hostname = False ssl_context.verify_mode = ssl.CERT_NONE - + try: # websockets importieren import websockets - + logger.info(f"Verbinde zu {self.dashboard_url}...") - + async with websockets.connect( self.dashboard_url, ssl=ssl_context, @@ -2316,15 +2316,15 @@ class JTLWAFiAgent: ) as websocket: self.ws = websocket self.reconnect_delay = RECONNECT_BASE_DELAY - + logger.info("✅ WebSocket verbunden") - + # Connect-Event senden await self._send_connect() - + # Periodic Tasks starten periodic_task = asyncio.create_task(self._periodic_tasks()) - + try: # Nachrichten empfangen async for message in websocket: @@ -2335,7 +2335,7 @@ class JTLWAFiAgent: await periodic_task except asyncio.CancelledError: pass - + except ImportError: logger.error("websockets-Modul nicht installiert! 
Installiere mit: pip install websockets") raise @@ -2345,13 +2345,13 @@ class JTLWAFiAgent: finally: self.ws = None self.approved = False - + async def run_async(self): """Hauptschleife mit Auto-Reconnect.""" self._loop = asyncio.get_running_loop() self.running = True self._shutdown_event = asyncio.Event() - + # Signal Handler für asyncio def signal_handler(): logger.info("Beende Agent (Signal empfangen)...") @@ -2360,14 +2360,14 @@ class JTLWAFiAgent: # WebSocket schließen falls verbunden if self.ws: asyncio.create_task(self._close_websocket()) - + # Signale registrieren for sig in (signal.SIGINT, signal.SIGTERM): self._loop.add_signal_handler(sig, signal_handler) - + # LogWatcher starten self.log_watcher.start() - + try: while self.running: try: @@ -2378,7 +2378,7 @@ class JTLWAFiAgent: if not self.running: break logger.warning(f"Verbindung getrennt: {e}") - + if self.running: logger.info(f"Reconnect in {self.reconnect_delay}s...") try: @@ -2390,10 +2390,10 @@ class JTLWAFiAgent: break # Shutdown signal received except asyncio.TimeoutError: pass # Normal timeout, reconnect - + # Exponential Backoff self.reconnect_delay = min( - self.reconnect_delay * 2, + self.reconnect_delay * 2, RECONNECT_MAX_DELAY ) finally: @@ -2402,7 +2402,7 @@ class JTLWAFiAgent: # Signal Handler entfernen for sig in (signal.SIGINT, signal.SIGTERM): self._loop.remove_signal_handler(sig) - + async def _close_websocket(self): """Schließt WebSocket-Verbindung.""" if self.ws: @@ -2410,7 +2410,7 @@ class JTLWAFiAgent: await self.ws.close() except: pass - + def run(self): """Startet den Agent.""" logger.info("=" * 60) @@ -2420,13 +2420,13 @@ class JTLWAFiAgent: logger.info(f"Dashboard: {self.dashboard_url}") logger.info(f"Token: {'vorhanden' if self.token else 'nicht vorhanden'}") logger.info("=" * 60) - + # Asyncio Loop starten try: asyncio.run(self.run_async()) except KeyboardInterrupt: pass - + logger.info("Agent beendet.") @@ -2451,7 +2451,7 @@ Environment=PYTHONUNBUFFERED=1 WantedBy=multi-user.target """ service_path = "/etc/systemd/system/jtl-wafi-agent.service" - + try: with open(service_path, 'w') as f: f.write(service) @@ -2465,78 +2465,78 @@ WantedBy=multi-user.target def check_dependencies(): """Prüft ob alle Abhängigkeiten installiert sind.""" missing = [] - + try: import websockets except ImportError: missing.append("websockets") - + if missing: print("❌ Fehlende Abhängigkeiten:") for dep in missing: print(f" - {dep}") print(f"\nInstallieren mit: pip install {' '.join(missing)}") return False - + return True def main(): """Hauptfunktion mit CLI-Argumenten.""" import argparse - + parser = argparse.ArgumentParser( description=f"JTL-WAFi Agent v{VERSION} - WebSocket Real-Time Agent" ) parser.add_argument( - "--url", + "--url", default=DEFAULT_DASHBOARD_URL, help=f"Dashboard WebSocket URL (default: {DEFAULT_DASHBOARD_URL})" ) parser.add_argument( - "--debug", + "--debug", action="store_true", help="Debug-Logging aktivieren" ) parser.add_argument( - "--install-service", + "--install-service", action="store_true", help="Systemd Service installieren" ) parser.add_argument( - "--check-deps", + "--check-deps", action="store_true", help="Abhängigkeiten prüfen" ) - + args = parser.parse_args() - + # Logging initialisieren global logger logger = setup_logging(debug=args.debug) - + if args.install_service: create_systemd_service() return - + if args.check_deps: if check_dependencies(): print("✅ Alle Abhängigkeiten sind installiert") return - + # Abhängigkeiten prüfen if not check_dependencies(): sys.exit(1) - + # 
Root-Check
     if os.geteuid() != 0:
         print("❌ Root-Rechte erforderlich!")
         sys.exit(1)
-    
+
     # Agent starten
     agent = JTLWAFiAgent(dashboard_url=args.url)
     agent.run()
 
 
 if __name__ == "__main__":
-    main()
\ No newline at end of file
+    main()
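
Note for existing installations (not part of the patch itself): the diff only renames constants, so hosts that were activated under the old names still carry /var/lib/geoip/active_shops.json and per-shop geoip_* files after the agent is updated. Below is a minimal, hypothetical migration sketch built solely from the old and new constant values visible in this diff; the paths, and the assumption that re-activating a shop from the dashboard regenerates the blocking file under its new name (jtl-wafi.php), should be verified against the actual deployment before running it.

#!/usr/bin/env python3
"""Hypothetical migration sketch (not shipped with this patch).

Moves state files from the old geoip_* names to the jtl-wafi_* names
introduced above, so the shop registry, logs, IP-range caches and
rate-limit data survive the agent update. Paths mirror the constants in
jtl-wafi-agent.py; verify them first. The generated blocking PHP is not
touched here; re-activating the shop from the dashboard writes the new
jtl-wafi.php.
"""
import os
import shutil

VHOSTS_DIR = "/var/www/vhosts"

OLD_REGISTRY = "/var/lib/geoip/active_shops.json"
NEW_REGISTRY = "/var/lib/jtl-wafi/active_shops.json"

# Per-shop state files inside <vhost>/httpdocs: old name -> new name
RENAMES = {
    "geoip_ip_ranges.cache": "jtl-wafi_ip_ranges.cache",
    "geoip_blocked.log": "jtl-wafi_blocked.log",
    "geoip_ratelimit": "jtl-wafi_ratelimit",
}
OLD_BACKUP_SUFFIX = ".geoip_backup"
NEW_BACKUP_SUFFIX = ".jtl-wafi_backup"


def migrate_registry() -> None:
    """Move the active-shops registry into /var/lib/jtl-wafi, if present."""
    if os.path.isfile(OLD_REGISTRY) and not os.path.exists(NEW_REGISTRY):
        os.makedirs(os.path.dirname(NEW_REGISTRY), exist_ok=True)
        shutil.move(OLD_REGISTRY, NEW_REGISTRY)


def migrate_shop(httpdocs: str) -> None:
    """Rename per-shop state files and *.geoip_backup copies in one httpdocs dir."""
    for old_name, new_name in RENAMES.items():
        old_path = os.path.join(httpdocs, old_name)
        new_path = os.path.join(httpdocs, new_name)
        if os.path.exists(old_path) and not os.path.exists(new_path):
            shutil.move(old_path, new_path)
    for entry in os.listdir(httpdocs):
        if entry.endswith(OLD_BACKUP_SUFFIX):
            base = entry[: -len(OLD_BACKUP_SUFFIX)]
            shutil.move(os.path.join(httpdocs, entry),
                        os.path.join(httpdocs, base + NEW_BACKUP_SUFFIX))


if __name__ == "__main__":
    migrate_registry()
    if os.path.isdir(VHOSTS_DIR):
        for domain in os.listdir(VHOSTS_DIR):
            httpdocs = os.path.join(VHOSTS_DIR, domain, "httpdocs")
            if os.path.isdir(httpdocs):
                migrate_shop(httpdocs)

Ownership of the moved files is left unchanged; if it needs adjusting, the agent's own get_most_common_owner() / set_owner() pattern shown in the diff is the way to do it.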