#!/usr/bin/env python3
"""
GeoIP Agent v2.0.0 - WebSocket Real-Time Agent

Complete reimplementation featuring:
- WebSocket-based real-time communication
- Integrated shop manager (no separate script anymore)
- Token-based authentication
- On-demand live log streaming
- Auto-reconnect with exponential backoff

v2.0.0: WebSocket real-time + integrated shop manager
"""

import os
import sys
import json
import time
import socket
import hashlib
import logging
import asyncio
import ssl
import shutil
import subprocess
import re
import ipaddress
import signal
import platform
import threading
from datetime import datetime, timedelta, timezone
from pathlib import Path
from collections import Counter
from typing import Optional, Dict, Any, List, Set, Callable
from logging.handlers import RotatingFileHandler

# =============================================================================
# VERSION
# =============================================================================
VERSION = "2.0.0"

# =============================================================================
# PATHS - AGENT
# =============================================================================
CONFIG_DIR = "/etc/geoip-agent"
TOKEN_FILE = "/etc/geoip-agent/token"
LOG_FILE = "/var/log/geoip-agent.log"

# =============================================================================
# PATHS - SHOPS
# =============================================================================
VHOSTS_DIR = "/var/www/vhosts"
ACTIVE_SHOPS_FILE = "/var/lib/geoip/active_shops.json"

# =============================================================================
# SHOP FILES
# =============================================================================
BACKUP_SUFFIX = ".geoip_backup"
BLOCKING_FILE = "geoip_blocking.php"
CACHE_FILE = "geoip_ip_ranges.cache"
SHOP_LOG_FILE = "geoip_blocked.log"
RATELIMIT_DIR = "geoip_ratelimit"

# =============================================================================
# WEBSOCKET
# =============================================================================
DEFAULT_DASHBOARD_URL = "wss://shop000.jtl-hosting.de:8000/ws/agent"
HEARTBEAT_INTERVAL = 60        # seconds
RECONNECT_BASE_DELAY = 1       # seconds
RECONNECT_MAX_DELAY = 60       # seconds
STATS_UPDATE_INTERVAL = 10     # seconds
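# Illustrative sketch (an assumption, not part of the agent API shown in this
# file): reconnect logic typically combines the constants above into an
# exponential backoff, doubling the delay per failed attempt and capping it:
#
#     def _backoff_delay(attempt: int) -> int:
#         # attempt 0 -> 1s, 1 -> 2s, 2 -> 4s, ... capped at 60s
#         return min(RECONNECT_MAX_DELAY, RECONNECT_BASE_DELAY * (2 ** attempt))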
# =============================================================================
# LOG ROTATION
# =============================================================================
LOG_MAX_SIZE = 10 * 1024 * 1024  # 10 MB
LOG_BACKUP_COUNT = 3

# =============================================================================
# RATE-LIMIT DEFAULTS
# =============================================================================
DEFAULT_RATE_LIMIT = 30      # requests per minute
DEFAULT_BAN_DURATION = 5     # minutes

# =============================================================================
# CACHE VALIDATION
# =============================================================================
MIN_RANGES = {
    "dach": 1000,
    "eurozone": 5000
}

# =============================================================================
# LINK11
# =============================================================================
LINK11_IP = "128.65.223.106"

# =============================================================================
# DNS CACHE
# =============================================================================
DNS_CACHE = {}

# =============================================================================
# GEO REGIONS
# =============================================================================
GEO_REGIONS = {
    "dach": {
        "name": "DACH",
        "countries": ["de", "at", "ch"],
        "description": "Germany, Austria, Switzerland",
        "icon": "🇩🇪🇦🇹🇨🇭",
        "short": "DACH"
    },
    "eurozone": {
        "name": "Eurozone + GB",
        "countries": [
            "de", "at", "ch", "be", "cy", "ee", "es", "fi", "fr", "gb",
            "gr", "hr", "ie", "it", "lt", "lu", "lv", "mt", "nl", "pt", "si", "sk"
        ],
        "description": "22 countries: DE, AT, CH, BE, CY, EE, ES, FI, FR, GB, GR, HR, IE, IT, LT, LU, LV, MT, NL, PT, SI, SK",
        "icon": "🇪🇺",
        "short": "EU+"
    },
    "none": {
        "name": "Bot-Only",
        "countries": [],
        "description": "Bot rate-limiting only, reachable worldwide",
        "icon": "🤖",
        "short": "BOT"
    }
}

# =============================================================================
# BOT IP RANGES - for bots that disguise themselves with a normal User-Agent
# =============================================================================
BOT_IP_RANGES = {
    # Alibaba Cloud / Alibaba Spider - often disguises itself with a normal UA
    # Large aggregated blocks based on APNIC/WHOIS data
    'Alibaba-Bot': [
        # === Alibaba Cloud Singapore (ASEPL-SG) - main blocks ===
        '43.0.0.0/9',        # 43.0.0.0 - 43.127.255.255 (8.3M IPs)
        '8.128.0.0/10',      # 8.128.0.0 - 8.191.255.255 (4.2M IPs)
        '8.208.0.0/12',      # 8.208.0.0 - 8.223.255.255 (1M IPs)

        # === Alibaba Cloud Singapore - additional blocks ===
        '47.74.0.0/15',      # 47.74.0.0 - 47.75.255.255
        '47.76.0.0/16',      # 47.76.0.0 - 47.76.255.255
        '47.88.0.0/15',      # 47.88.0.0 - 47.89.255.255
        '47.90.0.0/15',      # 47.90.0.0 - 47.91.255.255
        '47.241.0.0/16',     # 47.241.0.0 - 47.241.255.255
        '47.52.0.0/15',      # 47.52.0.0 - 47.53.255.255 (HK)
        '47.56.0.0/15',      # 47.56.0.0 - 47.57.255.255 (HK)
        '149.129.0.0/16',    # 149.129.0.0 - 149.129.255.255
        '161.117.0.0/16',    # 161.117.0.0 - 161.117.255.255
        '170.33.0.0/16',     # 170.33.0.0 - 170.33.255.255

        # === Alibaba Cloud China (Aliyun) ===
        '39.96.0.0/13',      # 39.96.0.0 - 39.103.255.255
        '39.104.0.0/14',     # 39.104.0.0 - 39.107.255.255
        '39.108.0.0/16',     # 39.108.0.0 - 39.108.255.255
        '101.132.0.0/15',    # 101.132.0.0 - 101.133.255.255
        '106.14.0.0/15',     # 106.14.0.0 - 106.15.255.255
        '112.124.0.0/16',    # 112.124.0.0 - 112.124.255.255
        '114.55.0.0/16',     # 114.55.0.0 - 114.55.255.255
        '115.28.0.0/15',     # 115.28.0.0 - 115.29.255.255
        '116.62.0.0/16',     # 116.62.0.0 - 116.62.255.255
        '118.31.0.0/16',     # 118.31.0.0 - 118.31.255.255
        '119.23.0.0/16',     # 119.23.0.0 - 119.23.255.255
        '120.24.0.0/14',     # 120.24.0.0 - 120.27.255.255
        '120.55.0.0/16',     # 120.55.0.0 - 120.55.255.255
        '120.76.0.0/14',     # 120.76.0.0 - 120.79.255.255
        '121.40.0.0/14',     # 121.40.0.0 - 121.43.255.255
        '121.196.0.0/14',    # 121.196.0.0 - 121.199.255.255
        '139.196.0.0/16',    # 139.196.0.0 - 139.196.255.255
        '139.224.0.0/16',    # 139.224.0.0 - 139.224.255.255
        '140.205.0.0/16',    # 140.205.0.0 - 140.205.255.255
        '182.92.0.0/16',     # 182.92.0.0 - 182.92.255.255

        # === Alibaba miscellaneous ===
        '203.107.0.0/16',    # Alibaba DNS
        '103.206.40.0/22',   # Alibaba Cloud SG
        '185.218.176.0/22',  # Alibaba Cloud
    ],
}

# =============================================================================
# BOT DETECTION PATTERNS
# =============================================================================
BOT_PATTERNS = {
    # =========================================================================
    # AI/LLM SERVICES
    # =========================================================================
    'ChatGPT-User': r'chatgpt-user',
    'ChatGPT-Operator': r'chatgpt-operator',
    'ChatGPT-Agent': r'chatgpt agent',
    'ChatGPT': r'chatgpt',
    'GPTBot (OpenAI)': r'gptbot',
    'OAI-SearchBot (OpenAI)': r'oai-searchbot',
    'OpenAI': r'openai',
    'ClaudeBot (Anthropic)': r'claudebot',
    'Claude-User': r'claude-user',
    'Claude-Web': r'claude-web',
    'Claude-SearchBot': r'claude-searchbot',
    'Anthropic-AI': r'anthropic-ai',
    'Anthropic': r'anthropic',
    'Gemini-Deep-Research': r'gemini-deep-research',
    'Google-NotebookLM': r'google-notebooklm',
    'NotebookLM': r'notebooklm',
    'GoogleAgent-Mariner': r'googleagent-mariner',
    'PerplexityBot': r'perplexitybot',
    'Perplexity-User': r'perplexity-user',
    'Perplexity': r'perplexity',
    'Cohere-AI': r'cohere-ai',
    'Cohere-Training-Crawler': r'cohere-training-data-crawler',
    'Cohere': r'cohere',
    'MistralAI-User': r'mistralai-user',
    'MistralAI': r'mistralai',
    'Mistral': r'mistral',
    'DeepSeekBot': r'deepseekbot',
    'DeepSeek': r'deepseek',
    'Bytespider (TikTok/ByteDance)': r'bytespider',
    'TikTokSpider': r'tiktokspider',
    'ByteDance': r'bytedance',
    'AI2Bot-DeepResearchEval': r'ai2bot-deepresearcheval',
    'AI2Bot-Dolma': r'ai2bot-dolma',
    'AI2Bot (Allen Institute)': r'ai2bot',
    'CCBot (Common Crawl)': r'ccbot',
    'Diffbot': r'diffbot',
    'img2dataset': r'img2dataset',
    'LAIONDownloader': r'laiondownloader',
    'LAION-HuggingFace': r'laion-huggingface',
    'LAION': r'laion',
    'HuggingFace': r'huggingface',
    'BedrockBot (AWS)': r'bedrockbot',
    'DuckAssistBot': r'duckassistbot',
    'PhindBot': r'phindbot',
    'YouBot': r'youbot',
    'iAskSpider': r'iaskspider',
    'iAskBot': r'iaskbot',
    'ChatGLM-Spider': r'chatglm-spider',
    'Panscient': r'panscient',
    'Devin (Cognition)': r'devin',
    'Manus-User': r'manus-user',
    'TwinAgent': r'twinagent',
    'NovaAct': r'novaact',
    'FirecrawlAgent': r'firecrawlagent',
    'Firecrawl': r'firecrawl',
    'Crawl4AI': r'crawl4ai',
    'Crawlspace': r'crawlspace',
    'Cloudflare-AutoRAG': r'cloudflare-autorag',
    'TerraCotta': r'terracotta',
    'Thinkbot': r'thinkbot',
    # =========================================================================
    # SEARCH ENGINES
    # =========================================================================
    'Googlebot-Image': r'googlebot-image',
    'Googlebot-Video': r'googlebot-video',
    'Googlebot-News': r'googlebot-news',
    'Googlebot-Discovery': r'googlebot-discovery',
    'Googlebot': r'googlebot',
    'Google-Extended': r'google-extended',
    'Google-CloudVertexBot': r'google-cloudvertexbot',
    'Google-Firebase': r'google-firebase',
    'Google-InspectionTool': r'google-inspectiontool',
    'GoogleOther-Image': r'googleother-image',
    'GoogleOther-Video': r'googleother-video',
    'GoogleOther': r'googleother',
    'Storebot-Google': r'storebot-google',
    'AdsBot-Google': r'adsbot-google',
    'Bingbot (Microsoft)': r'bingbot',
    'BingPreview': r'bingpreview',
    'MSNBot': r'msnbot',
    'Baiduspider': r'baiduspider',
    'Baidu': r'baidu',
    'YandexBot': r'yandexbot',
    'YandexAdditionalBot': r'yandexadditionalbot',
    'YandexAdditional': r'yandexadditional',
    'Yandex': r'yandex',
    'DuckDuckBot': r'duckduckbot',
    'DuckDuckGo': r'duckduckgo',
    'Applebot-Extended': r'applebot-extended',
    'Applebot': r'applebot',
    'Yahoo Slurp': r'slurp',
    'Sogou': r'sogou',
    'Sosospider': r'sosospider',
    'NaverBot': r'naverbot',
    'Naver': r'naver',
    'SeznamBot': r'seznambot',
    'MojeekBot': r'mojeekbot',
    'QwantBot': r'qwantbot',
    'PetalBot (Huawei)': r'petalbot',
    'CocCocBot': r'coccocbot',
    'Exabot': r'exabot',
    'BraveBot': r'bravebot',
    'Bravest': r'bravest',
    'SeekportBot': r'seekportbot',
    # =========================================================================
    # SEO & MARKETING TOOLS
    # =========================================================================
    'AhrefsBot': r'ahrefsbot',
    'Ahrefs': r'ahrefs',
    'SemrushBot-OCOB': r'semrushbot-ocob',
    'SemrushBot-SWA': r'semrushbot-swa',
    'SemrushBot': r'semrushbot',
    'Semrush': r'semrush',
    'MJ12Bot (Majestic)': r'mj12bot',
    'Majestic': r'majestic',
    'DotBot (Moz)': r'dotbot',
    'RogerBot (Moz)': r'rogerbot',
    'Screaming Frog': r'screaming frog',
    'BLEXBot': r'blexbot',
    'DataForSEOBot': r'dataforseobot',
    'Linkdex': r'linkdex',
    'SearchmetricsBot': r'searchmetricsbot',
    # =========================================================================
    # SOCIAL MEDIA
    # =========================================================================
    'Facebook External Hit': r'facebookexternalhit',
    'FacebookBot': r'facebookbot',
    'Facebot': r'facebot',
    'Meta-ExternalAgent': r'meta-externalagent',
    'Meta-ExternalFetcher': r'meta-externalfetcher',
    'Meta-WebIndexer': r'meta-webindexer',
    'Facebook': r'facebook',
    'Twitterbot': r'twitterbot',
    'Twitter': r'twitter',
    'Instagram': r'instagram',
    'LinkedInBot': r'linkedinbot',
    'LinkedIn': r'linkedin',
    'Pinterestbot': r'pinterestbot',
    'Pinterest': r'pinterest',
    'WhatsApp': r'whatsapp',
    'TelegramBot': r'telegrambot',
    'Telegram': r'telegram',
    'DiscordBot': r'discordbot',
    'Discord': r'discord',
    'Slackbot': r'slackbot',
    'Slack': r'slack',
    'Quora-Bot': r'quora-bot',
    'Snapchat': r'snapchat',
    'RedditBot': r'redditbot',
    # =========================================================================
    # E-COMMERCE & PRICE COMPARISON
    # =========================================================================
    'Amazonbot': r'amazonbot',
    'Amazon-Kendra': r'amazon-kendra',
    'AmazonBuyForMe': r'amazonbuyforme',
    'AMZNKAssocBot': r'amznkassocbot',
    'Alibaba-Bot': r'alibaba|alibabagroup|aliyun|alicdn|alimama|taobao|tmall|1688\.com',
    'AlibabaSpider': r'alibabaspider',
    'Aliyun': r'aliyun',
    'GeedoShopProductFinder': r'geedoshopproductfinder',
    'Geedo': r'geedo',
    'ShopWiki': r'shopwiki',
    'PriceGrabber': r'pricegrabber',
    'Shopify': r'shopify',
    'Idealo': r'idealo',
    'Guenstiger.de': r'guenstiger',
    'Billiger.de': r'billiger',
    'Ladenzeile': r'ladenzeile',
    'Kelkoo': r'kelkoo',
    'PriceRunner': r'pricerunner',
    # =========================================================================
    # ARCHIVE & RESEARCH
    # =========================================================================
    'Archive.org Bot': r'archive\.org_bot|archive-org-bot',
    'Internet Archive': r'ia_archiver|ia-archiver',
    'Wayback Machine': r'wayback',
    'Heritrix': r'heritrix',
    'Apache Nutch': r'nutch',
    'Common Crawl': r'commoncrawl',
    # =========================================================================
    # MONITORING & UPTIME
    # =========================================================================
    'UptimeRobot': r'uptimerobot',
    'Pingdom': r'pingdom',
    'StatusCake': r'statuscake',
    'Site24x7': r'site24x7',
    'NewRelic': r'newrelic',
    'Datadog': r'datadog',
    'GTmetrix': r'gtmetrix',
    'PageSpeed Insights': r'pagespeed',
    'Chrome Lighthouse': r'chrome-lighthouse',
    # =========================================================================
    # DOWNLOAD & SCRAPER TOOLS
    # =========================================================================
    'HTTrack': r'httrack',
    'Teleport Pro': r'teleportpro|teleport pro',
    'Teleport': r'teleport',
    'GetRight': r'getright',
    'FlashGet': r'flashget',
    'LeechFTP': r'leechftp',
    'LeechGet': r'leechget',
    'Leech': r'leech',
    'Offline Explorer': r'offline explorer',
    'Offline Navigator': r'offline navigator',
    'Offline Tool': r'offline',
    'WebCopier': r'webcopier',
    'WebCopy': r'webcopy',
    'WebRipper': r'webripper',
    'WebReaper': r'webreaper',
    'WebStripper': r'webstripper',
    'WebSauger': r'websauger',
    'WebZIP': r'webzip',
    'WebWhacker': r'webwhacker',
    'WebBandit': r'webbandit',
    'SiteSucker': r'sitesucker',
    'SiteSnagger': r'sitesnagger',
    'BlackWidow': r'blackwidow',
    'Mass Downloader': r'mass downloader',
    'Download Demon': r'download demon',
    'Download Ninja': r'download ninja',
    'Download Master': r'download master',
    'FreshDownload': r'freshdownload',
    'SmartDownload': r'smartdownload',
    'RealDownload': r'realdownload',
    'StarDownloader': r'stardownloader',
    'Net Vampire': r'net vampire',
    'NetAnts': r'netants',
    'NetZIP': r'netzip',
    'Go!Zilla': r'go!zilla|gozilla',
    'Grabber': r'grabber',
    'PageGrabber': r'pagegrabber',
    'EirGrabber': r'eirgrabber',
    'EmailSiphon': r'emailsiphon',
    'EmailCollector': r'emailcollector',
    'EmailWolf': r'emailwolf',
    'Email Extractor': r'email extractor',
    'ExtractorPro': r'extractorpro',
    'HarvestMan': r'harvestman',
    'Harvest': r'harvest',
    'Collector': r'collector',
    'Vacuum': r'vacuum',
    'WebVac': r'webvac',
    'Zeus': r'zeus',
    'ScrapeBox': r'scrapebox',
    'Xenu Link Sleuth': r'xenu',
    'Larbin': r'larbin',
    'Grub': r'grub',
    # =========================================================================
    # HTTP LIBRARIES & FRAMEWORKS
    # =========================================================================
    'Python-Requests': r'python-requests',
    'Python-urllib': r'python-urllib',
    'Python-HTTPX': r'python-httpx',
    'Python HTTP': r'python/',
    'aiohttp': r'aiohttp',
    'HTTPX': r'httpx/',
    'cURL': r'curl/|^curl',
    'Wget': r'wget/|^wget',
    'Go-HTTP-Client': r'go-http-client',
    'Go HTTP': r'go http|go-http',
    'Java HTTP Client': r'java/|java ',
    'Apache-HttpClient': r'apache-httpclient',
    'Jakarta Commons': r'jakarta',
    'Axios': r'axios/|axios',
    'Node-Fetch': r'node-fetch',
    'Got (Node.js)': r'got/',
    'libwww-perl': r'libwww-perl',
    'LWP (Perl)': r'lwp::|lwp/',
    'WWW-Mechanize': r'www-mechanize',
    'Mechanize': r'mechanize',
    'Scrapy': r'scrapy/|scrapy',
    'HTTP.rb': r'http\.rb',
    'Typhoeus': r'typhoeus',
    'OkHttp': r'okhttp/|okhttp',
    'CFNetwork': r'cfnetwork',
    'WinHTTP': r'winhttp',
    'Indy Library': r'indy library',
    'Chilkat': r'chilkat',
    'httplib': r'httplib',
    'ApacheBench': r'apachebench',
    'Guzzle (PHP)': r'guzzle',
    'Requests': r'requests/',
    # =========================================================================
    # SECURITY SCANNERS
    # =========================================================================
    'Nessus': r'nessus',
    'SQLMap': r'sqlmap',
    'Netsparker': r'netsparker',
    'Nikto': r'nikto',
    'Acunetix': r'acunetix',
    'Burp Suite': r'burpsuite|burp',
    'OWASP ZAP': r'owasp zap',
    'OpenVAS': r'openvas',
    'Nmap': r'nmap',
    'Masscan': r'masscan',
    'WPScan': r'wpscan',
    # =========================================================================
    # HEADLESS BROWSERS & AUTOMATION
    # =========================================================================
    'PhantomJS': r'phantomjs',
    'Headless Chrome': r'headlesschrome',
    'Headless Browser': r'headless',
    'Selenium': r'selenium',
    'Puppeteer': r'puppeteer',
    'Playwright': r'playwright',
    'Cypress': r'cypress',
    # =========================================================================
    # FEED READERS & RSS
    # =========================================================================
    'FeedFetcher': r'feedfetcher',
    'FeedParser': r'feedparser',
    'Feedly': r'feedly',
    'Inoreader': r'inoreader',
    'NewsBlur': r'newsblur',
    # =========================================================================
    # OTHER WELL-KNOWN BOTS
    # =========================================================================
    'OmgiliBot': r'omgilibot',
    'Omgili': r'omgili',
    'Webzio-Extended': r'webzio-extended',
    'Webzio': r'webzio',
    'Timpibot': r'timpibot',
    'PanguBot': r'pangubot',
    'ImagesiftBot': r'imagesiftbot',
    'Kangaroo Bot': r'kangaroo bot',
    'QualifiedBot': r'qualifiedbot',
    'VelenPublicWebCrawler': r'velenpublicwebcrawler',
    'Linguee Bot': r'linguee bot',
    'Linguee': r'linguee',
    'QuillBot': r'quillbot',
    'TurnitinBot': r'turnitinbot',
    'Turnitin': r'turnitin',
    'ZanistaBot': r'zanistabot',
    'WRTNBot': r'wrtnbot',
    'WARDBot': r'wardbot',
    'ShapBot': r'shapbot',
    'LinerBot': r'linerbot',
    'LinkupBot': r'linkupbot',
    'KlaviyoAIBot': r'klaviyoaibot',
    'KunatoCrawler': r'kunatocrawler',
    'IbouBot': r'iboubot',
    'BuddyBot': r'buddybot',
    'BrightBot': r'brightbot',
    'Channel3Bot': r'channel3bot',
    'Andibot': r'andibot',
    'Anomura': r'anomura',
    'Awario': r'awario',
    'BigSur.ai': r'bigsur',
    'Cotoyogi': r'cotoyogi',
    'AddSearchBot': r'addsearchbot',
    'aiHitBot': r'aihitbot',
    'Atlassian-Bot': r'atlassian-bot',
    'RainBot': r'rainbot',
    'TinyTestBot': r'tinytestbot',
    'Brandwatch': r'brandwatch',
    'Meltwater': r'meltwater',
    'Netvibes': r'netvibes',
    'BitlyBot': r'bitlybot',
    'Mail.ru Bot': r'mail\.ru',
    'YaK': r'yak',
}

# Generic patterns (fallback for unknown bots)
GENERIC_BOT_PATTERNS = [
    'bot', 'crawler', 'spider', 'scraper', 'fetch', 'scan', 'check',
    'monitor', 'probe', 'index', 'archive', 'capture', 'reader',
    'download', 'mirror', 'ripper', 'collector', 'extractor', 'siphon',
    'copier', 'sucker', 'bandit', 'stripper', 'whacker', 'reaper',
    'robot', 'agent', 'seeker', 'finder', 'walker', 'roam', 'snagger',
]

# =============================================================================
# LOGGING SETUP
# =============================================================================
def setup_logging(debug: bool = False):
    """Configures logging with rotation."""
    log_level = logging.DEBUG if debug else logging.INFO

    # Formatter
    formatter = logging.Formatter(
        '%(asctime)s [%(levelname)s] %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S'
    )

    # Console handler
    console_handler = logging.StreamHandler()
    console_handler.setFormatter(formatter)
    console_handler.setLevel(log_level)

    # File handler with rotation
    handlers = [console_handler]

    log_dir = os.path.dirname(LOG_FILE)
    if log_dir and os.path.exists(log_dir):
        try:
            file_handler = RotatingFileHandler(
                LOG_FILE,
                maxBytes=LOG_MAX_SIZE,
                backupCount=LOG_BACKUP_COUNT
            )
            file_handler.setFormatter(formatter)
            file_handler.setLevel(log_level)
            handlers.append(file_handler)
        except PermissionError:
            pass

    # Configure the logger
    logger = logging.getLogger('geoip_agent')
    logger.setLevel(log_level)
    logger.handlers = handlers

    return logger
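# Illustrative usage (assumed entry-point wiring, not shown in this excerpt):
#
#     logger = setup_logging(debug=True)
#
# main() is expected to call this once at startup; until then the
# module-level logger below is a bare placeholder without handlers.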
# Global logger (initialized in main())
logger = logging.getLogger('geoip_agent')


# =============================================================================
# UTILITY FUNCTIONS
# =============================================================================
def utc_now() -> datetime:
    """Returns the current UTC time."""
    return datetime.now(timezone.utc)


def utc_now_iso() -> str:
    """Returns the current UTC time as an ISO string."""
    return utc_now().strftime('%Y-%m-%dT%H:%M:%SZ')


def format_duration(minutes: float) -> str:
    """Formats minutes as a human-readable duration."""
    if minutes < 60:
        return f"{int(minutes)}m"
    hours = minutes / 60
    if hours < 24:
        return f"{int(hours)}h {int(minutes % 60)}m"
    return f"{int(hours / 24)}d {int(hours % 24)}h"
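# Illustrative examples for the helpers above (doctest-style):
#
#     >>> format_duration(45)
#     '45m'
#     >>> format_duration(90)
#     '1h 30m'
#     >>> format_duration(25 * 60)
#     '1d 1h'
#
# utc_now_iso() returns e.g. '2025-01-01T12:00:00Z' (timestamp illustrative).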
# =============================================================================
# OWNERSHIP HELPER FUNCTIONS
# =============================================================================
def get_most_common_owner(httpdocs_path: str):
    """
    Determines the most common uid:gid combination in the httpdocs directory.
    Returns (uid, gid), or (None, None) if it cannot be determined.
    """
    if not os.path.isdir(httpdocs_path):
        return None, None

    owner_counts = Counter()

    try:
        for entry in os.listdir(httpdocs_path):
            entry_path = os.path.join(httpdocs_path, entry)
            try:
                stat_info = os.stat(entry_path)
                owner_counts[(stat_info.st_uid, stat_info.st_gid)] += 1
            except (OSError, IOError):
                continue
    except (OSError, IOError):
        return None, None

    if not owner_counts:
        return None, None

    most_common = owner_counts.most_common(1)[0][0]
    return most_common


def set_owner(path: str, uid: int, gid: int, recursive: bool = False):
    """
    Sets owner and group for a file or directory.
    Optionally recursive for directories.
    """
    if uid is None or gid is None:
        return

    try:
        os.chown(path, uid, gid)

        if recursive and os.path.isdir(path):
            for root, dirs, files in os.walk(path):
                for d in dirs:
                    try:
                        os.chown(os.path.join(root, d), uid, gid)
                    except (OSError, IOError):
                        pass
                for f in files:
                    try:
                        os.chown(os.path.join(root, f), uid, gid)
                    except (OSError, IOError):
                        pass
    except (OSError, IOError):
        pass
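# Illustrative usage (hypothetical shop path): agent-generated files should
# mirror the ownership already prevailing in the shop's httpdocs directory.
# get_most_common_owner() may return (None, None), in which case set_owner()
# is a no-op:
#
#     uid, gid = get_most_common_owner('/var/www/vhosts/example.com/httpdocs')
#     set_owner('/var/www/vhosts/example.com/httpdocs/geoip_blocking.php', uid, gid)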
# =============================================================================
# BOT DETECTION FUNCTIONS
# =============================================================================
def ip_in_cidr(ip_str: str, cidr_str: str) -> bool:
    """Checks whether an IP lies within a CIDR network."""
    try:
        ip = ipaddress.ip_address(ip_str)
        network = ipaddress.ip_network(cidr_str, strict=False)
        return ip in network
    except ValueError:
        return False


def detect_bot(user_agent: str, ip: Optional[str] = None) -> str:
    """
    Detects bots based on the User-Agent and/or the IP.
    IP-based detection takes priority (for disguised bots).
    Returns the display name ('Unbekannt' is the unknown sentinel,
    kept as-is because it is compared against elsewhere).
    """
    # STEP 1: IP-based detection (highest priority)
    if ip:
        for bot_name, ip_ranges in BOT_IP_RANGES.items():
            for cidr in ip_ranges:
                if ip_in_cidr(ip, cidr):
                    return bot_name

    if not user_agent or user_agent == 'Unknown':
        return 'Unbekannt'

    # STEP 2: Specific User-Agent patterns
    for bot_name, pattern in BOT_PATTERNS.items():
        if re.search(pattern, user_agent, re.IGNORECASE):
            return bot_name

    # STEP 3: Generic patterns as fallback
    ua_lower = user_agent.lower()
    for pattern in GENERIC_BOT_PATTERNS:
        if pattern in ua_lower:
            return f'Bot ({pattern})'

    return 'Unbekannt'
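# Illustrative examples (doctest-style, based on the tables above):
#
#     >>> detect_bot('Mozilla/5.0 (compatible; GPTBot/1.1; +https://openai.com/gptbot)')
#     'GPTBot (OpenAI)'
#     >>> detect_bot('Mozilla/5.0 (Windows NT 10.0)', ip='47.88.10.5')  # IP match wins
#     'Alibaba-Bot'
#     >>> detect_bot('my-custom-crawler/1.0')  # generic fallback
#     'Bot (crawler)'
#     >>> detect_bot('Mozilla/5.0 (Windows NT 10.0)')
#     'Unbekannt'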
# =============================================================================
# LINK11 CHECK
# =============================================================================
def check_link11(domain: str) -> Dict[str, Any]:
    """Checks whether a domain is behind Link11."""
    global DNS_CACHE

    if domain in DNS_CACHE:
        return DNS_CACHE[domain]

    try:
        ip = socket.gethostbyname(domain)
        is_link11 = (ip == LINK11_IP)
        DNS_CACHE[domain] = {'is_link11': is_link11, 'ip': ip}
        return DNS_CACHE[domain]
    except socket.gaierror:
        DNS_CACHE[domain] = {'is_link11': False, 'ip': 'N/A'}
        return DNS_CACHE[domain]


def get_geo_region_info(geo_region: str) -> Dict[str, Any]:
    """Returns region info (falls back to DACH)."""
    return GEO_REGIONS.get(geo_region, GEO_REGIONS["dach"])


def is_bot_mode(mode: str) -> bool:
    """Checks whether the mode is bot mode."""
    return mode == 'bot'
# =============================================================================
# PHP TEMPLATE GENERATORS
# =============================================================================
def generate_php_countries_array(geo_region: str) -> str:
    """Generates a PHP array of the allowed countries."""
    region_info = get_geo_region_info(geo_region)
    return ", ".join([f"'{c}'" for c in region_info["countries"]])


def generate_php_bot_patterns() -> str:
    """Generates a PHP array of the bot patterns."""
    patterns = []
    for bot_name, pattern in BOT_PATTERNS.items():
        escaped_pattern = pattern.replace("'", "\\'").replace("/", "\\/")
        safe_bot_name = bot_name.replace("'", "\\'")
        patterns.append(f"'{safe_bot_name}' => '/{escaped_pattern}/i'")
    return ",\n    ".join(patterns)


def generate_php_generic_patterns() -> str:
    """Generates a PHP array of the generic patterns."""
    patterns = [f"'{pattern}'" for pattern in GENERIC_BOT_PATTERNS]
    return ", ".join(patterns)


def generate_php_bot_ip_ranges() -> str:
    """Generates a PHP array for IP-based bot detection."""
    lines = []
    for bot_name, ip_ranges in BOT_IP_RANGES.items():
        safe_bot_name = bot_name.replace("'", "\\'")
        ranges_str = ", ".join([f"'{r}'" for r in ip_ranges])
        lines.append(f"'{safe_bot_name}' => [{ranges_str}]")
    return ",\n    ".join(lines)
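# Illustrative output of generate_php_bot_patterns() (one entry per pattern,
# joined with ",\n    " and pasted verbatim into the templates below):
#
#     'ChatGPT-User' => '/chatgpt-user/i',
#     'Archive.org Bot' => '/archive\.org_bot|archive-org-bot/i'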
# =============================================================================
# PHP TEMPLATES - GEOIP
# =============================================================================
GEOIP_SCRIPT_TEMPLATE = '''<?php
/**
 * GeoIP Blocking Script - {region_name}
 * Valid until: {expiry_date}
 * FAIL-OPEN: If cache is corrupt/empty, traffic is allowed through
 */

$expiry_date = strtotime('{expiry_timestamp}');
if (time() > $expiry_date) return;

$visitor_ip = $_SERVER['REMOTE_ADDR'] ?? '';
if (empty($visitor_ip)) return;
if (filter_var($visitor_ip, FILTER_VALIDATE_IP, FILTER_FLAG_NO_PRIV_RANGE | FILTER_FLAG_NO_RES_RANGE) === false) return;

$cache_file = __DIR__ . '/{cache_file}';
$cache_duration = 86400;
$log_file = __DIR__ . '/{log_file}';
$min_ranges = {min_ranges};
$allowed_countries = [{countries_array}];

function download_allowed_ranges($countries) {{
    $ranges = [];
    foreach ($countries as $country) {{
        $url = "https://www.ipdeny.com/ipblocks/data/aggregated/$country-aggregated.zone";
        $ctx = stream_context_create(['http' => ['timeout' => 30]]);
        $content = @file_get_contents($url, false, $ctx);
        if ($content !== false) {{
            foreach (explode("\\n", trim($content)) as $line) {{
                $line = trim($line);
                if (!empty($line) && strpos($line, '/') !== false) $ranges[] = $line;
            }}
        }}
    }}
    return $ranges;
}}

function ip_in_range($ip, $cidr) {{
    list($subnet, $mask) = explode('/', $cidr);
    $mask_long = -1 << (32 - (int)$mask);
    return (ip2long($ip) & $mask_long) == (ip2long($subnet) & $mask_long);
}}

$allowed_ranges = [];
$cache_valid = false;

if (file_exists($cache_file) && (time() - filemtime($cache_file)) < $cache_duration) {{
    $cached_data = @file_get_contents($cache_file);
    if ($cached_data !== false) {{
        $allowed_ranges = @unserialize($cached_data);
        if (is_array($allowed_ranges) && count($allowed_ranges) >= $min_ranges) {{
            $cache_valid = true;
        }} else {{
            @unlink($cache_file);
            $allowed_ranges = [];
        }}
    }}
}}

if (!$cache_valid) {{
    $allowed_ranges = download_allowed_ranges($allowed_countries);
    if (is_array($allowed_ranges) && count($allowed_ranges) >= $min_ranges) {{
        @file_put_contents($cache_file, serialize($allowed_ranges));
        $cache_valid = true;
    }} else {{
        error_log("GeoIP FAIL-OPEN: Could not load valid IP ranges (got " . count($allowed_ranges) . ", need $min_ranges)");
        return;
    }}
}}

$is_allowed = false;
foreach ($allowed_ranges as $range) {{
    if (ip_in_range($visitor_ip, $range)) {{ $is_allowed = true; break; }}
}}

if (!$is_allowed) {{
    $timestamp = date('Y-m-d H:i:s');
    $ua = $_SERVER['HTTP_USER_AGENT'] ?? 'Unknown';
    $uri = $_SERVER['REQUEST_URI'] ?? '/';
    @file_put_contents($log_file, "[$timestamp] IP: $visitor_ip | UA: $ua | URI: $uri\\n", FILE_APPEND | LOCK_EX);
    header('HTTP/1.1 403 Forbidden');
    exit;
}}
'''

# =============================================================================
# PHP TEMPLATES - BOT RATE-LIMITING
# =============================================================================
BOT_SCRIPT_TEMPLATE = '''<?php
/**
 * Bot Rate-Limiting Script - By Bot-Type
 * Valid until: {expiry_date}
 * Rate-limits known bots/crawlers BY BOT-TYPE (not by IP)
 * All requests from the same bot-type share ONE counter
 * Includes IP-based detection for bots that disguise their User-Agent
 * Rate-Limit: {rate_limit} req/min, Ban: {ban_duration_min} min
 */

$expiry_date = strtotime('{expiry_timestamp}');
if (time() > $expiry_date) return;

$log_file = __DIR__ . '/{log_file}';
$ratelimit_dir = __DIR__ . '/{ratelimit_dir}';
$bans_dir = $ratelimit_dir . '/bans';
$counts_dir = $ratelimit_dir . '/counts';

// Rate-limit configuration
$rate_limit = {rate_limit};      // Requests per minute for this bot-type
$ban_duration = {ban_duration};  // Ban duration in seconds
$window_size = 60;               // Window size in seconds (1 minute)
$cleanup_probability = 100;      // 1 in X chance to run cleanup

$visitor_ip = $_SERVER['REMOTE_ADDR'] ?? '';
$user_agent = $_SERVER['HTTP_USER_AGENT'] ?? '';

// Ensure directories exist
if (!is_dir($bans_dir)) @mkdir($bans_dir, 0777, true);
if (!is_dir($counts_dir)) @mkdir($counts_dir, 0777, true);

// === IP-in-CIDR check function ===
function ip_in_cidr($ip, $cidr) {{
    if (strpos($cidr, '/') === false) return false;
    list($subnet, $mask) = explode('/', $cidr);
    $ip_long = ip2long($ip);
    $subnet_long = ip2long($subnet);
    if ($ip_long === false || $subnet_long === false) return false;
    $mask_long = -1 << (32 - (int)$mask);
    return ($ip_long & $mask_long) === ($subnet_long & $mask_long);
}}

// === Bot IP ranges (for disguised bots) ===
$bot_ip_ranges = [
    {bot_ip_ranges}
];

// === Bot detection patterns (User-Agent) ===
$bot_patterns = [
    {bot_patterns}
];

$generic_patterns = [{generic_patterns}];

// === STEP 0: IP-based bot detection (highest priority) ===
$detected_bot = null;

if (!empty($visitor_ip)) {{
    foreach ($bot_ip_ranges as $bot_name => $ip_ranges) {{
        foreach ($ip_ranges as $cidr) {{
            if (ip_in_cidr($visitor_ip, $cidr)) {{
                $detected_bot = $bot_name;
                break 2; // Break out of both loops
            }}
        }}
    }}
}}

// === STEP 1: User-Agent-based detection (if not matched by IP) ===
if ($detected_bot === null && !empty($user_agent)) {{
    // Check specific patterns first
    foreach ($bot_patterns as $bot_name => $pattern) {{
        if (preg_match($pattern, $user_agent)) {{
            $detected_bot = $bot_name;
            break;
        }}
    }}

    // Check generic patterns as fallback
    if ($detected_bot === null) {{
        $ua_lower = strtolower($user_agent);
        foreach ($generic_patterns as $pattern) {{
            if (strpos($ua_lower, $pattern) !== false) {{
                $detected_bot = "Bot ($pattern)";
                break;
            }}
        }}
    }}
}}

// Not a bot - allow through without any rate limiting
if ($detected_bot === null) return;

// === Create hash based on BOT-TYPE only (not IP!) ===
$bot_hash = md5($detected_bot);

// === STEP 2: Check if this bot-type is banned ===
$ban_file = "$bans_dir/$bot_hash.ban";
if (file_exists($ban_file)) {{
    $ban_content = @file_get_contents($ban_file);
    $ban_parts = explode('|', $ban_content, 2);
    $ban_until = (int)$ban_parts[0];
    if (time() < $ban_until) {{
        // Bot-type is banned - log and block
        $timestamp = date('Y-m-d H:i:s');
        $remaining = $ban_until - time();
        @file_put_contents($log_file, "[$timestamp] BLOCKED (banned): $detected_bot | IP: $visitor_ip | Remaining: {{$remaining}}s\\n", FILE_APPEND | LOCK_EX);
        header('HTTP/1.1 403 Forbidden');
        header('Retry-After: ' . $remaining);
        exit;
    }}
    // Ban expired - remove file
    @unlink($ban_file);
}}

// === STEP 3: Rate-limit check for this bot-type ===
$count_file = "$counts_dir/$bot_hash.count";
$current_time = time();
$count = 1;
$window_start = $current_time;

if (file_exists($count_file)) {{
    $fp = @fopen($count_file, 'c+');
    if ($fp && flock($fp, LOCK_EX)) {{
        $content = fread($fp, 100);
        if (!empty($content)) {{
            $parts = explode('|', $content);
            if (count($parts) === 2) {{
                $window_start = (int)$parts[0];
                $count = (int)$parts[1];

                if ($current_time - $window_start > $window_size) {{
                    // New window
                    $window_start = $current_time;
                    $count = 1;
                }} else {{
                    $count++;
                }}
            }}
        }}

        ftruncate($fp, 0);
        rewind($fp);
        fwrite($fp, "$window_start|$count");
        flock($fp, LOCK_UN);
        fclose($fp);
    }}
}} else {{
    @file_put_contents($count_file, "$window_start|$count", LOCK_EX);
}}

// === STEP 4: Check if limit exceeded ===
if ($count > $rate_limit) {{
    // Create ban for this bot-type (store timestamp|botname)
    $ban_until = $current_time + $ban_duration;
    @file_put_contents($ban_file, "$ban_until|$detected_bot", LOCK_EX);

    // Log the ban
    $timestamp = date('Y-m-d H:i:s');
    $ban_minutes = $ban_duration / 60;
    @file_put_contents($log_file, "[$timestamp] BANNED: $detected_bot | IP: $visitor_ip | Exceeded $rate_limit req/min | Ban: {{$ban_minutes}}m | Total requests: $count\\n", FILE_APPEND | LOCK_EX);

    // Block this request
    header('HTTP/1.1 403 Forbidden');
    header('Retry-After: ' . $ban_duration);
    exit;
}}

// === STEP 5: Under limit - log and ALLOW through ===
$timestamp = date('Y-m-d H:i:s');
$uri = $_SERVER['REQUEST_URI'] ?? '/';
@file_put_contents($log_file, "[$timestamp] BOT: $detected_bot | IP: $visitor_ip | Count: $count/$rate_limit | URI: $uri\\n", FILE_APPEND | LOCK_EX);

// === STEP 6: Probabilistic cleanup ===
if (rand(1, $cleanup_probability) === 1) {{
    $now = time();
    foreach (glob("$bans_dir/*.ban") as $f) {{
        $ban_content = @file_get_contents($f);
        $ban_parts = explode('|', $ban_content, 2);
        $ban_time = (int)$ban_parts[0];
        if ($now > $ban_time) @unlink($f);
    }}
    foreach (glob("$counts_dir/*.count") as $f) {{
        if ($now - filemtime($f) > $window_size * 2) @unlink($f);
    }}
}}

// Bot is under rate limit - ALLOW through (no exit, no 403)
return;
'''
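# Illustrative on-disk state produced by the bot template above (paths are
# relative to the shop's geoip_ratelimit directory; values invented):
#
#     counts/<md5(bot_name)>.count  ->  "1717000000|17"
#         (window_start|count: 17 requests since the window began)
#     bans/<md5(bot_name)>.ban      ->  "1717000300|GPTBot (OpenAI)"
#         (ban_until|bot name: banned until the given Unix timestamp)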
# =============================================================================
# CACHE VALIDATION FUNCTIONS
# =============================================================================
def generate_and_validate_cache(httpdocs_path: str, geo_region: str,
                                uid: Optional[int] = None, gid: Optional[int] = None):
    """Generates and validates the IP range cache."""
    cache_file = os.path.join(httpdocs_path, CACHE_FILE)
    region_info = get_geo_region_info(geo_region)
    countries = region_info["countries"]
    min_expected = MIN_RANGES.get(geo_region, 1000)

    php_script = f'''<?php
$countries = {json.dumps(countries)};
$ranges = [];
foreach ($countries as $country) {{
    $url = "https://www.ipdeny.com/ipblocks/data/aggregated/$country-aggregated.zone";
    $ctx = stream_context_create(['http' => ['timeout' => 30]]);
    $content = @file_get_contents($url, false, $ctx);
    if ($content !== false) {{
        $lines = explode("\\n", trim($content));
        foreach ($lines as $line) {{
            $line = trim($line);
            if (!empty($line) && strpos($line, '/') !== false) {{
                $ranges[] = $line;
            }}
        }}
    }}
}}
if (count($ranges) >= {min_expected}) {{
    file_put_contents("{cache_file}", serialize($ranges));
    echo "OK:" . count($ranges);
}} else {{
    echo "FAIL:" . count($ranges);
}}
'''

    temp_php = os.path.join(httpdocs_path, '_geoip_cache_gen.php')
    try:
        with open(temp_php, 'w') as f:
            f.write(php_script)
        result = subprocess.run(['php', temp_php], capture_output=True, text=True, timeout=120)
        output = result.stdout.strip()
        if output.startswith('OK:'):
            if os.path.isfile(cache_file):
                set_owner(cache_file, uid, gid)
            return True, int(output.split(':')[1]), None
        elif output.startswith('FAIL:'):
            return False, int(output.split(':')[1]), f"Only {output.split(':')[1]} ranges (at least {min_expected} expected)"
        return False, 0, f"Unexpected output: {output}"
    except subprocess.TimeoutExpired:
        return False, 0, "Timeout while downloading IP ranges"
    except Exception as e:
        return False, 0, str(e)
    finally:
        if os.path.exists(temp_php):
            os.remove(temp_php)


def validate_existing_cache(httpdocs_path: str, geo_region: str):
    """Validates an existing cache."""
    cache_file = os.path.join(httpdocs_path, CACHE_FILE)
    min_expected = MIN_RANGES.get(geo_region, 1000)

    if not os.path.exists(cache_file):
        return False, 0, "Cache file does not exist"

    # Note: `php -r` expects code without the leading <?php tag.
    php_script = f'''
$data = @unserialize(@file_get_contents("{cache_file}"));
if ($data === false || !is_array($data)) {{ echo "CORRUPT:0"; }}
else {{ echo "OK:" . count($data); }}
'''
    try:
        result = subprocess.run(['php', '-r', php_script], capture_output=True, text=True, timeout=10)
        output = result.stdout.strip()
        if output.startswith('OK:'):
            count = int(output.split(':')[1])
            if count >= min_expected:
                return True, count, None
            return False, count, f"Only {count} ranges (at least {min_expected} expected)"
        return False, 0, "Cache file is corrupt"
    except Exception as e:
        return False, 0, str(e)
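# The helper PHP snippets above report back over stdout with a tiny
# "STATUS:count" protocol, e.g. "OK:24321", "FAIL:312" or "CORRUPT:0"
# (counts illustrative); the Python side parses that string to decide
# whether the cache is usable.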
# =============================================================================
# SHOP REGISTRY FUNCTIONS
# =============================================================================
def add_shop_to_active(shop: str, mode: str = "geoip", geo_region: str = "dach",
                       rate_limit: Optional[int] = None, ban_duration: Optional[int] = None):
    """Registers a shop as active."""
    os.makedirs(os.path.dirname(ACTIVE_SHOPS_FILE), exist_ok=True)
    shops = {}
    if os.path.isfile(ACTIVE_SHOPS_FILE):
        try:
            with open(ACTIVE_SHOPS_FILE, 'r') as f:
                shops = json.load(f)
        except Exception:
            shops = {}

    shop_data = {
        "activated": datetime.now().isoformat(),
        "expiry": (datetime.now() + timedelta(hours=72)).isoformat(),
        "mode": mode,
        "geo_region": geo_region
    }
    if rate_limit is not None:
        shop_data["rate_limit"] = rate_limit
    if ban_duration is not None:
        shop_data["ban_duration"] = ban_duration

    shops[shop] = shop_data
    with open(ACTIVE_SHOPS_FILE, 'w') as f:
        json.dump(shops, f, indent=2)


def remove_shop_from_active(shop: str):
    """Removes a shop from the active list."""
    if not os.path.isfile(ACTIVE_SHOPS_FILE):
        return
    try:
        with open(ACTIVE_SHOPS_FILE, 'r') as f:
            shops = json.load(f)
        if shop in shops:
            del shops[shop]
            with open(ACTIVE_SHOPS_FILE, 'w') as f:
                json.dump(shops, f, indent=2)
    except Exception:
        pass


def get_shop_mode(shop: str) -> str:
    """Returns a shop's mode."""
    if not os.path.isfile(ACTIVE_SHOPS_FILE):
        return "geoip"
    try:
        with open(ACTIVE_SHOPS_FILE, 'r') as f:
            return json.load(f).get(shop, {}).get("mode", "geoip")
    except Exception:
        return "geoip"


def get_shop_geo_region(shop: str) -> str:
    """Returns a shop's geo region."""
    if not os.path.isfile(ACTIVE_SHOPS_FILE):
        return "dach"
    try:
        with open(ACTIVE_SHOPS_FILE, 'r') as f:
            return json.load(f).get(shop, {}).get("geo_region", "dach")
    except Exception:
        return "dach"


def get_shop_rate_limit_config(shop: str):
    """Returns a shop's rate-limit config."""
    if not os.path.isfile(ACTIVE_SHOPS_FILE):
        return None, None
    try:
        with open(ACTIVE_SHOPS_FILE, 'r') as f:
            shop_data = json.load(f).get(shop, {})
        return shop_data.get("rate_limit"), shop_data.get("ban_duration")
    except Exception:
        return None, None


def get_shop_activation_time(shop: str) -> Optional[datetime]:
    """Returns a shop's activation time."""
    if not os.path.isfile(ACTIVE_SHOPS_FILE):
        return None
    try:
        with open(ACTIVE_SHOPS_FILE, 'r') as f:
            activated_str = json.load(f).get(shop, {}).get("activated")
        return datetime.fromisoformat(activated_str) if activated_str else None
    except Exception:
        return None


def get_available_shops() -> List[str]:
    """Returns a list of all available shops."""
    shops = []
    if not os.path.exists(VHOSTS_DIR):
        return shops

    try:
        for entry in os.listdir(VHOSTS_DIR):
            shop_path = os.path.join(VHOSTS_DIR, entry)
            if os.path.isdir(shop_path) and entry not in ['chroot', 'system', 'default']:
                httpdocs = os.path.join(shop_path, 'httpdocs')
                if os.path.isdir(httpdocs) and os.path.isfile(os.path.join(httpdocs, 'index.php')):
                    shops.append(entry)
    except Exception:
        pass

    return sorted(shops)


def get_active_shops() -> List[str]:
    """Returns a list of all active shops."""
    active = []
    for shop in get_available_shops():
        httpdocs = os.path.join(VHOSTS_DIR, shop, 'httpdocs')
        if os.path.isfile(os.path.join(httpdocs, BLOCKING_FILE)) or \
           os.path.isfile(os.path.join(httpdocs, f'index.php{BACKUP_SUFFIX}')):
            active.append(shop)
    return active
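# Illustrative active_shops.json content as written by add_shop_to_active()
# (domain and timestamps invented; rate_limit/ban_duration only in bot mode):
#
#     {
#       "example.com": {
#         "activated": "2025-01-01T12:00:00",
#         "expiry": "2025-01-04T12:00:00",
#         "mode": "bot",
#         "geo_region": "none",
#         "rate_limit": 30,
#         "ban_duration": 300
#       }
#     }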
# =============================================================================
# ACTIVATE / DEACTIVATE BLOCKING
# =============================================================================
def activate_blocking(shop: str, silent: bool = True, mode: str = "geoip",
                      geo_region: str = "dach", rate_limit: Optional[int] = None,
                      ban_duration: Optional[int] = None) -> bool:
    """
    Activates blocking for a shop.

    Args:
        shop: Shop domain
        silent: Suppress console output
        mode: "geoip" or "bot"
        geo_region: "dach", "eurozone" or "none"
        rate_limit: Requests per minute (bot mode only)
        ban_duration: Ban duration in seconds (bot mode only)

    Returns:
        True on success, False otherwise
    """
    httpdocs = os.path.join(VHOSTS_DIR, shop, 'httpdocs')
    index_php = os.path.join(httpdocs, 'index.php')
    backup_php = os.path.join(httpdocs, f'index.php{BACKUP_SUFFIX}')
    blocking_file = os.path.join(httpdocs, BLOCKING_FILE)
    ratelimit_path = os.path.join(httpdocs, RATELIMIT_DIR)

    bot_mode = is_bot_mode(mode)

    if bot_mode:
        region_info = get_geo_region_info("none")
        geo_region = "none"
    else:
        region_info = get_geo_region_info(geo_region)

    min_ranges = MIN_RANGES.get(geo_region, 1000)

    # Check whether blocking is already active
    if os.path.isfile(backup_php):
        if not silent:
            logger.warning(f"Blocking already active for {shop}")
        return False

    if not os.path.isfile(index_php):
        if not silent:
            logger.error(f"index.php not found for {shop}")
        return False

    # Determine owner
    uid, gid = get_most_common_owner(httpdocs)

    if not silent:
        logger.info(f"Activating {region_info['icon']} {region_info['name']} for: {shop}")

    try:
        # Step 1: Back up index.php and enable PHP blocking
        shutil.copy2(index_php, backup_php)
        set_owner(backup_php, uid, gid)

        with open(index_php, 'r', encoding='utf-8') as f:
            content = f.read()

        lines = content.split('\n')
        insert_line = 0
        for i, line in enumerate(lines):
            if 'declare(strict_types' in line:
                insert_line = i + 1
                break
            elif '<?php' in line and insert_line == 0:
                insert_line = i + 1

        require_statement = f"require_once __DIR__ . '/{BLOCKING_FILE}';"
        if require_statement not in content:
            lines.insert(insert_line, require_statement)
            with open(index_php, 'w', encoding='utf-8') as f:
                f.write('\n'.join(lines))
            set_owner(index_php, uid, gid)

        expiry = datetime.now() + timedelta(hours=72)

        # Step 2: Create the blocking script
        if bot_mode:
            # Create rate-limit directories
            os.makedirs(os.path.join(ratelimit_path, 'bans'), mode=0o777, exist_ok=True)
            os.makedirs(os.path.join(ratelimit_path, 'counts'), mode=0o777, exist_ok=True)
            os.chmod(ratelimit_path, 0o777)
            os.chmod(os.path.join(ratelimit_path, 'bans'), 0o777)
            os.chmod(os.path.join(ratelimit_path, 'counts'), 0o777)
            set_owner(ratelimit_path, uid, gid, recursive=True)

            if rate_limit is None:
                rate_limit = DEFAULT_RATE_LIMIT
            if ban_duration is None:
                ban_duration = DEFAULT_BAN_DURATION * 60

            geoip_content = BOT_SCRIPT_TEMPLATE.format(
                expiry_date=expiry.strftime('%Y-%m-%d %H:%M:%S CET'),
                expiry_timestamp=expiry.strftime('%Y-%m-%d %H:%M:%S'),
                log_file=SHOP_LOG_FILE,
                ratelimit_dir=RATELIMIT_DIR,
                bot_patterns=generate_php_bot_patterns(),
                generic_patterns=generate_php_generic_patterns(),
                bot_ip_ranges=generate_php_bot_ip_ranges(),
                rate_limit=rate_limit,
                ban_duration=ban_duration,
                ban_duration_min=ban_duration // 60
            )
        else:
            countries_array = generate_php_countries_array(geo_region)
            geoip_content = GEOIP_SCRIPT_TEMPLATE.format(
                region_name=region_info['name'],
                region_description=region_info['description'],
                expiry_date=expiry.strftime('%Y-%m-%d %H:%M:%S CET'),
                expiry_timestamp=expiry.strftime('%Y-%m-%d %H:%M:%S'),
                cache_file=CACHE_FILE,
                log_file=SHOP_LOG_FILE,
                countries_array=countries_array,
                min_ranges=min_ranges
            )

        with open(blocking_file, 'w', encoding='utf-8') as f:
            f.write(geoip_content)
        set_owner(blocking_file, uid, gid)

        # Step 3: Generate cache (GeoIP mode only)
        if not bot_mode:
            success, range_count, error = generate_and_validate_cache(httpdocs, geo_region, uid, gid)
            if not success and not silent:
                logger.warning(f"Cache generation: {error}")

        # Step 4: Register
        if bot_mode:
            add_shop_to_active(shop, mode, geo_region, rate_limit, ban_duration)
        else:
            add_shop_to_active(shop, mode, geo_region)

        if not silent:
            logger.info(f"✅ Blocking activated for {shop}")

        return True

    except Exception as e:
        logger.error(f"Error activating {shop}: {e}")
        # Roll back on failure
        if os.path.isfile(backup_php):
            shutil.move(backup_php, index_php)
        if os.path.isfile(blocking_file):
            os.remove(blocking_file)
        return False


def deactivate_blocking(shop: str, silent: bool = True) -> bool:
    """
    Deactivates blocking for a shop.

    Args:
        shop: Shop domain
        silent: Suppress console output

    Returns:
        True on success, False otherwise
    """
    httpdocs = os.path.join(VHOSTS_DIR, shop, 'httpdocs')
    index_php = os.path.join(httpdocs, 'index.php')
    backup_php = os.path.join(httpdocs, f'index.php{BACKUP_SUFFIX}')
    ratelimit_path = os.path.join(httpdocs, RATELIMIT_DIR)

    if not silent:
        logger.info(f"Deactivating blocking for: {shop}")

    try:
        # Step 1: Restore the original index.php
        if os.path.isfile(backup_php):
            shutil.move(backup_php, index_php)
        else:
            # Remove the require statement manually
            if os.path.isfile(index_php):
                with open(index_php, 'r') as f:
                    content = f.read()
                lines = [l for l in content.split('\n') if BLOCKING_FILE not in l]
                with open(index_php, 'w') as f:
                    f.write('\n'.join(lines))

        # Step 2: Delete generated files
        for f in [os.path.join(httpdocs, x) for x in [BLOCKING_FILE, CACHE_FILE, SHOP_LOG_FILE]]:
            if os.path.isfile(f):
                os.remove(f)

        # Step 3: Delete the rate-limit directory
        if os.path.isdir(ratelimit_path):
            shutil.rmtree(ratelimit_path)

        # Step 4: Deregister
        remove_shop_from_active(shop)

        if not silent:
            logger.info(f"✅ Blocking deactivated for {shop}")

        return True

    except Exception as e:
        logger.error(f"Error deactivating {shop}: {e}")
        return False
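# Illustrative calls (hypothetical domain):
#
#     activate_blocking('example.com', mode='geoip', geo_region='eurozone')
#     activate_blocking('example.com', mode='bot', rate_limit=30, ban_duration=300)
#     deactivate_blocking('example.com')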
# =============================================================================
# SHOP LOG STATS
# =============================================================================
def get_shop_log_stats(shop: str) -> Dict[str, Any]:
    """
    Collects statistics from the shop log.

    Returns:
        Dict with log_entries, total_bans, active_bans, banned_bots,
        req_per_min, unique_ips, unique_bots, top_bots, top_ips
    """
    httpdocs = os.path.join(VHOSTS_DIR, shop, 'httpdocs')
    log_file = os.path.join(httpdocs, SHOP_LOG_FILE)
    ratelimit_path = os.path.join(httpdocs, RATELIMIT_DIR)

    stats = {
        'log_entries': 0,
        'total_bans': 0,
        'active_bans': 0,
        'banned_bots': [],
        'req_per_min': 0.0,
        'unique_ips': 0,
        'unique_bots': 0,
        'top_bots': {},
        'top_ips': {}
    }

    ips = {}
    bots = {}

    # Parse the log file
    if os.path.isfile(log_file):
        try:
            with open(log_file, 'r') as f:
                for line in f:
                    stats['log_entries'] += 1
                    ip, ua = None, 'Unknown'
                    detected_bot = None

                    if 'BANNED: ' in line:
                        stats['total_bans'] += 1
                        try:
                            detected_bot = line.split('BANNED: ')[1].split(' |')[0].strip()
                        except IndexError:
                            pass
                    elif 'BOT: ' in line:
                        try:
                            detected_bot = line.split('BOT: ')[1].split(' |')[0].strip()
                        except IndexError:
                            pass
                    elif 'BLOCKED (banned): ' in line:
                        try:
                            detected_bot = line.split('BLOCKED (banned): ')[1].split(' |')[0].strip()
                        except IndexError:
                            pass

                    if 'IP: ' in line:
                        try:
                            ip = line.split('IP: ')[1].split(' |')[0].strip()
                        except IndexError:
                            pass
                    if 'UA: ' in line:
                        try:
                            ua = line.split('UA: ')[1].split(' |')[0].strip()
                        except IndexError:
                            pass

                    if ip:
                        if ip not in ips:
                            ips[ip] = {'count': 0, 'ua': ua}
                        ips[ip]['count'] += 1

                    if detected_bot:
                        bots[detected_bot] = bots.get(detected_bot, 0) + 1
                    elif ua and ua != 'Unknown':
                        bot_name = detect_bot(ua, ip)
                        if bot_name != 'Unbekannt':
                            bots[bot_name] = bots.get(bot_name, 0) + 1
        except Exception:
            pass

    # Count active bans
    bans_dir = os.path.join(ratelimit_path, 'bans')
    if os.path.isdir(bans_dir):
        now = time.time()
        try:
            for ban_file in os.listdir(bans_dir):
                if ban_file.endswith('.ban'):
                    try:
                        with open(os.path.join(bans_dir, ban_file), 'r') as f:
                            content = f.read().strip()
                        parts = content.split('|', 1)
                        ban_until = int(parts[0])
                        bot_name = parts[1] if len(parts) > 1 else "Unbekannt"
                        if now < ban_until:
                            stats['active_bans'] += 1
                            stats['banned_bots'].append(bot_name)
                    except Exception:
                        pass
        except Exception:
            pass

    # Derive statistics
    stats['unique_ips'] = len(ips)
    stats['unique_bots'] = len(bots)

    # Top bots (max. 10)
    sorted_bots = sorted(bots.items(), key=lambda x: x[1], reverse=True)[:10]
    stats['top_bots'] = dict(sorted_bots)

    # Top IPs (max. 10)
    sorted_ips = sorted(ips.items(), key=lambda x: x[1]['count'], reverse=True)[:10]
    stats['top_ips'] = {ip: data['count'] for ip, data in sorted_ips}

    # Compute requests per minute
    activation_time = get_shop_activation_time(shop)
    if activation_time and stats['log_entries'] > 0:
        runtime_minutes = (datetime.now() - activation_time).total_seconds() / 60
        if runtime_minutes > 0:
            stats['req_per_min'] = round(stats['log_entries'] / runtime_minutes, 2)

    return stats
# =============================================================================
# LOG WATCHER - On-demand live streaming
# =============================================================================
class LogWatcher:
    """
    Watches shop log files for live streaming.
    Only active when explicitly requested.
    """

    def __init__(self, callback: Callable[[str, str], None]):
        """
        Args:
            callback: Invoked with (shop, line) for each new entry
        """
        self.callback = callback
        self.watching: Set[str] = set()
        self.file_positions: Dict[str, int] = {}
        self.running = False
        self._thread: Optional[threading.Thread] = None
        self._lock = threading.Lock()

    def start(self):
        """Starts the watcher thread."""
        if self.running:
            return

        self.running = True
        self._thread = threading.Thread(target=self._watch_loop, daemon=True)
        self._thread.start()
        logger.debug("LogWatcher started")

    def stop(self):
        """Stops the watcher thread."""
        self.running = False
        if self._thread:
            self._thread.join(timeout=2)
        logger.debug("LogWatcher stopped")

    def start_watching(self, shop: str):
        """Starts watching a shop."""
        with self._lock:
            self.watching.add(shop)
            log_file = os.path.join(VHOSTS_DIR, shop, 'httpdocs', SHOP_LOG_FILE)
            if os.path.isfile(log_file):
                # Start at the end of the file; only new entries are streamed
                self.file_positions[shop] = os.path.getsize(log_file)
            else:
                self.file_positions[shop] = 0
        logger.debug(f"LogWatcher: watching {shop}")

    def stop_watching(self, shop: str):
        """Stops watching a shop."""
        with self._lock:
            self.watching.discard(shop)
            self.file_positions.pop(shop, None)
        logger.debug(f"LogWatcher: stopped watching {shop}")

    def _watch_loop(self):
        """Main loop - polls for new entries every 500 ms."""
        while self.running:
            with self._lock:
                shops_to_watch = list(self.watching)

            for shop in shops_to_watch:
                try:
                    log_file = os.path.join(VHOSTS_DIR, shop, 'httpdocs', SHOP_LOG_FILE)

                    if not os.path.isfile(log_file):
                        continue

                    current_size = os.path.getsize(log_file)
                    last_pos = self.file_positions.get(shop, 0)

                    if current_size > last_pos:
                        # New data available
                        with open(log_file, 'r') as f:
                            f.seek(last_pos)
                            new_content = f.read()
                            self.file_positions[shop] = f.tell()

                        for line in new_content.strip().split('\n'):
                            if line:
                                try:
                                    self.callback(shop, line)
                                except Exception as e:
                                    logger.error(f"LogWatcher callback error: {e}")

                    elif current_size < last_pos:
                        # Log was rotated; start over from the beginning
                        self.file_positions[shop] = 0

                except Exception as e:
                    logger.error(f"LogWatcher error for {shop}: {e}")

            time.sleep(0.5)


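# Usage sketch (illustrative only; GeoIPAgent below wires the callback into
# the WebSocket event stream; the domain is a placeholder):
#
#   watcher = LogWatcher(callback=lambda shop, line: print(f"[{shop}] {line}"))
#   watcher.start()                          # background polling thread
#   watcher.start_watching("shop.example.de")
#   ...
#   watcher.stop_watching("shop.example.de")
#   watcher.stop()

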
# =============================================================================
# LOG ROTATION
# =============================================================================
def rotate_shop_logs():
    """Rotates all shop logs larger than 10 MB."""
    for shop in get_active_shops():
        try:
            httpdocs = os.path.join(VHOSTS_DIR, shop, 'httpdocs')
            log_file = os.path.join(httpdocs, SHOP_LOG_FILE)

            if not os.path.isfile(log_file):
                continue

            size = os.path.getsize(log_file)
            if size <= LOG_MAX_SIZE:
                continue

            logger.info(f"Rotating log for {shop} ({size / 1024 / 1024:.1f} MB)")

            # Rotate: log -> log.1 -> log.2 -> log.3 (the oldest backup is
            # overwritten by the move; with LOG_BACKUP_COUNT = 3 the index
            # never exceeds the limit, so no explicit delete is needed)
            for i in range(LOG_BACKUP_COUNT - 1, 0, -1):
                src = f"{log_file}.{i}"
                dst = f"{log_file}.{i + 1}"
                if os.path.exists(src):
                    shutil.move(src, dst)

            shutil.move(log_file, f"{log_file}.1")

            # New log with correct ownership
            uid, gid = get_most_common_owner(httpdocs)
            with open(log_file, 'w') as f:
                f.write(f"# Log rotated at {utc_now_iso()}\n")
            set_owner(log_file, uid, gid)

        except Exception as e:
            logger.error(f"Log rotation error for {shop}: {e}")


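# Rotation result with LOG_BACKUP_COUNT = 3 (illustrative):
#   geoip_blocked.log    -> geoip_blocked.log.1
#   geoip_blocked.log.1  -> geoip_blocked.log.2
#   geoip_blocked.log.2  -> geoip_blocked.log.3   (a previous .3 is overwritten)

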
# =============================================================================
# GEOIP AGENT - WebSocket client
# =============================================================================
class GeoIPAgent:
    """
    WebSocket-based agent for real-time communication with the dashboard.
    """

    def __init__(self, dashboard_url: str = DEFAULT_DASHBOARD_URL):
        self.dashboard_url = dashboard_url
        self.hostname = socket.getfqdn()
        self.agent_id = hashlib.md5(self.hostname.encode()).hexdigest()[:16]
        self.token: Optional[str] = None
        self.approved = False
        self.running = False
        self.ws = None
        self.reconnect_delay = RECONNECT_BASE_DELAY

        # Log watcher for live streaming
        self.log_watcher = LogWatcher(callback=self._on_log_entry)

        # State tracking
        self.last_stats: Dict[str, Dict] = {}
        self.last_stats_time = 0
        self.last_heartbeat_time = 0
        self.last_log_rotation_time = 0

        # Load token if present
        self._load_token()

    def _load_token(self):
        """Loads a previously saved token from file."""
        if os.path.isfile(TOKEN_FILE):
            try:
                with open(TOKEN_FILE, 'r') as f:
                    self.token = f.read().strip()
                if self.token:
                    logger.info(f"Token loaded from {TOKEN_FILE}")
            except Exception as e:
                logger.warning(f"Could not load token: {e}")

    def _save_token(self, token: str):
        """Saves the token to file."""
        try:
            os.makedirs(os.path.dirname(TOKEN_FILE), exist_ok=True)
            with open(TOKEN_FILE, 'w') as f:
                f.write(token)
            os.chmod(TOKEN_FILE, 0o600)
            self.token = token
            logger.info(f"Token saved to {TOKEN_FILE}")
        except Exception as e:
            logger.error(f"Could not save token: {e}")

    def _get_os_info(self) -> Dict[str, str]:
        """Collects OS information."""
        return {
            "system": platform.system(),
            "release": platform.release(),
            "machine": platform.machine()
        }

    def _get_system_stats(self) -> Dict[str, Any]:
        """Collects system statistics."""
        stats = {}

        try:
            load = os.getloadavg()
            stats["load_1m"] = round(load[0], 2)
            stats["load_5m"] = round(load[1], 2)
            stats["load_15m"] = round(load[2], 2)
        except Exception:
            pass

        try:
            with open('/proc/meminfo', 'r') as f:
                meminfo = {}
                for line in f:
                    parts = line.split(':')
                    if len(parts) == 2:
                        meminfo[parts[0].strip()] = int(parts[1].strip().split()[0])

            total = meminfo.get('MemTotal', 1)
            available = meminfo.get('MemAvailable', meminfo.get('MemFree', 0))
            stats["memory_percent"] = round((1 - available / total) * 100, 1)
        except Exception:
            pass

        try:
            with open('/proc/uptime', 'r') as f:
                stats["uptime_seconds"] = int(float(f.read().split()[0]))
        except Exception:
            pass

        return stats

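    # Example return value on a Linux host (illustrative values):
    #   {"load_1m": 0.42, "load_5m": 0.35, "load_15m": 0.30,
    #    "memory_percent": 61.8, "uptime_seconds": 86400}
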
    def _get_shops_summary(self) -> Dict[str, int]:
        """Returns a shop summary."""
        available = get_available_shops()
        active = get_active_shops()
        return {"total": len(available), "active": len(active)}

    def _get_all_shops_data(self) -> List[Dict[str, Any]]:
        """Collects data for all shops."""
        shops_data = []
        available = get_available_shops()
        active = get_active_shops()

        for shop in available:
            is_active = shop in active
            link11_info = check_link11(shop)

            shop_data = {
                "domain": shop,
                "status": "active" if is_active else "inactive",
                "link11": link11_info['is_link11'],
                "link11_ip": link11_info['ip']
            }

            if is_active:
                shop_mode = get_shop_mode(shop)
                shop_geo = get_shop_geo_region(shop)
                rate_limit, ban_duration = get_shop_rate_limit_config(shop)
                activation_time = get_shop_activation_time(shop)

                shop_data["mode"] = shop_mode
                shop_data["geo_region"] = shop_geo
                shop_data["rate_limit"] = rate_limit
                shop_data["ban_duration"] = ban_duration

                if activation_time:
                    shop_data["activated"] = activation_time.isoformat()
                    runtime = (datetime.now() - activation_time).total_seconds() / 60
                    shop_data["runtime_minutes"] = round(runtime, 1)

                # Collect stats
                shop_data["stats"] = get_shop_log_stats(shop)

            shops_data.append(shop_data)

        return shops_data

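    # Example entry for an active shop (illustrative values; the domain is a
    # placeholder):
    #   {"domain": "shop.example.de", "status": "active",
    #    "link11": False, "link11_ip": None,
    #    "mode": "bot", "geo_region": "dach",
    #    "rate_limit": 30, "ban_duration": 5,
    #    "activated": "2025-01-01T12:00:00", "runtime_minutes": 42.0,
    #    "stats": {...}}
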
    def _on_log_entry(self, shop: str, line: str):
        """Callback for new log entries."""
        if not self.ws or not self.approved:
            return

        # Forward the entry to the dashboard (this runs in the watcher
        # thread, hence run_coroutine_threadsafe)
        asyncio.run_coroutine_threadsafe(
            self._send_event('log.entry', {'shop': shop, 'line': line}),
            self._loop
        )

        # Check for ban events
        if 'BANNED: ' in line:
            try:
                bot_name = line.split('BANNED: ')[1].split(' |')[0].strip()
                asyncio.run_coroutine_threadsafe(
                    self._send_event('bot.banned', {
                        'shop': shop,
                        'bot_name': bot_name,
                        'line': line
                    }),
                    self._loop
                )
            except Exception:
                pass

    async def _send_event(self, event_type: str, data: Dict[str, Any]):
        """Sends an event to the dashboard."""
        if not self.ws:
            return

        try:
            message = json.dumps({
                'type': event_type,
                'data': data
            })
            await self.ws.send(message)
            logger.debug(f"Sent: {event_type}")
        except Exception as e:
            logger.error(f"Error sending {event_type}: {e}")

    async def _send_connect(self):
        """Sends the agent.connect event."""
        await self._send_event('agent.connect', {
            'hostname': self.hostname,
            'agent_id': self.agent_id,
            'token': self.token,
            'version': VERSION,
            'os_info': self._get_os_info(),
            'shops_summary': self._get_shops_summary()
        })

    async def _send_heartbeat(self):
        """Sends the agent.heartbeat event."""
        await self._send_event('agent.heartbeat', {
            'agent_id': self.agent_id,
            'system': self._get_system_stats(),
            'shops_summary': self._get_shops_summary()
        })
        self.last_heartbeat_time = time.time()

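    # Example agent -> dashboard frame produced by _send_connect()
    # (illustrative values):
    #   {"type": "agent.connect",
    #    "data": {"hostname": "web01.example.net",
    #             "agent_id": "0123456789abcdef", "token": null,
    #             "version": "2.0.0",
    #             "os_info": {"system": "Linux", "release": "5.15.0",
    #                         "machine": "x86_64"},
    #             "shops_summary": {"total": 12, "active": 3}}}
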
    async def _send_full_update(self):
        """Sends the shop.full_update event."""
        await self._send_event('shop.full_update', {
            'agent_id': self.agent_id,
            'hostname': self.hostname,
            'shops': self._get_all_shops_data(),
            'system': self._get_system_stats()
        })

    async def _send_stats_update(self):
        """Sends stats updates for active shops."""
        for shop in get_active_shops():
            stats = get_shop_log_stats(shop)

            # Only send if something has changed
            last = self.last_stats.get(shop, {})
            if stats != last:
                await self._send_event('shop.stats', {
                    'domain': shop,
                    'stats': stats
                })
                self.last_stats[shop] = stats

        self.last_stats_time = time.time()

    async def _handle_message(self, message: str):
        """Processes incoming messages from the dashboard."""
        try:
            data = json.loads(message)
            event_type = data.get('type')
            event_data = data.get('data', {})

            logger.debug(f"Received: {event_type}")

            if event_type == 'auth.approved':
                # Store the token
                token = event_data.get('token')
                if token:
                    self._save_token(token)
                self.approved = True
                logger.info("✅ Agent approved in the dashboard!")
                # Send a full update
                await self._send_full_update()

            elif event_type == 'command.activate':
                await self._handle_activate_command(event_data)

            elif event_type == 'command.deactivate':
                await self._handle_deactivate_command(event_data)

            elif event_type == 'log.subscribe':
                shop = event_data.get('shop')
                if shop:
                    self.log_watcher.start_watching(shop)

            elif event_type == 'log.unsubscribe':
                shop = event_data.get('shop')
                if shop:
                    self.log_watcher.stop_watching(shop)

            elif event_type == 'ping':
                await self._send_event('pong', {'agent_id': self.agent_id})

        except json.JSONDecodeError:
            logger.error(f"Invalid JSON received: {message[:100]}")
        except Exception as e:
            logger.error(f"Error processing message: {e}")

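    # Example dashboard -> agent frame handled above (illustrative values):
    #   {"type": "command.activate",
    #    "data": {"command_id": "abc123", "shop": "shop.example.de",
    #             "mode": "bot", "rate_limit": 30, "ban_duration": 5}}
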
    async def _handle_activate_command(self, data: Dict[str, Any]):
        """Processes an activate command."""
        command_id = data.get('command_id', 'unknown')
        shop = data.get('shop')
        mode = data.get('mode', 'geoip')
        geo_region = data.get('geo_region', 'dach')
        rate_limit = data.get('rate_limit')
        ban_duration = data.get('ban_duration')

        # Log according to mode (ban duration is in minutes, see
        # DEFAULT_BAN_DURATION)
        if mode == 'bot':
            logger.info(f"Activating {shop} (mode=bot, rate_limit={rate_limit}/min, ban={ban_duration}min)")
        else:
            logger.info(f"Activating {shop} (mode=geoip, region={geo_region})")

        try:
            success = activate_blocking(
                shop,
                silent=True,
                mode=mode,
                geo_region=geo_region,
                rate_limit=rate_limit,
                ban_duration=ban_duration
            )

            if success:
                await self._send_event('command.result', {
                    'command_id': command_id,
                    'status': 'success',
                    'message': f'Shop {shop} activated ({mode})',
                    'shop': shop
                })
                # Send a full update
                await self._send_full_update()
            else:
                await self._send_event('command.result', {
                    'command_id': command_id,
                    'status': 'error',
                    'message': 'Activation failed',
                    'shop': shop
                })
        except Exception as e:
            await self._send_event('command.result', {
                'command_id': command_id,
                'status': 'error',
                'message': str(e),
                'shop': shop
            })

    async def _handle_deactivate_command(self, data: Dict[str, Any]):
        """Processes a deactivate command."""
        command_id = data.get('command_id', 'unknown')
        shop = data.get('shop')

        logger.info(f"Deactivating {shop}")

        try:
            success = deactivate_blocking(shop, silent=True)

            if success:
                await self._send_event('command.result', {
                    'command_id': command_id,
                    'status': 'success',
                    'message': f'Shop {shop} deactivated',
                    'shop': shop
                })
                # Send a full update
                await self._send_full_update()
            else:
                await self._send_event('command.result', {
                    'command_id': command_id,
                    'status': 'error',
                    'message': 'Deactivation failed',
                    'shop': shop
                })
        except Exception as e:
            await self._send_event('command.result', {
                'command_id': command_id,
                'status': 'error',
                'message': str(e),
                'shop': shop
            })

    async def _periodic_tasks(self):
        """Runs periodic tasks."""
        while self.running:
            try:
                now = time.time()

                # Heartbeat (every 60 seconds)
                if now - self.last_heartbeat_time >= HEARTBEAT_INTERVAL:
                    if self.approved:
                        await self._send_heartbeat()

                # Stats update (every 10 seconds)
                if now - self.last_stats_time >= STATS_UPDATE_INTERVAL:
                    if self.approved:
                        await self._send_stats_update()

                # Log rotation (checked every 5 minutes)
                if now - self.last_log_rotation_time >= 300:
                    rotate_shop_logs()
                    self.last_log_rotation_time = now

                await asyncio.sleep(1)

            except asyncio.CancelledError:
                break
            except Exception as e:
                logger.error(f"Error in periodic_tasks: {e}")
                await asyncio.sleep(5)

    async def connect(self):
        """Establishes the WebSocket connection."""
        # SSL context that accepts self-signed certificates
        ssl_context = ssl.create_default_context()
        ssl_context.check_hostname = False
        ssl_context.verify_mode = ssl.CERT_NONE

        try:
            # Lazy import of websockets (availability is verified in
            # check_dependencies())
            import websockets

            logger.info(f"Connecting to {self.dashboard_url}...")

            async with websockets.connect(
                self.dashboard_url,
                ssl=ssl_context,
                ping_interval=30,
                ping_timeout=10,
                close_timeout=5
            ) as websocket:
                self.ws = websocket
                self.reconnect_delay = RECONNECT_BASE_DELAY

                logger.info("✅ WebSocket connected")

                # Send the connect event
                await self._send_connect()

                # Start periodic tasks
                periodic_task = asyncio.create_task(self._periodic_tasks())

                try:
                    # Receive messages
                    async for message in websocket:
                        await self._handle_message(message)
                finally:
                    periodic_task.cancel()
                    try:
                        await periodic_task
                    except asyncio.CancelledError:
                        pass

        except ImportError:
            logger.error("websockets module not installed! Install with: pip install websockets")
            raise
        except Exception as e:
            logger.error(f"WebSocket error: {e}")
            raise
        finally:
            self.ws = None
            self.approved = False

    async def run_async(self):
        """Main loop with auto-reconnect."""
        self._loop = asyncio.get_running_loop()
        self.running = True
        self._shutdown_event = asyncio.Event()

        # Signal handler for asyncio
        def signal_handler():
            logger.info("Stopping agent (signal received)...")
            self.running = False
            self._shutdown_event.set()
            # Close the WebSocket if connected
            if self.ws:
                asyncio.create_task(self._close_websocket())

        # Register signals
        for sig in (signal.SIGINT, signal.SIGTERM):
            self._loop.add_signal_handler(sig, signal_handler)

        # Start the LogWatcher
        self.log_watcher.start()

        try:
            while self.running:
                try:
                    await self.connect()
                except asyncio.CancelledError:
                    break
                except Exception as e:
                    if not self.running:
                        break
                    logger.warning(f"Connection lost: {e}")

                if self.running:
                    logger.info(f"Reconnecting in {self.reconnect_delay}s...")
                    try:
                        # Wait with a timeout so signals are handled promptly
                        await asyncio.wait_for(
                            self._shutdown_event.wait(),
                            timeout=self.reconnect_delay
                        )
                        break  # Shutdown signal received
                    except asyncio.TimeoutError:
                        pass  # Normal timeout, reconnect

                    # Exponential backoff
                    self.reconnect_delay = min(
                        self.reconnect_delay * 2,
                        RECONNECT_MAX_DELAY
                    )
        finally:
            # Stop the LogWatcher
            self.log_watcher.stop()
            # Remove signal handlers
            for sig in (signal.SIGINT, signal.SIGTERM):
                self._loop.remove_signal_handler(sig)

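    # Resulting reconnect backoff (illustrative): 1s, 2s, 4s, 8s, 16s, 32s,
    # then capped at RECONNECT_MAX_DELAY (60s); reset to 1s after a
    # successful connect.
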
    async def _close_websocket(self):
        """Closes the WebSocket connection."""
        if self.ws:
            try:
                await self.ws.close()
            except Exception:
                pass

    def run(self):
        """Starts the agent."""
        logger.info("=" * 60)
        logger.info(f"GeoIP Agent v{VERSION} (WebSocket Real-Time)")
        logger.info(f"Hostname: {self.hostname}")
        logger.info(f"Agent-ID: {self.agent_id}")
        logger.info(f"Dashboard: {self.dashboard_url}")
        logger.info(f"Token: {'present' if self.token else 'not present'}")
        logger.info("=" * 60)

        # Start the asyncio loop
        try:
            asyncio.run(self.run_async())
        except KeyboardInterrupt:
            pass

        logger.info("Agent stopped.")


# =============================================================================
# CLI INTERFACE
# =============================================================================
def create_systemd_service():
    """Creates the systemd service file."""
    service = """[Unit]
Description=GeoIP Agent v2.0 (WebSocket)
After=network.target

[Service]
Type=simple
ExecStart=/usr/bin/python3 /opt/geoip-agent/geoip_agent.py
Restart=always
RestartSec=10
User=root
Environment=PYTHONUNBUFFERED=1

[Install]
WantedBy=multi-user.target
"""
    service_path = "/etc/systemd/system/geoip-agent.service"

    try:
        with open(service_path, 'w') as f:
            f.write(service)
        print(f"✅ Service created: {service_path}")
        print("   Enable with: systemctl daemon-reload && systemctl enable --now geoip-agent")
    except PermissionError:
        print("❌ Root privileges required!")
        sys.exit(1)


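# Typical install flow (illustrative; the copy target matches the ExecStart
# path in the unit file above):
#   cp geoip_agent.py /opt/geoip-agent/geoip_agent.py
#   python3 /opt/geoip-agent/geoip_agent.py --install-service
#   systemctl daemon-reload && systemctl enable --now geoip-agent

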
def check_dependencies():
    """Checks whether all dependencies are installed."""
    missing = []

    try:
        import websockets  # noqa: F401
    except ImportError:
        missing.append("websockets")

    if missing:
        print("❌ Missing dependencies:")
        for dep in missing:
            print(f"   - {dep}")
        print(f"\nInstall with: pip install {' '.join(missing)}")
        return False

    return True


def main():
    """Main function with CLI arguments."""
    import argparse

    parser = argparse.ArgumentParser(
        description=f"GeoIP Agent v{VERSION} - WebSocket Real-Time Agent"
    )
    parser.add_argument(
        "--url",
        default=DEFAULT_DASHBOARD_URL,
        help=f"Dashboard WebSocket URL (default: {DEFAULT_DASHBOARD_URL})"
    )
    parser.add_argument(
        "--debug",
        action="store_true",
        help="Enable debug logging"
    )
    parser.add_argument(
        "--install-service",
        action="store_true",
        help="Install the systemd service"
    )
    parser.add_argument(
        "--check-deps",
        action="store_true",
        help="Check dependencies"
    )

    args = parser.parse_args()

    # Initialise logging
    global logger
    logger = setup_logging(debug=args.debug)

    if args.install_service:
        create_systemd_service()
        return

    if args.check_deps:
        if check_dependencies():
            print("✅ All dependencies are installed")
            return
        sys.exit(1)

    # Check dependencies
    if not check_dependencies():
        sys.exit(1)

    # Root check
    if os.geteuid() != 0:
        print("❌ Root privileges required!")
        sys.exit(1)

    # Start the agent
    agent = GeoIPAgent(dashboard_url=args.url)
    agent.run()


if __name__ == "__main__":
    main()