# Sindbad~EG File Manager
# | Current Path : /opt/dedrads/ |
# |
# |
# | Current File : //opt/dedrads/sparta.py |
#!/usr/lib/rads/venv/bin/python3
# pylint: disable=line-too-long
"""
SPARTA: System Performance Audit & Reporting Tool for Analysts
A command-line auditor that produces a full Markdown report of system health, resources, services, and a site’s context within the server.
Built by Dakota P.
"""
import argparse
import datetime as dt
import glob
import ipaddress
import json
import logging
import os
import platform
import pwd
import re
import shutil
import socket
import ssl
import subprocess
import sys
import textwrap
import time
from collections import Counter
from concurrent.futures import ThreadPoolExecutor
from dataclasses import dataclass
from pathlib import Path
from typing import List, Optional, Set
from urllib import error as urlerror
from urllib import request
import pymysql
import yaml
from cpapis import whmapi1
from rads import color
# Shared worker pool for background gathering tasks (e.g. the deadweight scan).
EXECUTOR = ThreadPoolExecutor(max_workers=2)
# Only colorize output when attached to an interactive terminal.
USE_COLORS = sys.stdout.isatty()
# --- Logging Creation and Configuration
LOG_FILE = "/var/log/SPARTA.log"
logger = logging.getLogger("SPARTA")
class NewlineStreamHandler(logging.StreamHandler):
    """logging.StreamHandler variant that prefixes each record with a newline.

    Keeps console log messages from landing on top of an in-progress
    progress-bar line.
    """

    def emit(self, record):
        stream = getattr(self, "stream", None)
        try:
            stream.write("\n")
            stream.flush()
        except Exception:
            # The leading newline is purely cosmetic; never let a stream
            # hiccup here break logging itself.
            pass
        super().emit(record)
def setup_logging() -> None:
    """Configure the SPARTA logger: DEBUG to file, WARNING to console.

    Touches LOG_FILE up front so a permissions problem surfaces as one
    clear fatal error instead of a logging stack trace later. Safe to
    call more than once: handlers are attached only on the first call.

    Raises:
        SystemExit: if LOG_FILE cannot be created or opened.
    """
    try:
        with open(LOG_FILE, "a", encoding="utf-8"):
            os.utime(LOG_FILE, None)
    except Exception as exc:
        raise SystemExit(
            f"[FATAL] Failed to prepare log file {LOG_FILE}: {exc}"
        ) from exc
    logger.setLevel(logging.DEBUG)
    if logger.handlers:
        # Already configured (setup_logging called twice).
        return
    # Force UTF-8 to match the touch-open above; the FileHandler default
    # is the locale encoding, which can mangle non-ASCII log lines.
    file_handler = logging.FileHandler(LOG_FILE, encoding="utf-8")
    file_handler.setLevel(logging.DEBUG)
    file_handler.setFormatter(
        logging.Formatter("%(asctime)s [%(levelname)s] %(message)s")
    )
    console_handler = NewlineStreamHandler()
    console_handler.setLevel(logging.WARNING)
    console_handler.setFormatter(
        logging.Formatter("%(levelname)s: %(message)s")
    )
    logger.addHandler(file_handler)
    logger.addHandler(console_handler)
# --- Progress Bar and Progress Section Handling
class ProgressBar:
    """
    Simple TTY-friendly progress bar with ANSI coloring.
    Renders like:
        Building: <section> 73%
        |██████████████████████████████░░░░░░░░░░░░|
        Checking bounces/defers
    On a non-TTY stream each update appends a fresh three-line block
    instead of redrawing in place.
    """
    def __init__(self, total, *, width=50, prefix='Progress:', suffix='Complete', decimals=1):
        # Clamp total to >= 1 so percentage math never divides by zero.
        self.total = max(1, int(total))
        self.width = int(width)
        self.prefix = str(prefix)
        self.suffix = str(suffix)
        self.decimals = int(decimals)
        self.iteration = 0
        self._is_tty = sys.stdout.isatty()
        # Matches ANSI SGR sequences; used to measure visible width only.
        self._ansi_re = re.compile(r'\x1b\[[0-9;]*m')
        self._color_bold = "\x1b[1m"
        self._color_blue = "\x1b[34m"
        self._color_reset = "\x1b[0m"
        # Cached bar width and count of lines we last drew
        self._bar_width = None
        self._lines_drawn = 0
    def _visible_len(self, value: str) -> int:
        # On-screen length of the string (ANSI color codes stripped).
        return len(self._ansi_re.sub('', value))
    def _ensure_bar_width(self):
        """Compute and cache a bar width that fits the current terminal."""
        if not self._is_tty:
            self._bar_width = self.width
            return
        if self._bar_width is not None:
            return
        try:
            term_cols = shutil.get_terminal_size().columns
        except Exception:
            term_cols = 80
        # Reserve room for the widest possible "<prefix> 100.0%" line.
        sample_percent = f"{100:.{self.decimals}f}"
        sample_out = (
            f"{self.prefix} "
            f"{self._color_bold}{sample_percent}%{self._color_reset}"
        )
        reserved = self._visible_len(sample_out)
        self._bar_width = max(10, min(self.width, max(20, term_cols) - reserved))
    def update(self, value=None):
        """Advance or reposition the bar and redraw.

        value=None advances one step; a bool advances by one only when
        True; any other value is coerced with int() and treated as an
        absolute position clamped to [0, total]. Un-coercible values
        fall back to a single-step advance.
        """
        if value is None:
            self.iteration = min(self.total, self.iteration + 1)
        else:
            try:
                if isinstance(value, bool):
                    self.iteration = min(self.total, self.iteration + (1 if value else 0))
                else:
                    numeric_value = int(value)
                    self.iteration = max(0, min(self.total, numeric_value))
            except (TypeError, ValueError):
                self.iteration = min(self.total, self.iteration + 1)
        if self._is_tty:
            self._ensure_bar_width()
            bar_width = self._bar_width
        else:
            bar_width = self.width
        percent_value = 100 * (self.iteration / float(self.total))
        percent_str = f"{percent_value:.{self.decimals}f}"
        filled_length = int(bar_width * self.iteration // self.total)
        progress_bar = '█' * filled_length + '░' * (bar_width - filled_length)
        if self._is_tty:
            line1 = f"{self.prefix} {self._color_bold}{percent_str}%{self._color_reset}"
            line2 = f"{self._color_blue}|{progress_bar}|{self._color_reset}"
        else:
            line1 = f"{self.prefix} {percent_str}%"
            line2 = f"|{progress_bar}|"
        line3 = self.suffix or ""
        if not self._is_tty:
            # Non-interactive: append a new block; no cursor movement.
            sys.stdout.write(line1 + "\n")
            sys.stdout.write(line2 + "\n")
            sys.stdout.write(line3 + "\n")
            sys.stdout.flush()
            return
        try:
            if self._lines_drawn:
                # Move cursor up to the first line of the previous block
                sys.stdout.write(f"\x1b[{self._lines_drawn}F")
                # Clear each line, then move back up again to redraw
                for _ in range(self._lines_drawn):
                    sys.stdout.write("\x1b[2K\n")
                sys.stdout.write(f"\x1b[{self._lines_drawn}F")
            sys.stdout.write(line1 + "\n")
            sys.stdout.write(line2 + "\n")
            sys.stdout.write(line3 + "\n")
            sys.stdout.flush()
            self._lines_drawn = 3
        except Exception:
            # Fallback: just print new lines without cursor tricks
            sys.stdout.write(line1 + "\n")
            sys.stdout.write(line2 + "\n")
            sys.stdout.write(line3 + "\n")
            sys.stdout.flush()
    def finish(self):
        """Finalize output: newline on a TTY, else emit one final block."""
        if self._is_tty:
            sys.stdout.write("\n")
            sys.stdout.flush()
        else:
            self.update(self.total)
class SectionProgress:
    """
    Adapter that maps one report section onto 100 virtual ticks of the
    shared ProgressBar. Call tick('label') at each sub-step to advance
    the master bar smoothly and refresh the status line beneath it.
    """

    def __init__(
        self,
        progress,
        section_position: tuple,
        steps: int,
        section_name: str = ""
    ):
        index_in_run, total_sections = section_position
        self.progress = progress
        self.section_name = section_name
        self._sections_total = total_sections
        self.steps = max(1, int(steps))
        self.i = 0
        # Each section owns its own 100-tick window on the master bar.
        self.base = 100 * (max(1, int(index_in_run)) - 1)
        self.delta = 100 / self.steps
        self.max_pos = self.base + 100
        self._mini_width = 10

    def tick(self, label: str = ""):
        """Advance one sub-step; no-op on the bar when progress is None."""
        self.i = min(self.steps, self.i + 1)
        if not self.progress:
            return
        position = int(self.base + min(100, self.i * self.delta))
        done = int(self._mini_width * self.i / self.steps)
        mini_bar = "■" * done + "□" * (self._mini_width - done)
        try:
            self.progress.prefix = f"Building: {self.section_name}"
            self.progress.suffix = f"{label or ''} {mini_bar}".strip()
        except Exception:
            # A progress object with read-only attributes is not fatal.
            pass
        self.progress.update(position)
# --- Report Class (includes formatting/colors)
class SpartaReport:
    """Holds every audited fact about the server plus the formatting
    helpers used to render them into the final report.

    The section-gathering functions elsewhere in this file populate the
    attribute groups below piecemeal; most fields stay None/empty until
    their section has run.
    """
    def __init__(self):
        self.sections: List[str] = [
            "\n\n\n" + self.format_heading("# System Performance Audit Report:")
        ]
        self.progress = None
        self.section_index = None
        self.sections_total = None
        self.section_name = None
        # --- Identification
        self.server_id = None  # ex: vps1234
        self.server_type = None  # ex: vps, dedi, shared
        self.panel_type = None  # ex: "cPanel", "Control Web Panel", "UltraStack ONE", "Baremetal"
        self.domains = []  # ex: ["domain.com", "example.com", "another.com"]
        self.managed_domain = None  # ex: "domain.com"
        # --- System
        self.os_version = None  # ex: "CentOS 7", "AlmaLinux 8.10"
        self.main_ip = None  # ex: "123.0.0.1"
        self.mail_ip = None  # ex: "123.0.0.2"
        self.all_ips = []  # ex: ["123.0.0.1", "123.0.0.2", "123.0.0.3"]
        self.managed_domain_ip = None  # ex: "123.0.0.1"
        self.logical_cores = None  # ex: 8
        self.mem_total_gb = None  # ex: 16.0 (GB)
        self.mem_avail_gb = None  # ex: 2.5 (GB)
        self.mem_alloc_gb = None  # ex: 13.5 (GB used)
        self.mem_swap_gb = None  # ex: 2.0 (GB swap total)
        self.mem_buffers_gb = None  # buff/cache (Buffers + Cached + SReclaimable - Shmem) in GB
        self.deadweight_future = None  # background Future for deadweight scan
        self._async_tasks = []  # list of (marker, future, render_fn)
        # --- Account & Website
        self.domain_count = None  # ex: 233 (useful for PHP-FPM tuning)
        self.domain_owner = None  # ex: "userna5"
        self.user_list = []  # ex: ["userna5", "userna6", ...]
        self.user_count = None  # ex: 4
        self.domain_registrar = None  # ex: "Tucows"
        self.is_registered = None  # ex: True / False
        self.domain_reg_expiry = None  # ex: "2025-09-14"
        self.domain_reg_status = None  # ex: "active", "expired", "pending"
        self.whois_nameservers = []  # ex: ["ns1.inmotionhosting.com", "ns2.inmotionhosting.com"]
        self.dig_nameservers = []  # ex: ["ns1.inmotionhosting.com", "ns2.inmotionhosting.com"]
        self.domain_resolves = None  # ex: True if A record matches expected IP
        self.port_80_service = None  # ex: "nginx", "httpd", "lshttpd"
        self.vhost_path = None  # ex: "/usr/local/apache/conf.d/vhosts/domain.com.conf"
        self.web_stack = []  # ex: ["nginx", "varnish", "apache"]
        self.docroot = None  # ex: "/home/userna5/public_html"
        self.cms_type = None  # ex: "WordPress", "Joomla", "Drupal", "Custom"
        self.http_status = None  # ex: 200
        self.has_gzip = None  # ex: True
        self.has_cache_hit = None  # ex: True if CDN or app cache is working
        # --- Traffic
        self.has_cloudflare = None  # ex: True
        self.cdn_provider = None  # ex: "Cloudflare", "BunnyCDN", "Generic CDN"
        self.cdn_cache_status = None  # ex: "HIT", "MISS", "BYPASS"
        self.cdn_cache_hit = None  # ex: True if recent response was cached
        self.has_ultrastack = None  # ex: True if UltraStack is detected on this server
        # --- PHP
        self.system_php_version = None  # ex: "8.2"
        self.php_version = None  # ex: "7.4"
        self.php_handler = None  # ex: "suphp", "lsapi", "php-fpm"
        self.has_php_fpm = None  # ex: True
        self.php_fpm_max_requests = None  # ex: 400
        self.php_fpm_max_children = None  # ex: 120
        self.php_fpm_max_idle_timeout = None  # ex: 10
        self.total_fpm_pools = None  # ex: 10
        self.total_max_children = None  # ex: 100
        self.max_fpm_children = None  # recommended single site max_children
        # --- Database
        self.dbms_version = None  # ex: "MariaDB 10.6" or "MySQL 5.7"
        self.mysqltuner_opt_in = None  # ex: True if CX allows running mysqltuner
        self.mysqltuner_install = None  # ex: True / False if we installed it
        self.key_buffer_size = None  # ex: 8388608
        self.innodb_buffer_pool_size = None  # ex: 134217728
        self.max_connections = None  # ex: 151
        self.current_connections = None  # ex: 12
        self.largest_databases = None  # ex: [("wpdb", 512.4), ("joomla", 233.1)]
        self.query_cache_type = None  # ex: "OFF" or "ON"
        self.query_cache_size = None  # ex: 67108864
        self.tmp_table_size = None  # ex: 67108864
        self.max_heap_table_size = None  # ex: 67108864
        self.table_open_cache = None  # ex: 4000
        self.table_definition_cache = None  # ex: 2000
        self.innodb_log_file_size = None  # ex: 50331648
        self.join_buffer_size = None  # ex: 262144
        self.sort_buffer_size = None  # ex: 262144
        self.max_allowed_packet = None  # ex: 67108864
        self.slow_query_log = None  # ex: "ON" or "OFF"
        self.slow_query_log_file = None  # ex: "/var/lib/mysql/hostname-slow.log"
        self.long_query_time = None  # ex: "1.000000"
        self.performance_schema = None  # ex: "ON" or "OFF"
        # --- Email
        self.local_email = None  # ex: False if using Google Workspace
        self.mail_ptr = None  # ex: "vps1234.inmotionhosting.com"
        self.email_accounts = []  # ex: ["user@domain.com", "user2@domain.com", ...]
        self.email_user_count = None  # ex: 20
        self.email_disk_usage = None  # ex: total bytes or "2.3 GB"
        self.spam_protection = None  # ex: "SpamAssassin", "Imunify", etc.
        self.total_email_disk_usage = None  # total bytes used across all mailboxes
        self.highest_disk_mail_accounts = None  # list of top accounts by usage
        self.highest_disk_email_account = None  # single highest usage account
        self.highest_usage_email_folder = None  # folder breakdown for top account
        self.email_bounce_stats = None  # summary of recent bounces for managed domain
        # --- Security
        self.has_monarx = None  # ex: False
        self.has_imunify = None  # ex: True
        self.firewall_stack = []  # ex: ["csf", "lfd", "fail2ban"]
        self.has_ssl = None  # ex: True
        self.ssl_issuer = None  # certificate issuer organization
        self.ssl_expires = None  # certificate expiry (date or ISO8601 string)
        self.ssl_trusted = None  # ex: True
        self.ssl_self_signed = None  # ex: True
        # --- Services
        # Nested mapping consumed by format_installed_services below:
        # ex: {"Caching": {"redis": {"state": "active"}}}
        self.installed_services = {}
        self.failed_services = []  # ex: ["mailman.service", "httpd.service"]
        self.apache_mpm = None  # ex: "event", "worker", "prefork"
        self.apache_maxworkers = None  # ex: 400
        self.apache_maxconnections = None  # ex: 10000
        self.apache_serverlimit = None  # ex: 80
        self.apache_threadsperchild = None  # ex: 25
        self.apache_startservers = None  # ex: 5
        self.apache_minsparethreads = None  # ex: 75
        self.apache_maxsparethreads = None  # ex: 250
        self.apache_minspareservers = None  # ex: 5 (prefork)
        self.apache_maxspareservers = None  # ex: 10 (prefork)
        self.apache_mpm_conf_path = None  # ex: "/etc/httpd/conf.modules.d/00-mpm.conf"
    # --- Formatting helpers attached to the report
    def apply_color(self, style: str, text: str) -> str:
        """Wrap text in the named rads.color style; plain text off-TTY."""
        value = "" if text is None else str(text)
        if not USE_COLORS:
            return value
        if style == "progress_blue":
            # Not a rads.color style; emit raw ANSI blue directly.
            return f"\x1b[34m{value}\x1b[0m"
        style_func = getattr(color, style, None)
        if style_func is None:
            # Unknown style names degrade gracefully to uncolored text.
            return value
        return style_func(value)
    def format_heading(self, text: str) -> str:
        """Bold blue heading."""
        colored = self.apply_color("progress_blue", text)
        return self.apply_color("bold", colored)
    def format_subheading(self, text: str) -> str:
        """Blue (non-bold) subheading."""
        return self.apply_color("progress_blue", text)
    def format_label_value(self, label: str, value) -> str:
        """Render "Label: value" with a bold label; empty values become "Unknown"."""
        label_text = f"{str(label).rstrip(':')}:"
        raw_value = "" if value is None else str(value)
        if not raw_value:
            raw_value = "Unknown"
        value_colored = self.apply_color("white", raw_value)
        label_bold = self.apply_color("bold", label_text)
        label_colored = self.apply_color("white", label_bold)
        return f"{label_colored} {value_colored}"
    def format_block(self, block_text: str) -> str:
        """Color a multi-line block white."""
        return self.apply_color("white", str(block_text))
    def format_block_dim(self, block_text: str) -> str:
        """Color a multi-line block dim."""
        return self.apply_color("dim", str(block_text))
    def format_amber(self, text: str) -> str:
        """Amber warning text."""
        return self.apply_color("amber", str(text))
    def format_error(self, text: str) -> str:
        """Red error text."""
        return self.apply_color("red", str(text))
    def mem_format_label_value(self, label, value, value_style="white") -> str:
        """Label/value line for the memory section with a custom value style."""
        label_text = f"{str(label).rstrip(':')}:"
        label_bold = self.apply_color("bold", label_text)
        label_colored = self.apply_color("white", label_bold)
        value_text = "" if value is None else str(value)
        if not value_text:
            value_text = "Unknown"
        value_colored = self.apply_color(value_style, value_text)
        return f"{label_colored} {value_colored}"
    def reg_format_summary(self, registrar, nameservers, expiry, status):
        """Four-line registrar summary; status is green only when 'active'."""
        def label_text(label):
            label_clean = f"{str(label).rstrip(':')}:"
            return self.apply_color("white", self.apply_color("bold", label_clean))
        def value_text(value, style="white"):
            raw = "" if value is None else str(value)
            if not raw.strip():
                raw = "Unknown"
            return self.apply_color(style, raw)
        registrar_value = value_text(registrar)
        nameservers_value = value_text(", ".join(nameservers) if nameservers else "None")
        expiry_value = value_text(expiry)
        status_raw = "" if status is None else str(status).strip()
        if not status_raw:
            status_raw = "Unknown"
        status_style = "green" if status_raw.strip().lower() == "active" else "red"
        status_value = value_text(status_raw, style=status_style)
        return (
            f"{label_text('Registrar')} {registrar_value}\n"
            f"{label_text('Nameservers')} {nameservers_value}\n"
            f"{label_text('Expiration')} {expiry_value}\n"
            f"{label_text('Status')} {status_value}"
        )
    def dns_format_label_value(self, label, value, value_style="white") -> str:
        """Label/value line for the DNS section; blank values become "Unknown"."""
        label_text = f"{str(label).rstrip(':')}:"
        label_bold = self.apply_color("bold", label_text)
        label_colored = self.apply_color("white", label_bold)
        value_text = "" if value is None else str(value)
        if not value_text.strip():
            value_text = "Unknown"
        value_colored = self.apply_color(value_style, value_text)
        return f"{label_colored} {value_colored}"
    def dns_dmarc_style(self, dmarc_summary: str) -> str:
        """Pick a color style for a DMARC summary string."""
        summary = (dmarc_summary or "").strip().lower()
        if not summary:
            return "red"
        if summary.startswith("present (policy missing"):
            return "amber"
        if summary.startswith("missing") or summary.startswith("none") or ("deliverability issues" in summary):
            return "red"
        return "white"
    def site_stats_http_status_style(self, status_value):
        """Green for 2xx, red for 4xx/5xx, white otherwise/unparseable."""
        try:
            status_int = int(str(status_value).strip())
        except Exception:
            return "white"
        if 200 <= status_int <= 299:
            return "green"
        if 400 <= status_int <= 599:
            return "red"
        return "white"
    def site_stats_ttfb_style(self, ttfb_value_text):
        """Color TTFB: green < 1.5s, amber < 3.0s, red otherwise."""
        # Relies on module-level _site_stats_parse_seconds (defined elsewhere
        # in this file) to extract a float seconds value or None.
        ttfb_seconds = _site_stats_parse_seconds(ttfb_value_text)
        if ttfb_seconds is None:
            return "white"
        if ttfb_seconds < 1.5:
            return "green"
        if ttfb_seconds < 3.0:
            return "amber"
        return "red"
    def site_stats_render_summary(self, parsed_metrics, redirect_entries):
        """Render the site statistics block from curl-derived metrics."""
        def label_value(label, value, value_style="white"):
            label_text = f"{str(label).rstrip(':')}:"
            label_bold = self.apply_color("bold", label_text)
            label_colored = self.apply_color("white", label_bold)
            value_text = "" if value is None else str(value)
            if not value_text:
                value_text = "Unknown"
            value_colored = self.apply_color(value_style, value_text)
            return f"{label_colored} {value_colored}"
        http_status_value = getattr(self, "http_status", None) or parsed_metrics.get("HTTP Response Code", "N/A")
        ttfb_text = parsed_metrics.get("Time to First Byte", "N/A")
        has_gzip = bool(getattr(self, "has_gzip", False))
        has_cache_hit = bool(getattr(self, "has_cache_hit", False))
        lines = [
            label_value(
                "HTTP Response Code",
                http_status_value,
                value_style=self.site_stats_http_status_style(http_status_value),
            ),
            label_value("TLS Handshake Time", parsed_metrics.get("TLS Handshake", "N/A")),
            label_value(
                "Time to First Byte (TTFB)",
                ttfb_text,
                value_style=self.site_stats_ttfb_style(ttfb_text),
            ),
            label_value("Total Load Time", parsed_metrics.get("Total Time", "N/A")),
            label_value("Redirects Followed", parsed_metrics.get("Redirects Followed", "N/A")),
            label_value("Download Size", parsed_metrics.get("Download Size", "N/A")),
            label_value("Download Speed", parsed_metrics.get("Download Speed", "N/A")),
            label_value(
                "GZIP Compression",
                "Enabled" if has_gzip else "Disabled",
                value_style=("green" if has_gzip else "red"),
            ),
            label_value(
                "Cache HIT",
                "Yes" if has_cache_hit else "No",
                value_style=("green" if has_cache_hit else "red"),
            ),
        ]
        if not redirect_entries:
            return "\n".join(lines)
        redirect_section = ["", self.format_subheading("### Redirects Found")]
        redirect_section.extend(redirect_entries)
        return "\n".join(lines + redirect_section)
    def phpini_render_summary(self, directives: List[str], effective_values: dict, active_path: str) -> str:
        """Aligned listing of effective php.ini directive values."""
        # Pad every directive to the widest name so values line up.
        width = max((len(directive) for directive in directives), default=0)
        lines = [f"Authoritative INI File: {active_path or 'N/A'}"]
        for directive in directives:
            lines.append(f"{directive.ljust(width)} {effective_values.get(directive, 'N/A')}")
        return "\n".join(lines)
    def mail_format_label_only(self, label: str) -> str:
        """Bold white label with trailing colon, no value."""
        label_text = f"{str(label).rstrip(':')}:"
        label_bold = self.apply_color("bold", label_text)
        return self.apply_color("white", label_bold)
    def mail_format_bytes(self, num_bytes) -> str:
        """Human-readable byte count (B..TB); "Unknown" if not numeric."""
        try:
            value = float(num_bytes)
        except (TypeError, ValueError):
            return "Unknown"
        units = ("B", "KB", "MB", "GB", "TB")
        unit_index = 0
        while value >= 1024 and unit_index < (len(units) - 1):
            value /= 1024.0
            unit_index += 1
        return f"{value:.2f} {units[unit_index]}"
    def email_bounce_format_summary(self, bounce_stats: dict) -> str:
        """One line per address, sorted by bounced+deferred descending."""
        def sort_key(item):
            counts = item[1] or {}
            return counts.get("bounced", 0) + counts.get("deferred", 0)
        lines = []
        for email_address, counts in sorted((bounce_stats or {}).items(), key=sort_key, reverse=True):
            lines.append(
                f"{email_address}: {counts.get('bounced', 0)} bounced, {counts.get('deferred', 0)} deferred"
            )
        return "\n".join(lines)
    def ssh_label(self, label_text: str) -> str:
        """Bold white label for SSH summary lines."""
        return self.apply_color("white", self.apply_color("bold", str(label_text)))
    def ssh_render_summary(self, analysis: dict) -> str:
        """Render the sshd configuration summary from an analysis dict.

        NOTE(review): assumes analysis carries the keys 'ports',
        'root_mode', 'root_allowed', 'public_key_enabled',
        'passwords_effective', 'passwords_require_key', 'tries_int',
        'max_auth_tries', 'max_sessions', 'empty_passwords_allowed' —
        produced by the SSH gatherer elsewhere in this file.
        """
        ports_str = ", ".join(analysis["ports"])
        root_mode = analysis["root_mode"]
        lines = [
            f"{self.ssh_label('Port(s) in use:')} {self.apply_color('green', ports_str)}",
            f"{self.ssh_label('Root Access:')} {self.apply_color('red' if analysis['root_allowed'] else 'green', root_mode)}",
            f"{self.ssh_label('Public keys:')} {self.apply_color('green' if analysis['public_key_enabled'] else 'red', 'Enabled' if analysis['public_key_enabled'] else 'Disabled')}",
        ]
        if not analysis["passwords_effective"]:
            password_value, password_color = "Disabled", "green"
        elif analysis["passwords_require_key"]:
            password_value, password_color = "Key required with password", "yellow"
        else:
            password_value, password_color = "Enabled", "red"
        lines.append(
            f"{self.ssh_label('Password login:')} {self.apply_color(password_color, password_value)}"
        )
        tries_color = "yellow" if (analysis["tries_int"] is not None and analysis["tries_int"] > 6) else "green"
        lines.append(
            f"{self.ssh_label('Authentication Try Limit:')} {self.apply_color(tries_color, analysis['max_auth_tries'])}"
        )
        lines.append(
            f"{self.ssh_label('Session Limit:')} {self.apply_color('green', analysis['max_sessions'])}"
        )
        empty_pw_value = "Yes" if analysis["empty_passwords_allowed"] else "No"
        empty_pw_color = "red" if analysis["empty_passwords_allowed"] else "green"
        lines.append(
            f"{self.ssh_label('Empty Passwords Allowed:')} {self.apply_color(empty_pw_color, empty_pw_value)}"
        )
        return "\n".join(lines)
    def ssl_label(self, label_text: str) -> str:
        """Bold white label with colon for SSL summary lines."""
        return self.apply_color("white", self.apply_color("bold", f"{label_text}:"))
    def ssl_disabled_status_line(self) -> str:
        """Fixed "status: disabled" line in red."""
        status_label = self.apply_color("white", self.apply_color("bold", "status:"))
        status_value = self.apply_color("red", "disabled")
        return f"{status_label} {status_value}"
    def ssl_status_text(self, trust_state, self_signed: bool) -> str:
        """SSL status text: green only when trust was positively verified."""
        if trust_state is True:
            return self.apply_color("green", "enabled")
        if trust_state is False:
            if self_signed:
                return self.apply_color("yellow", "enabled (self-signed, untrusted)")
            return self.apply_color("yellow", "enabled (untrusted)")
        # trust_state is None: verification inconclusive.
        return self.apply_color("yellow", "enabled (unverified)")
    def ssl_issuer_text(self, self_signed: bool, issuer_org) -> str:
        """Issuer display text colored by trust implications."""
        if self_signed:
            return self.apply_color("yellow", "self signed")
        if issuer_org:
            return self.apply_color("green", issuer_org)
        return self.apply_color("yellow", "unknown")
    def ssl_expires_text(self, expiration_info) -> str:
        """Expiry display: red <= 7 days remaining, yellow <= 30, else green."""
        expires_display, days_remaining, _expires_iso = expiration_info
        if not (expires_display and days_remaining is not None):
            return self.apply_color("yellow", "unknown")
        expires_text = f"{expires_display} · days left: {days_remaining}"
        if days_remaining <= 7:
            return self.apply_color("red", expires_text)
        if days_remaining <= 30:
            return self.apply_color("yellow", expires_text)
        return self.apply_color("green", expires_text)
    def ssl_update_report_fields(self, trust_state, self_signed: bool, issuer_org, expiration_info) -> None:
        """Persist SSL findings onto the report attributes."""
        expires_iso = None
        if isinstance(expiration_info, (tuple, list)) and len(expiration_info) >= 3:
            expires_iso = expiration_info[2]
        self.ssl_trusted = trust_state
        # self_signed is only definitive when detected; a trusted cert is
        # definitively not self-signed; otherwise unknown (None).
        self.ssl_self_signed = True if self_signed else (False if trust_state is True else None)
        self.ssl_issuer = issuer_org
        self.ssl_expires = expires_iso
    def format_installed_services(
        self,
        installed_services,
        include_inactive=True,
        include_disabled=True,
    ):
        """Render the nested {category: {service: {"state": ...}}} mapping.

        States outside the include set are hidden; each shown state is
        colored by severity. Returns "None detected\n" when empty.
        """
        if not isinstance(installed_services, dict) or not installed_services:
            return "None detected\n"
        include_states = {"active", "failed", "activating"}
        if include_inactive:
            include_states.add("inactive")
        if include_disabled:
            include_states.add("inactive (disabled)")
        state_style = {
            "active": "green",
            "failed": "red",
            "activating": "amber",
            "inactive": "dim",
            "inactive (disabled)": "amber",
        }
        lines = []
        for category_name, services in installed_services.items():
            if not isinstance(services, dict) or not services:
                continue
            category_lines = []
            for friendly_name, data in services.items():
                data = data or {}
                state = str(data.get("state", "unknown"))
                if state not in include_states:
                    continue
                style = state_style.get(state, "dim")
                state_text = self.apply_color(style, state)
                category_lines.append(f"- {friendly_name}: {state_text}")
            if category_lines:
                lines.append(self.format_subheading(f"### {category_name}"))
                lines.extend(category_lines)
                lines.append("")
        if not lines:
            return "None detected\n"
        return "\n".join(lines).rstrip() + "\n"
    def svc_format_service_failure_output(self, cleaned_history: dict) -> str:
        """Render per-service journal failure history (name, count, lines)."""
        output_lines = []
        output_lines.append("Historical service failures seen in the journal (last 24 hours):")
        for friendly_name, data in (cleaned_history or {}).items():
            output_lines.append(f"- {friendly_name} ({data['count']} events):")
            for entry in data["lines"]:
                output_lines.append(f" {entry}")
        return "\n".join(output_lines)
    # --- Core report
    def run_section(self, description, func):
        """Run one gatherer, converting any exception into a red error line
        so a single failed section never aborts the whole report."""
        logger.debug("Running section: %s", description)
        try:
            return func()
        except Exception as e:
            error = f"[Error] Failed to get {description}: {str(e)}"
            logger.error(error)
            return self.format_error(error)
    def add_async_insertion(self, marker, future, render_fn):
        # Register a background result to be injected by marker when ready.
        self._async_tasks.append((marker, future, render_fn))
    def _apply_async_insertions(self, text: str) -> str:
        # Resolve all async tasks and replace markers exactly once.
        for marker, fut, render in self._async_tasks:
            try:
                body = fut.result()
            except Exception as e:
                logger.exception("Async task failed for marker %r", marker)
                # NOTE(review): this fallback names "deadweight usage" even
                # though markers are generic — confirm intended wording.
                body = f"[Error] Failed to gather deadweight usage: {e}"
                body = self.format_error(body)
            replacement = render(body)
            text = text.replace(marker, replacement, 1)
        return text
    def add_section(self, content):
        """Append a finished section body to the report."""
        logger.debug("Adding new section to report.")
        self.sections.append(f"\n{content}\n")
    def generate_report(self):
        """Join all sections and splice in completed async results."""
        logger.debug("Starting report generation")
        text = "\n".join(self.sections)
        text = self._apply_async_insertions(text)
        return text
@dataclass
class SiteStatsArtifacts:
    """Filesystem artifacts produced by a site-stats (curl timing) run."""
    # Scratch directory that owns the files below.
    temp_directory: str
    # Path to the dumped response headers.
    header_dump_path: str
    # Path to the timing/trace output file.
    trace_output_path: str
# --- Utilities
def build_arg_parser():
    """Build the SPARTA command-line parser.

    Exposes --full plus one opt-in flag per report section, and a
    matching --no-<section> opt-out for each (mail additionally accepts
    --no-email as an alias).
    """
    parser = argparse.ArgumentParser(
        description="SPARTA: System Performance Audit & Reporting Tool for Analysts"
    )
    parser.add_argument(
        "--full",
        action="store_true",
        help="Audit the full server environment (default)",
    )
    section_help = {
        "system": "Audit only system information",
        "site": "Audit only site and domain based information",
        "traffic": "Audit only web server traffic information",
        "php": "Audit only PHP and handler information",
        "db": "Audit only database information",
        "mail": "Audit only mail information",
        "security": "Audit only security information",
        "services": "Audit only services and versions",
    }
    for name, blurb in section_help.items():
        parser.add_argument(f"--{name}", action="store_true", help=blurb)
        negations = [f"--no-{name}"] + (["--no-email"] if name == "mail" else [])
        parser.add_argument(
            *negations,
            dest=f"no_{name}",
            action="store_true",
            help=f"Omit the {name} section from the report",
        )
    return parser
def _cwp_php_short_ver(version_string):
cleaned = str(version_string or "").strip()
match = re.match(r"^(\d+)\.(\d+)", cleaned)
if match:
major, minor = match.group(1), match.group(2)
return f"{major}{minor}"
numeric_only = re.sub(r"\D", "", cleaned)
return numeric_only[:2] if len(numeric_only) >= 2 else numeric_only
# --- Identification
def get_server_id(report):
    """Determine the server ID (e.g. vps1234) and coarse server type.

    Tries the system hostname first; if it is not in the standard
    *.inmotionhosting.com shape (or detection fails), prompts the
    analyst until a well-formed ID is entered. Sets report.server_id
    and report.server_type as a side effect.
    """
    # Accepted short-hostname shapes (full-string matches).
    patterns = [
        r"^(dedi\d+)$",
        r"^(ded\d+)$",
        r"^(cc\d+)$",
        r"^(elite\d+)$",
        r"^(advanced\d+)$",
        r"^(vps\d+)$",
    ]
    # Alphabetic ID prefix -> coarse platform type.
    types = {
        "dedi": "Dedicated",
        "ded": "Dedicated",
        "cc": "Dedicated",
        "elite": "Dedicated",
        "advanced": "Dedicated",
        "vps": "VPS",
    }
    try:
        hostname = subprocess.check_output(["hostname"], text=True, timeout=3).strip().lower()
        logger.debug("Detected hostname: %s", hostname)
        if ".inmotionhosting.com" in hostname:
            shortname = hostname.split(".inmotionhosting.com", maxsplit=1)[0]
            logger.debug("Extracted base hostname: %s", shortname)
            for pattern in patterns:
                match = re.search(pattern, shortname)
                if match:
                    server_id = match.group(1)
                    # Drop the digits: "vps1234" -> "vps" for the type lookup.
                    parsed_id = re.sub(r"\d+", "", server_id)
                    report.server_id = server_id
                    report.server_type = types.get(parsed_id, "Unknown")
                    logger.info("Auto-detected server ID: %s", report.server_id)
                    logger.debug("Deduced server type: %s", report.server_type)
                    return
            logger.debug("Hostname detected but did not match known ID patterns.")
        else:
            logger.debug("Hostname %s is not in standard format for auto-detection.", hostname)
    except Exception as exc:
        # Auto-detection is best-effort; fall through to the prompt below.
        logger.debug("Hostname detection failed: %s", exc)
    prompt = (
        "Hostname is not typical. Enter the server ID below.\n"
        "Accepted formats: dedi###, ded###, cc###, elite###, advanced###, and vps###:\n>>> "
    )
    # Interactive fallback: loop until the entered ID matches a pattern.
    while True:
        id_entered = input(prompt).strip().lower()
        if any(re.match(pattern, id_entered) for pattern in patterns):
            parsed_id = re.sub(r"\d+", "", id_entered)
            report.server_id = id_entered
            report.server_type = types.get(parsed_id, "Unknown")
            logger.info("Received server ID: %s", report.server_id)
            logger.debug("Deduced server type: %s", report.server_type)
            return
        logger.warning("Invalid format entered. Double-check the server's ID before entering.\n")
def get_panel_type(report):
    """Detect the control panel on this server via filesystem markers.

    Sets report.panel_type and returns it; falls back to "Baremetal"
    when no known panel footprint is found.
    """
    logger.debug("Determining panel type for given server ID: %s", report.server_id)
    if os.path.exists("/usr/local/cpanel/cpsrvd"):
        detected = "cPanel"
    elif os.path.exists("/usr/local/cwpsrv"):
        detected = "Control Web Panel"
    elif all(os.path.exists(p) for p in ("/etc/ansible/", "/home/wordpress/")):
        detected = "UltraStack ONE"
    else:
        detected = "Baremetal"
    report.panel_type = detected
    logger.debug("Detected panel type: %s", report.panel_type)
    return report.panel_type
def get_domains(report):
    """Populate report.domains and report.domain_count for the detected panel.

    Strategy per panel type: WHM API for cPanel, the CWP MySQL schema
    for Control Web Panel, and WordPress siteurl options for UltraStack
    ONE. Exits the program when no domains can be gathered, since later
    sections depend on the domain inventory.
    """
    logger.debug("Gathering domain list for panel type: %s", report.panel_type)
    try:
        if report.panel_type == "cPanel":
            data = whmapi1("get_domain_info")
            report.domains = [d["domain"] for d in data["data"]["domains"]]
            logger.debug("Retrieved %d domains from cPanel", len(report.domains))
        elif report.panel_type == "Control Web Panel":
            # Accounts, addon domains, and subdomains live in separate tables.
            query = """
            SELECT domain FROM user
            UNION
            SELECT domain from domains
            UNION
            SELECT CONCAT(subdomain, '.', domain) AS domain from subdomains;
            """
            with get_mysql_connection("root_cwp") as conn:
                with conn.cursor() as cursor:
                    cursor.execute(query)
                    results = cursor.fetchall()
                    report.domains = [row["domain"] for row in results]
            logger.info("Retrieved %d domains from CWP", len(report.domains))
        elif report.panel_type == "UltraStack ONE":
            with get_mysql_connection() as conn:
                with conn.cursor() as cursor:
                    # Find every WordPress *_options table, then read its siteurl.
                    cursor.execute(
                        """
                        SELECT table_schema, table_name
                        FROM information_schema.tables
                        WHERE table_name LIKE '%\\_options';
                        """
                    )
                    options_tables = cursor.fetchall()
                    for entry in options_tables:
                        db = entry["table_schema"]
                        table = entry["table_name"]
                        try:
                            cursor.execute(
                                f"""
                                SELECT option_value
                                FROM `{db}`.`{table}`
                                WHERE option_name = 'siteurl';
                                """
                            )
                            result = cursor.fetchone()
                            if result:
                                raw_url = result["option_value"]
                                # Keep only the host part of the site URL.
                                domain = re.sub(r"^https?://", "", raw_url.strip("/"))
                                report.domains.append(domain)
                        except Exception as inner_e:
                            # One broken WP install shouldn't abort the scan.
                            logger.debug(
                                "Failed to parse siteurl from %s.%s: %s", db, table, inner_e
                            )
            logger.debug("Retrieved %d domains from UltraStack ONE", len(report.domains))
        elif report.panel_type == "Baremetal":
            logger.warning("Baremetal domain detection is not implemented.")
            report.domains = []
    except Exception:
        logger.exception("Fatal error while gathering domains for %s", report.panel_type)
        sys.exit(1)
    if not report.domains or not isinstance(report.domains, list):
        logger.critical("[Fatal] No domains found. Aborting script.")
        sys.exit(1)
    report.domain_count = len(report.domains)
    logger.debug("Domains gathered: %s", report.domains)
def get_managed_domain(report):
    """Resolve the domain this report focuses on.

    Accepts a pre-set report.managed_domain when it is valid and present
    in the server's domain inventory; otherwise prompts interactively
    until a known domain is entered. Returns the normalized domain, or
    None when no input is available (EOF).
    """
    valid_domain_re = re.compile(
        r"^(?=.{1,253}$)(?:[A-Za-z0-9](?:[A-Za-z0-9-]{0,61}[A-Za-z0-9])?\.)+"
        r"[A-Za-z0-9](?:[A-Za-z0-9-]{0,61}[A-Za-z0-9])$"
    )

    def normalize_domain(domain_text):
        # Lowercase, drop scheme/path and trailing dot, then IDNA-encode.
        cleaned = (domain_text or "").strip().lower().rstrip(".")
        if cleaned.startswith(("http://", "https://")):
            cleaned = cleaned.split("://", 1)[1]
        cleaned = cleaned.split("/", 1)[0]
        try:
            cleaned = cleaned.encode("idna").decode("ascii")
        except Exception:
            pass
        return cleaned

    def is_valid_domain(domain_text):
        return bool(valid_domain_re.match(normalize_domain(domain_text)))

    if not getattr(report, "domains", None):
        logger.critical("Unable to find domains on this server. Report this as a bug.")
        sys.exit(1)
    inventory = {d.lower() for d in report.domains}
    preset = getattr(report, "managed_domain", None)
    if preset:
        candidate = normalize_domain(preset)
        if is_valid_domain(candidate) and candidate in inventory:
            if candidate != preset:
                report.managed_domain = candidate
            return report.managed_domain
        logger.warning("Pre-set managed_domain '%s' not found in server domains; prompting again.", preset)
    while True:
        try:
            entered = input("Managed domain for this report (e.g. example.com):\n>>> ")
        except EOFError:
            logger.error("No input available to determine managed domain.")
            return None
        candidate = normalize_domain(entered)
        if not is_valid_domain(candidate):
            logger.warning("Invalid domain format entered: %s. Examples: example.com, example.co.uk, subdomain.example.mx", entered)
            continue
        if candidate in inventory:
            report.managed_domain = candidate
            return report.managed_domain
        # Suggest the www/apex variant when only that form is hosted here.
        with_www = f"www.{candidate}"
        without_www = candidate[4:] if candidate.startswith("www.") else None
        if with_www in inventory:
            hint = with_www
        elif without_www and without_www in inventory:
            hint = without_www
        else:
            hint = None
        logger.warning("Domain '%s' not found in server inventory. Hint: %s", candidate, hint or "none")
# --- System
def get_os_version(report):
    """Detect the operating system version string and record it on the report.

    Prefers /etc/os-release (PRETTY_NAME), then the CentOS and Red Hat
    release files; stores "Unknown Operating System" when nothing is
    readable. Returns the detected string.
    """
    try:
        if os.path.exists("/etc/os-release"):
            logger.debug("Detected OS info from /etc/os-release")
            entries = {}
            with open("/etc/os-release", encoding="utf-8") as handle:
                for raw_line in handle:
                    stripped = raw_line.strip()
                    if "=" in stripped:
                        key, _, value = stripped.partition("=")
                        entries[key] = value
            # PRETTY_NAME is typically quoted, e.g. PRETTY_NAME="AlmaLinux 8".
            report.os_version = entries.get("PRETTY_NAME", "").strip().strip('"')
        elif os.path.exists("/etc/centos-release"):
            logger.debug("Detected OS info from /etc/centos-release")
            with open("/etc/centos-release", encoding="utf-8") as handle:
                report.os_version = handle.read().strip()
        elif os.path.exists("/etc/redhat-release"):
            logger.debug("Detected OS info from /etc/redhat-release")
            with open("/etc/redhat-release", encoding="utf-8") as handle:
                report.os_version = handle.read().strip()
        else:
            logger.warning("No known OS release file found. Defaulting to unknown.")
            report.os_version = "Unknown Operating System"
    except Exception:
        logger.exception("Failed to retrieve operating system version")
        report.os_version = "Unknown Operating System"
    return report.os_version
def get_kernel():
    """Return the running kernel release string, or an error marker on failure."""
    try:
        release = platform.release()
        logger.debug("Kernel version detected: %s", release)
        return release
    except Exception:
        logger.exception("Failed to retrieve kernel version")
        return "[Error] Failed to get kernel version"
def get_uptime():
    """Return uptime formatted as "<d> days, <h> hours, <m> minutes".

    Reads /proc/uptime; when that is unavailable or unreadable, falls back
    to the output of `uptime -p`. On total failure returns an error string.
    """
    proc_path = "/proc/uptime"
    try:
        if not os.path.exists(proc_path):
            raise FileNotFoundError(f"{proc_path} not found")
        with open(proc_path, encoding="utf-8") as handle:
            total_seconds = float(handle.readline().split()[0])
        day_seconds = 24 * 3600
        days = int(total_seconds // day_seconds)
        hours = int((total_seconds % day_seconds) // 3600)
        minutes = int((total_seconds % 3600) // 60)
        return f"{days} days, {hours} hours, {minutes} minutes"
    except Exception as primary_error:
        logger.warning("Primary uptime method failed: %s", primary_error)
        try:
            pretty = subprocess.check_output(["uptime", "-p"], text=True).strip()
            logger.debug("Uptime retrieved from fallback 'uptime -p': %s", pretty)
            return pretty
        except Exception as fallback_error:
            logger.exception("Both uptime methods failed: primary=%s, fallback=%s", primary_error, fallback_error)
            return f"[Error] Failed to retrieve uptime: {primary_error} | Fallback error: {fallback_error}"
def get_cpu_stats(report):
    """Build the CPU portion of the report from /proc/cpuinfo.

    Stores the logical core count on ``report.logical_cores`` (consumed later
    by the load analysis) and returns a formatted multi-line string.
    Dedicated servers get socket/physical-core detail; VPS servers get a
    cgroup-quota-based "burstable" check instead.
    """
    try:
        with open("/proc/cpuinfo", encoding="utf-8", errors="replace") as file_handle:
            cpuinfo_lines = file_handle.read().splitlines()
        # First "model name" entry; the remaining per-CPU blocks are assumed
        # to repeat the same model.
        cpu_model = next(
            (line.split(":", 1)[1].strip() for line in cpuinfo_lines if line.startswith("model name")),
            "Unknown",
        )
        report.logical_cores = sum(1 for line in cpuinfo_lines if line.startswith("processor"))
        physical_cpu_ids = set()
        physical_core_pairs = set()
        current_physical_id = None
        # /proc/cpuinfo emits one block per logical CPU; track the most recent
        # "physical id" and pair it with each "core id" so distinct
        # (socket, core) pairs and distinct sockets can be counted.
        for line in cpuinfo_lines:
            if line.startswith("physical id"):
                try:
                    current_physical_id = int(line.split(":", 1)[1].strip())
                except Exception:
                    current_physical_id = None
            elif line.startswith("core id"):
                try:
                    core_id = int(line.split(":", 1)[1].strip())
                except Exception:
                    continue
                if current_physical_id is not None:
                    physical_core_pairs.add((current_physical_id, core_id))
                    physical_cpu_ids.add(current_physical_id)
        burstable = "Unknown"
        server_type = (getattr(report, "server_type", "") or "").strip().lower()
        if server_type == "vps":
            # No CPU quota (cgroup v2 "max", or v1 quota of -1) means the VPS
            # may burst; a finite quota means it cannot.
            try:
                cgroup_v2_cpu_max = "/sys/fs/cgroup/cpu.max"
                cgroup_v1_quota = "/sys/fs/cgroup/cpu/cpu.cfs_quota_us"
                if os.path.exists(cgroup_v2_cpu_max):
                    with open(cgroup_v2_cpu_max, encoding="utf-8", errors="replace") as file_handle:
                        first_token = (file_handle.read().strip().split() or [""])[0]
                    burstable = "Yes" if first_token == "max" else "No"
                elif os.path.exists(cgroup_v1_quota):
                    with open(cgroup_v1_quota, encoding="utf-8", errors="replace") as file_handle:
                        burstable = "Yes" if file_handle.read().strip() == "-1" else "No"
                else:
                    burstable = "Not detected"
                logger.info("VPS burstable: %s", burstable)
            except Exception:
                logger.warning("Failed to read cgroup quota for burstable detection.")
                burstable = "Unknown"
        lines = [report.format_label_value("CPU Model", cpu_model)]
        if server_type == "dedicated":
            lines.append(report.format_label_value("Logical Cores (Threads)", report.logical_cores))
            lines.append(report.format_label_value("Physical Cores", len(physical_core_pairs)))
            lines.append(report.format_label_value("CPU Sockets", len(physical_cpu_ids)))
        elif server_type == "vps":
            lines.append(report.format_label_value("Visible Logical Cores (vCPUs)", report.logical_cores))
            if burstable != "Unknown":
                lines.append(report.format_label_value("VPS Burstable", burstable))
            if burstable == "Yes":
                lines.append(
                    report.format_block_dim(
                        "NOTE: This is a legacy VPS plan. Logical core count may not reflect guaranteed CPU availability."
                    )
                )
        return "\n".join(lines) + "\n"
    except Exception:
        logger.exception("Failed to retrieve CPU stats")
        return report.format_error("[Error] Failed to retrieve CPU stats")
def get_load_stats(report):
    """Render runtime load averages plus a 24-hour high-load event summary.

    A "high load event" is any `sar -q` sample whose parsed load value meets
    or exceeds ``report.logical_cores`` (populated by get_cpu_stats).
    """
    def get_historical_high_load_text():
        # Best-effort: requires sysstat's `sar` binary plus collected
        # /var/log/sa data files.
        try:
            if not shutil.which("sar"):
                logger.warning("'sar' not found for historical load.")
                return "[Notice] 'sar' not installed"
            sa_dir = "/var/log/sa"
            if not (os.path.isdir(sa_dir) and any(name.startswith("sa") for name in os.listdir(sa_dir))):
                logger.warning("sysstat logging not enabled.")
                return "[Notice] sysstat logging not enabled."
            output = subprocess.check_output(["sar", "-q", "-s", "00:00:00"], text=True)
            high_load_event_count = 0
            # Skip sar's banner/header lines. NOTE(review): parts[5] is read
            # as the 1-minute load, but sar -q layouts (with or without an
            # AM/PM timestamp field) can place ldavg-1 at a different index —
            # confirm against this sysstat version's output.
            for line in output.splitlines()[3:]:
                parts = line.split()
                if len(parts) < 6:
                    continue
                try:
                    one_min_load = float(parts[5])
                except ValueError:
                    continue
                if report.logical_cores and one_min_load >= report.logical_cores:
                    high_load_event_count += 1
            if high_load_event_count > 0:
                logger.info("%d high-load events found", high_load_event_count)
                return f"{high_load_event_count} instance(s) of 1-minute load exceeding logical CPU core count."
            return "No high load events recorded in the past 24 hours."
        except Exception:
            logger.exception("Failed to gather historical load data from sar")
            return "[Error] Unable to gather historical load data."
    def _format_label_only(label):
        # Bold white "Label:" heading with no value portion.
        label_text = f"{str(label).rstrip(':')}:"
        label_bold = report.apply_color("bold", label_text)
        return report.apply_color("white", label_bold)
    try:
        with open("/proc/loadavg", encoding="utf-8", errors="replace") as file_handle:
            one_value, five_value, fifteen_value = file_handle.read().strip().split()[:3]
    except Exception:
        logger.exception("Failed to retrieve runtime load averages from /proc/loadavg")
        one_value = five_value = fifteen_value = "N/A"
    historical_text = get_historical_high_load_text()
    runtime_lines = [
        _format_label_only("Runtime Load Averages"),
        report.format_block_dim(f"1 min: {one_value}"),
        report.format_block_dim(f"5 min: {five_value}"),
        report.format_block_dim(f"15 min: {fifteen_value}"),
    ]
    # Green for the all-clear message; dim rendering for notices/errors/counts.
    if (historical_text or "").strip().lower() == "no high load events recorded in the past 24 hours.":
        historical_rendered = report.apply_color("green", historical_text)
    else:
        historical_rendered = report.format_block_dim(historical_text)
    high_load_lines = [
        _format_label_only("24hr High Load Events"),
        historical_rendered,
    ]
    return "\n".join(runtime_lines).rstrip() + "\n\n" + "\n".join(high_load_lines).rstrip() + "\n"
def _mem_parse_meminfo_values(report):
    """Populate the report's memory fields from /proc/meminfo.

    Sets mem_total_gb, mem_avail_gb, mem_alloc_gb, mem_buffers_gb and
    mem_swap_gb, and returns the "unsafe" availability threshold in GB
    (5% of total memory; 0.2 when parsing fails, with all fields zeroed).
    """
    try:
        fields = {}
        with open("/proc/meminfo", encoding="utf-8", errors="replace") as handle:
            for raw_line in handle:
                if ":" in raw_line:
                    name, value = raw_line.strip().split(":", 1)
                    fields[name] = value

        def kilobytes(name):
            # Values look like "16384 kB"; take the leading number.
            return int(fields.get(name, "0 kB").strip().split()[0])

        report.mem_total_gb = round(kilobytes("MemTotal") / 1048576, 2)
        report.mem_avail_gb = round(kilobytes("MemAvailable") / 1048576, 2)
        report.mem_alloc_gb = round(report.mem_total_gb - report.mem_avail_gb, 2)
        report.mem_swap_gb = round(kilobytes("SwapTotal") / 1048576, 2)
        # Reclaimable cache: buffers + cached + SReclaimable minus shared
        # memory, clamped at zero.
        cache_kb = kilobytes("Buffers") + (kilobytes("Cached") + kilobytes("SReclaimable") - kilobytes("Shmem"))
        if cache_kb < 0:
            cache_kb = 0
        report.mem_buffers_gb = round(cache_kb / 1048576, 2)
        threshold_gb = round(report.mem_total_gb * 0.05, 2)
        logger.info(
            "Memory totals: %s GB total, %s GB available, %s GB allocated, %s GB buff/cache, %s GB swap limit",
            report.mem_total_gb,
            report.mem_avail_gb,
            report.mem_alloc_gb,
            report.mem_buffers_gb,
            report.mem_swap_gb,
        )
        return threshold_gb
    except Exception:
        logger.exception("Failed to parse /proc/meminfo")
        for attr_name in ("mem_total_gb", "mem_avail_gb", "mem_alloc_gb", "mem_buffers_gb", "mem_swap_gb"):
            setattr(report, attr_name, 0)
        return 0.2
def _mem_tail_lines(path, max_lines=50000, block_size=8192):
lines = []
try:
with open(path, "rb") as file_handle:
file_handle.seek(0, os.SEEK_END)
position = file_handle.tell()
buffer = b""
while position > 0 and len(lines) < max_lines:
read_size = min(block_size, position)
position -= read_size
file_handle.seek(position)
buffer = file_handle.read(read_size) + buffer
parts = buffer.split(b"\n")
buffer = parts[0]
for raw_line in reversed(parts[1:]):
if len(lines) >= max_lines:
break
lines.append(raw_line)
if buffer and len(lines) < max_lines:
lines.append(buffer)
return [line.decode("utf-8", "ignore") for line in reversed(lines)]
except Exception:
logger.exception("Failed to tail log file: %s", path)
return []
def _mem_collect_oom_history():
    """Scan /var/log/messages* for OOM-killer activity.

    Returns (mention_count, victims): the number of log lines containing
    "oom" or "killed process", and the process names extracted from
    "Killed process <pid> (<name>)" entries.
    """
    mention_count = 0
    victims = []
    kill_pattern = re.compile(r"killed process \d+ \(([^)]+)\)", re.IGNORECASE)
    try:
        for log_path in glob.glob("/var/log/messages*"):
            if not os.path.isfile(log_path):
                continue
            for log_line in _mem_tail_lines(log_path, max_lines=50000):
                lowered = log_line.lower()
                if "killed process" in lowered or "oom" in lowered:
                    mention_count += 1
                    found = kill_pattern.search(log_line)
                    if found:
                        victims.append(found.group(1))
    except Exception:
        logger.exception("Failed to check /var/log/messages* for OOM history")
    return mention_count, victims
def _mem_collect_low_memory_events(unsafe_available_gb):
    """Count 24h sar samples where available memory fell below the threshold.

    "Available" is approximated as kbmemfree + kbbuffers + kbcached (in GB).
    Returns 0 when `sar` or its /var/log/sa data is missing, the header row
    cannot be located, or any unexpected failure occurs.
    """
    if not shutil.which("sar"):
        return 0
    sa_dir = "/var/log/sa"
    if not (os.path.isdir(sa_dir) and any(filename.startswith("sa") for filename in os.listdir(sa_dir))):
        return 0
    try:
        output = subprocess.check_output(["sar", "-r", "-s", "00:00:00"], text=True)
        sar_lines = output.splitlines()
        header_indexes = None
        data_lines = []
        # Locate the header row and map column names to positions instead of
        # hard-coding indexes; sar's column order varies between versions.
        for line_index, line in enumerate(sar_lines):
            if "kbmemfree" in line.lower():
                header = line.lower().split()
                header_indexes = {
                    "kbmemfree": header.index("kbmemfree"),
                    "kbbuffers": header.index("kbbuffers"),
                    "kbcached": header.index("kbcached"),
                }
                data_lines = sar_lines[line_index + 1 :]
                break
        if not header_indexes:
            return 0
        low_memory_event_count = 0
        for line in data_lines:
            parts = line.split()
            # Skip rows too short to hold the widest referenced column
            # (blank separators, summary rows, etc.).
            if len(parts) <= max(header_indexes.values()):
                continue
            try:
                avail_gb = round(
                    (
                        int(parts[header_indexes["kbmemfree"]])
                        + int(parts[header_indexes["kbbuffers"]])
                        + int(parts[header_indexes["kbcached"]])
                    )
                    / 1048576,
                    2,
                )
            except ValueError:
                continue
            if avail_gb < unsafe_available_gb:
                low_memory_event_count += 1
        logger.info("Low memory availability events (24h): %d", low_memory_event_count)
        return low_memory_event_count
    except Exception:
        logger.exception("Failed to analyze historical memory usage with sar")
        return 0
def get_memory_stats(report):
    """Render the memory section: live totals plus OOM and low-memory history."""
    unsafe_threshold_gb = _mem_parse_meminfo_values(report)
    oom_total, oom_victims = _mem_collect_oom_history()
    low_memory_events = _mem_collect_low_memory_events(unsafe_threshold_gb)
    # Paint the history counters: red when events exist, otherwise calm colors.
    try:
        oom_style = "white" if int(oom_total) == 0 else "red"
    except Exception:
        oom_style = "white"
    try:
        low_style = "green" if int(low_memory_events) == 0 else "red"
    except Exception:
        low_style = "white"
    body = [
        report.format_label_value("Total", f"{report.mem_total_gb} GB"),
        report.format_label_value("Available", f"{report.mem_avail_gb} GB"),
        report.format_label_value("Allocated", f"{report.mem_alloc_gb} GB"),
        report.format_label_value("Buffered/Cache", f"{report.mem_buffers_gb} GB"),
        report.format_label_value("Swap Limit", f"{report.mem_swap_gb} GB"),
        "",
        report.mem_format_label_value(
            "Total Historical Out-of-Memory Events",
            oom_total,
            value_style=oom_style,
        ),
        report.mem_format_label_value(
            "Low Memory Events (24hr, <5% Memory)",
            low_memory_events,
            value_style=low_style,
        ),
    ]
    if oom_victims:
        deduped = ", ".join(sorted(set(oom_victims)))
        heading = report.apply_color("bold", "Recent Process Kills Due to OOM Events:")
        body.append(report.apply_color("white", heading))
        body.append(report.apply_color("red", deduped))
    else:
        body.append(
            report.mem_format_label_value(
                "Recent Process Kills Due to OOM Events",
                "None",
                value_style="green",
            )
        )
    return "\n".join(body).rstrip() + "\n"
# Pseudo/virtual filesystem types that never represent real disk capacity;
# mounts of these types are excluded from the aggregate disk totals.
DISK_EXCLUDE_TYPES = {
    "tmpfs", "devtmpfs", "proc", "sysfs", "cgroup", "overlay", "nsfs", "squashfs", "rpc_pipefs", "selinuxfs",
    "debugfs", "tracefs", "configfs", "fusectl", "hugetlbfs", "mqueue", "pstore", "autofs", "binfmt_misc",
    "aufs", "virtfs", "ramfs",
}
# Mountpoint prefixes excluded from totals: kernel pseudo-mounts plus
# container/snap layers that would double-count the underlying storage.
DISK_EXCLUDE_MOUNT_PREFIXES = (
    "/proc", "/sys", "/dev", "/run", "/var/lib/docker", "/var/lib/containers", "/snap"
)
def _disk_to_gb(bytes_value):
try:
return round(int(bytes_value) / (1024 ** 3), 2)
except Exception:
return 0.0
def _disk_run(command, timeout=10):
try:
return subprocess.check_output(
command,
text=True,
stderr=subprocess.STDOUT,
timeout=timeout,
)
except subprocess.CalledProcessError as exc:
logger.debug("Command failed: %s -> %s", " ".join(command), exc)
return ""
except Exception as exc:
logger.debug("Exception running: %s -> %s", " ".join(command), exc)
return ""
def _disk_fstypes(df_types_text):
fs_type_map = {}
try:
for line in (df_types_text or "").splitlines()[1:]:
parts = line.split()
if len(parts) >= 7:
fs_type_map[(parts[0], parts[-1])] = parts[1]
except Exception:
logger.exception("Failed parsing df -PT output")
return fs_type_map
def _disk_skip(mountpoint, fstype, exclude_types, exclude_mount_prefixes):
if (fstype or "").lower() in exclude_types:
return True
return any(mountpoint.startswith(prefix) for prefix in exclude_mount_prefixes)
def _disk_mounts(df_sizes_text, fs_type_map, exclude_types, exclude_mount_prefixes):
totals = {"total": 0, "used": 0, "avail": 0}
per_mount = []
try:
for line_text in (df_sizes_text or "").splitlines()[1:]:
columns = line_text.split()
if len(columns) < 6:
continue
filesystem = columns[0]
mountpoint = columns[5]
fstype = (fs_type_map.get((filesystem, mountpoint), "") or "?")
if _disk_skip(mountpoint, fstype, exclude_types, exclude_mount_prefixes):
continue
totals["total"] += int(columns[1])
totals["used"] += int(columns[2])
totals["avail"] += int(columns[3])
per_mount.append(
{
"fs": filesystem,
"type": fstype,
"mnt": mountpoint,
"size_gb": _disk_to_gb(columns[1]),
"used_gb": _disk_to_gb(columns[2]),
"avail_gb": _disk_to_gb(columns[3]),
"pct": columns[4],
}
)
except Exception:
logger.exception("Failed parsing df size output")
single_root = len(per_mount) == 1 and per_mount[0].get("mnt") == "/"
pct_used = (
per_mount[0]["pct"]
if single_root
else (
f"{round((totals['used'] / totals['total']) * 100, 1)}%"
if totals["total"]
else "0%"
)
)
return {
"total_gb": round(totals["total"] / (1024 ** 3), 2) if totals["total"] else 0.0,
"used_gb": round(totals["used"] / (1024 ** 3), 2) if totals["used"] else 0.0,
"avail_gb": round(totals["avail"] / (1024 ** 3), 2) if totals["avail"] else 0.0,
"pct_used": pct_used,
"single_root": single_root,
"per_mount": per_mount,
}
def _disk_inodes(df_inodes_text, fs_type_map, exclude_types, exclude_mount_prefixes):
inode_total = 0
inode_used = 0
try:
for line in (df_inodes_text or "").splitlines()[1:]:
parts = line.split()
if len(parts) < 4:
continue
filesystem = parts[0]
itotal = parts[1]
iused = parts[2]
mountpoint = parts[-1]
fstype = fs_type_map.get((filesystem, mountpoint), "")
if _disk_skip(mountpoint, fstype, exclude_types, exclude_mount_prefixes):
continue
try:
inode_total += int(itotal)
inode_used += int(iused)
except ValueError:
continue
except Exception:
logger.exception("Failed parsing inode usage")
inode_pct = f"{round((inode_used / inode_total) * 100, 1)}%" if inode_total else "0%"
return {"total": inode_total, "used": inode_used, "pct": inode_pct}
def _disk_dedi():
    """Collect physical device and software-RAID details (Dedicated servers).

    Returns {"devices": [...], "raid": [...]} of preformatted Markdown lines;
    each probe is best-effort, so either list may be empty.
    """
    details = {"devices": [], "raid": []}
    # Try progressively simpler lsblk invocations for older util-linux builds.
    lsblk_text = ""
    for candidate in (
        ["lsblk", "-P", "-o", "NAME,TYPE,SIZE,FSTYPE,MOUNTPOINT,MODEL,ROTA"],
        ["lsblk", "-P", "-o", "NAME,TYPE,SIZE,MOUNTPOINT,MODEL,ROTA"],
        ["lsblk", "-P"],
    ):
        candidate_output = _disk_run(candidate, timeout=8)
        if candidate_output.strip():
            lsblk_text = candidate_output
            break
    if lsblk_text:
        try:
            for device_line in lsblk_text.splitlines():
                # -P emits KEY="value" pairs; strip the surrounding quotes.
                fields = {key: value.strip('"') for key, value in re.findall(r'(\w+)=(".*?"|\S+)', device_line)}
                if fields.get("TYPE") not in ("disk", "raid", "mpath"):
                    continue
                rota = fields.get("ROTA")
                if rota == "0":
                    media = "SSD"
                elif rota == "1":
                    media = "HDD"
                else:
                    media = "?"
                details["devices"].append(
                    f"- /dev/{fields.get('NAME', '?')} ({fields.get('MODEL', '-')}) "
                    f"Size: {fields.get('SIZE', '?')} "
                    f"Media: {media}"
                )
        except Exception:
            logger.exception("Failed parsing lsblk output")
    if os.path.exists("/proc/mdstat"):
        try:
            with open("/proc/mdstat", encoding="utf-8") as handle:
                mdstat_text = handle.read().strip()
            if mdstat_text and "raid" in mdstat_text:
                details["raid"].append("**/proc/mdstat:**")
                details["raid"].append("```\n" + mdstat_text + "\n```")
        except Exception:
            logger.exception("Failed reading /proc/mdstat")
    if shutil.which("mdadm"):
        mdadm_text = _disk_run(["mdadm", "--detail", "--scan"], timeout=5)
        if mdadm_text.strip():
            details["raid"].append("**mdadm --detail --scan:**")
            details["raid"].append("```\n" + mdadm_text.strip() + "\n```")
    return details
def get_disk_info(report):
    """Build the disk section: capacity and inode totals, a per-mount table
    when multiple real filesystems exist, and device/RAID detail for
    Dedicated servers."""
    logger.info("Gathering disk usage, mounts, and RAID/LVM details")
    def _format_label_value_custom_color(label, value, value_style="white"):
        # Like report.format_label_value, but with a caller-selected value
        # color (used to paint low available space red).
        label_text = f"{str(label).rstrip(':')}:"
        label_bold = report.apply_color("bold", label_text)
        label_colored = report.apply_color("white", label_bold)
        value_text = "" if value is None else str(value)
        if not value_text:
            value_text = "Unknown"
        value_colored = report.apply_color(value_style, value_text)
        return f"{label_colored} {value_colored}"
    # df -PT maps (device, mountpoint) -> fstype so pseudo filesystems can
    # be excluded from the aggregates.
    fs_type_map = _disk_fstypes(_disk_run(["df", "-PT"]))
    disk_data = _disk_mounts(
        _disk_run(["df", "-B1", "--output=source,size,used,avail,pcent,target"]),
        fs_type_map,
        DISK_EXCLUDE_TYPES,
        DISK_EXCLUDE_MOUNT_PREFIXES,
    )
    # Fall back to plain `df -i` for df builds without --output support.
    inode_text = _disk_run(["df", "-i", "--output=source,itotal,iused,target"]) or _disk_run(["df", "-i"])
    inode_data = _disk_inodes(
        inode_text,
        fs_type_map,
        DISK_EXCLUDE_TYPES,
        DISK_EXCLUDE_MOUNT_PREFIXES,
    )
    dedicated_details = {"devices": [], "raid": []}
    if (getattr(report, "server_type", "") or "").lower() == "dedicated":
        dedicated_details = _disk_dedi()
    try:
        total_gb = float(disk_data.get("total_gb") or 0.0)
    except Exception:
        total_gb = 0.0
    try:
        avail_gb = float(disk_data.get("avail_gb") or 0.0)
    except Exception:
        avail_gb = 0.0
    # Highlight available space in red when 10% or less of capacity remains.
    avail_pct = (avail_gb / total_gb) if total_gb else 0.0
    avail_style = "red" if total_gb and avail_pct <= 0.10 else "white"
    lines = [
        _format_label_value_custom_color("Total Capacity", f"{disk_data['total_gb']} GB"),
        _format_label_value_custom_color("Used", f"{disk_data['used_gb']} GB ({disk_data['pct_used']})"),
        _format_label_value_custom_color("Available", f"{disk_data['avail_gb']} GB", value_style=avail_style),
        _format_label_value_custom_color(
            "Inodes",
            f"{inode_data['used']} used / {inode_data['total']} total ({inode_data['pct']})",
        ),
        "",
    ]
    # Only render the mount table when there is more than a single "/" mount.
    if disk_data["per_mount"] and not disk_data["single_root"]:
        lines.append("#### Mounted Filesystems")
        lines.append(
            "```\n{:<32} {:<8} {:>8} {:>8} {:>8} {:>6}".format(
                "Mount", "Type", "SizeGB", "UsedGB", "AvailGB", "Use%"
            )
        )
        for mount_entry in sorted(disk_data["per_mount"], key=lambda x: x["mnt"]):
            lines.append(
                "{:<32} {:<8} {:>8} {:>8} {:>8} {:>6}".format(
                    mount_entry["mnt"][:32],
                    mount_entry["type"][:8],
                    f"{mount_entry['size_gb']:.2f}",
                    f"{mount_entry['used_gb']:.2f}",
                    f"{mount_entry['avail_gb']:.2f}",
                    mount_entry["pct"],
                )
            )
        lines.append("```")
        lines.append("")
    if (getattr(report, "server_type", "") or "").lower() == "dedicated":
        lines.append("#### Physical Storage Devices")
        lines.extend(dedicated_details["devices"] or ["- [Info] Could not enumerate physical disks."])
        if dedicated_details["raid"]:
            lines.append("")
            lines.append("#### RAID Status")
            lines.extend(dedicated_details["raid"])
    return "\n".join(lines).rstrip() + "\n"
def get_smart_stats(report):
    """Report SMART overall health for each detected drive.

    Only meaningful on Dedicated servers with smartmontools installed;
    returns a bullet list of "/dev/X: Status" lines, or a notice/error
    string when the check cannot run.
    """
    server_type = report.server_type
    if not server_type or server_type.lower() != "dedicated":
        logger.info("Skipping SMART check: server is not a Dedicated server.")
        return "SMART health checks are only available on Dedicated servers."
    if not shutil.which("smartctl"):
        logger.warning("smartctl not found on system.")
        return "[Notice] smartctl not installed. Run `yum install smartmontools` to enable SMART checks."
    try:
        logger.info("Scanning for SMART-compatible devices using `smartctl --scan`.")
        scan_text = subprocess.check_output(["smartctl", "--scan"], text=True)
        device_paths = [
            entry.split()[0]
            for entry in scan_text.splitlines()
            if entry.strip().startswith("/dev/")
        ]
        if not device_paths:
            logger.warning("No SMART-compatible devices detected.")
            return "[Warning] No SMART-compatible devices detected."
        summary_lines = []
        for device_path in device_paths:
            try:
                logger.info("Checking SMART status for device: %s", device_path)
                health_text = subprocess.check_output(
                    ["smartctl", "-H", device_path], text=True, stderr=subprocess.STDOUT
                )
                # ATA and SCSI devices phrase the health verdict differently.
                match = re.search(
                    r"SMART overall-health self-assessment test result:\s*([A-Z]+)", health_text, re.I
                ) or re.search(r"SMART Health Status:\s*([A-Z]+)", health_text, re.I)
                status = (match.group(1).upper() if match else "UNKNOWN").capitalize()
                summary_lines.append(f"- {device_path}: {status}")
                logger.debug("SMART check for device %s returned status: %s", device_path, status)
            except subprocess.CalledProcessError as e:
                logger.warning("smartctl failed for %s: rc=%d", device_path, e.returncode)
                summary_lines.append(f"- {device_path}: [Error reading SMART data]")
            except Exception:
                logger.exception("Unexpected error reading SMART for %s", device_path)
                summary_lines.append(f"- {device_path}: [Error reading SMART data]")
        logger.info("Completed SMART check for all devices.")
        return "\n".join(summary_lines)
    except Exception as e:
        logger.exception("Failed to retrieve SMART stats.")
        return f"[Error] Failed to retrieve SMART stats: {e}"
def get_deadweight():
    """Find the 25 largest files over 500MB on the whole filesystem.

    Prunes kernel pseudo-filesystems and the postfix spool, skips virtfs
    paths, and returns a Markdown code block (or a "none found" / error
    message string).
    """
    try:
        prune_paths = ["/proc", "/sys", "/dev", "/run", "/var/spool/postfix"]
        find_cmd = ["find", "/"]
        for prune_path in prune_paths:
            find_cmd += ["-path", prune_path, "-prune", "-o"]
        find_cmd += ["-type", "f", "-size", "+500M", "-printf", "%s\t%p\n"]
        completed = subprocess.run(
            find_cmd,
            stderr=subprocess.DEVNULL,
            stdout=subprocess.PIPE,
            text=True,
            check=False
        )
        raw_lines = (completed.stdout or "").splitlines()
        if not raw_lines:
            return "No files larger than 500MB found on the system.\n"
        sized_paths = []
        for raw_line in raw_lines:
            try:
                size_text, file_path = raw_line.split("\t", 1)
                if "virtfs" in file_path:
                    continue
                sized_paths.append((int(size_text), file_path))
            except Exception:
                continue
        if not sized_paths:
            return "No significant disk usage found.\n"
        sized_paths.sort(key=lambda pair: pair[0], reverse=True)
        largest = sized_paths[:25]
        def humanize(byte_count):
            # Whole numbers for B/K/M, two decimals from G upward.
            suffixes = ["B", "K", "M", "G", "T", "P"]
            value = float(byte_count)
            index = 0
            while value >= 1024 and index < len(suffixes) - 1:
                value /= 1024.0
                index += 1
            if suffixes[index] in ("B", "K", "M"):
                return f"{value:.0f}{suffixes[index]}"
            return f"{value:.2f}{suffixes[index]}"
        rows = ["```"]
        for size_bytes, file_path in largest:
            rows.append(f"- {humanize(size_bytes):>7} {file_path}")
        rows.append("```")
        return "\n".join(rows) + "\n"
    except Exception as e:
        logger.exception("Failed to gather deadweight usage.")
        return f"[Error] Failed to gather deadweight usage: {e}"
def get_crons(report):
    """Summarize system and user cron jobs and surface backup-related entries.

    Counts entries in /etc/crontab, the /etc/cron.* directories, root's
    crontab, and each regular user's crontab, then lists any entry whose
    text contains a backup keyword. Returns a formatted multi-line string.
    """
    def filter_cron_lines(text):
        # Keep non-empty, non-comment lines from a crontab listing.
        return [line.strip() for line in (text or "").splitlines() if line.strip() and not line.startswith("#")]
    def contains_backup_keyword(text, keywords):
        lowered = (text or "").lower()
        return any(keyword in lowered for keyword in keywords)
    def collect_backup_matches(entries, keywords):
        return [entry for entry in entries if contains_backup_keyword(entry, keywords)]
    def label_only(label):
        # Bold white "Label:" heading with no value portion.
        label_text = f"{str(label).rstrip(':')}:"
        label_bold = report.apply_color("bold", label_text)
        return report.apply_color("white", label_bold)
    def collect_system_cron_counts_and_backups(cron_directories, keywords):
        # Counts /etc/crontab entries plus files dropped into /etc/cron.*.
        # For the directories, the *filename* is what gets keyword-matched.
        system_count = 0
        system_backup_entries = []
        if os.path.exists("/etc/crontab"):
            with open("/etc/crontab", encoding="utf-8", errors="replace") as crontab_file:
                crontab_lines = [line.strip() for line in crontab_file if line.strip() and not line.startswith("#")]
            system_count += len(crontab_lines)
            backup_entries = collect_backup_matches(crontab_lines, keywords)
            system_backup_entries.extend(backup_entries)
            if backup_entries:
                logger.info("Found %d backup-related entries in /etc/crontab", len(backup_entries))
        for cron_directory in cron_directories:
            if not os.path.isdir(cron_directory):
                continue
            directory_entries = [entry for entry in os.listdir(cron_directory) if not entry.startswith(".")]
            system_count += len(directory_entries)
            backup_entries = collect_backup_matches(directory_entries, keywords)
            system_backup_entries.extend(backup_entries)
            if backup_entries:
                logger.info("Found %d backup-related cron entries in %s", len(backup_entries), cron_directory)
        return system_count, system_backup_entries
    def collect_user_cron_counts_and_backups(keywords):
        # Root's crontab is counted separately from other users'.
        root_count = 0
        user_count = 0
        user_backup_entries = []
        try:
            root_lines = filter_cron_lines(
                subprocess.check_output(["crontab", "-l"], text=True, stderr=subprocess.DEVNULL)
            )
            root_count = len(root_lines)
            backup_entries = collect_backup_matches(root_lines, keywords)
            user_backup_entries.extend(backup_entries)
            if backup_entries:
                logger.info("Found %d backup-related cron entries in root's crontab", len(backup_entries))
        except subprocess.CalledProcessError:
            # `crontab -l` exits non-zero when no crontab is defined.
            logger.debug("Root does not have a crontab defined")
        # Skip service accounts: well-known pseudo users, system UIDs
        # (<1000), and accounts with no-login shells.
        for user_entry in pwd.getpwall():
            username = user_entry.pw_name
            if (
                username in ("nobody", "mysql")
                or user_entry.pw_uid < 1000
                or user_entry.pw_shell in ("/sbin/nologin", "/usr/sbin/nologin", "/bin/false")
            ):
                continue
            try:
                user_lines = filter_cron_lines(
                    subprocess.check_output(
                        ["crontab", "-l", "-u", username],
                        text=True,
                        stderr=subprocess.DEVNULL,
                    )
                )
            except subprocess.CalledProcessError:
                continue
            if not user_lines:
                continue
            user_count += len(user_lines)
            backup_entries = collect_backup_matches(user_lines, keywords)
            user_backup_entries.extend(backup_entries)
            if backup_entries:
                logger.info("Found %d backup-related entries in %s's crontab", len(backup_entries), username)
        return root_count, user_count, user_backup_entries
    system_cron_directories = [
        "/etc/cron.d",
        "/etc/cron.daily",
        "/etc/cron.hourly",
        "/etc/cron.monthly",
        "/etc/cron.weekly",
    ]
    backup_keywords = ("backup",)
    system_cron_job_count, system_backup_cron_entries = collect_system_cron_counts_and_backups(
        system_cron_directories,
        backup_keywords,
    )
    root_cron_job_count, user_cron_job_count, user_backup_cron_entries = collect_user_cron_counts_and_backups(
        backup_keywords
    )
    logger.debug("System cron job count: %d", system_cron_job_count)
    logger.debug("Root cron job count: %d", root_cron_job_count)
    logger.debug("User cron job count: %d", user_cron_job_count)
    lines = [
        "Scheduled tasks configured on the server, either system-managed or user-defined.",
        "",
        report.format_label_value("System Cron Job Count (/etc/crontab and /etc/cron.*)", system_cron_job_count),
        report.format_label_value("Root Cron Job Count (`crontab -l` for root)", root_cron_job_count),
        report.format_label_value("User Cron Job Count (`crontab -l -u <user>`)", user_cron_job_count),
    ]
    if system_backup_cron_entries or user_backup_cron_entries:
        lines.extend(
            [
                "",
                label_only("Backup-Related Cron Jobs"),
                "Possibly related to system or user backups. Timing affects system load, therefore backups should run during nightly hours.",
            ]
        )
    if system_backup_cron_entries:
        lines.append("")
        lines.append(label_only("Found in system-level crontab or /etc/cron.* directories"))
        lines.extend(f"- {entry}" for entry in system_backup_cron_entries)
    if user_backup_cron_entries:
        lines.append("")
        lines.append(label_only("Found in crontab entries (root or users)"))
        lines.extend(f"- {entry}" for entry in user_backup_cron_entries)
    return "\n".join(lines).rstrip() + "\n"
def system_build_section(report):
    """Assemble the "System" section of the report.

    Runs each system probe in sequence while ticking the section progress
    bar, queues the slow large-file scan on the background executor, and
    returns the section text containing a placeholder marker where the scan
    result will be spliced in once the future resolves.
    """
    logger.debug("Building 'System' section")
    is_dedicated = bool(report.server_type and str(report.server_type).lower() == "dedicated")
    # Dedicated servers get one extra step for the SMART health probe.
    steps = 8 + int(is_dedicated)
    section_progress = SectionProgress(
        report.progress,
        (report.section_index, report.sections_total),
        steps,
        report.section_name,
    )
    def render_deadweight_error(body):
        return report.format_block_dim(report.format_error(body))
    def render_deadweight(body):
        # Green success line when nothing was found; dim block otherwise.
        text = "" if body is None else str(body)
        stripped = text.strip()
        if stripped == "No files larger than 500MB found on the system.":
            return report.apply_color("green", "No files larger than 500MB found on the system.\n")
        return report.format_block_dim(text)
    try:
        summaries = {}
        section_progress.tick("OS version")
        summaries["os_version"] = get_os_version(report)
        section_progress.tick("Kernel")
        summaries["kernel_version"] = get_kernel()
        section_progress.tick("Uptime")
        summaries["uptime_value"] = get_uptime()
        section_progress.tick("CPU stats")
        summaries["cpu_summary"] = get_cpu_stats(report)
        section_progress.tick("Load averages")
        summaries["load_summary"] = get_load_stats(report)
        section_progress.tick("Memory stats")
        summaries["memory_summary"] = get_memory_stats(report)
        section_progress.tick("Disk storage")
        summaries["disk_summary"] = get_disk_info(report)
        smart_block = ""
        if is_dedicated:
            section_progress.tick("SMART health")
            summaries["smart_summary"] = get_smart_stats(report)
            smart_block = (
                f"{report.format_subheading('### SMART Status')}\n"
                f"{report.format_block_dim(summaries['smart_summary'])}\n\n"
            )
        # The >500MB scan walks the whole filesystem, so run it on the shared
        # executor and splice the result in at the marker when it finishes.
        if getattr(report, "deadweight_future", None) is None:
            try:
                report.deadweight_future = EXECUTOR.submit(get_deadweight)
            except Exception:
                logger.exception("Failed to queue deadweight background scan")
                report.deadweight_future = None
        deadweight_marker = "<!--DEADWEIGHT_SLOT-->"
        section_progress.tick("Crons")
        summaries["cron_summary"] = get_crons(report)
        section_text = (
            f"{report.format_heading('## System Summary')}\n"
            f"{report.format_label_value('Operating System', summaries['os_version'])}\n"
            f"{report.format_label_value('Kernel', summaries['kernel_version'])}\n"
            f"{report.format_label_value('Server Uptime', summaries['uptime_value'])}\n"
            f"{report.format_block_dim(summaries['cpu_summary'])}\n"
            f"{report.format_subheading('### Load Averages')}\n"
            f"{report.format_block_dim(summaries['load_summary'])}\n"
            f"{report.format_subheading('### Memory')}\n"
            f"{report.format_block_dim(summaries['memory_summary'])}\n"
            f"{report.format_subheading('### Disk Usage')}\n"
            f"{report.format_block_dim(summaries['disk_summary'])}\n"
            f"{smart_block}"
            f"{report.format_subheading('### Large Files (>500MB)')}\n"
            f"{deadweight_marker}\n"
            f"{report.format_subheading('### Cron Jobs')}\n"
            f"{report.format_block_dim(summaries['cron_summary'])}"
        )
        if report.deadweight_future is not None:
            report.add_async_insertion(
                marker=deadweight_marker,
                future=report.deadweight_future,
                render_fn=render_deadweight,
            )
        else:
            # The scan never started: insert a pre-resolved error message
            # future so the marker is still replaced.
            report.add_async_insertion(
                marker=deadweight_marker,
                future=EXECUTOR.submit(str, "[Error] Deadweight scan could not be started.\n"),
                render_fn=render_deadweight_error,
            )
        return section_text
    except Exception:
        logger.exception("Failed to build 'System' section")
        error_text = (
            "[Error] Failed to build system section. Please do a manual review for OS version, "
            "Kernel version, uptime, CPU, Load, Memory, Disk, Deadweight files, SMART stats, "
            "and cron jobs. Please report this bug."
        )
        return report.format_error(error_text)
# --- Account & Website
def get_user_stats(report):
    """Populate user/ownership data on *report* for the detected control panel.

    Dispatches on report.panel_type (cPanel, Control Web Panel/CWP,
    UltraStack ONE, Baremetal), then sets report.domain_owner,
    report.user_list, and report.user_count via best-effort setattr.

    Returns the user count; on some failure paths this is an
    "[Error] ..." string rather than an int.
    """
    def safe_setattr(obj, attr_name, value):
        # Best-effort attribute assignment: failures are logged, never raised.
        try:
            setattr(obj, attr_name, value)
        except Exception:
            logger.exception("Failed to set %s", attr_name)
    def cpanel_domain_owner(domain_name):
        # Primary lookup: WHM API "domainuserdata"; fallback: `whoowns` binary.
        owner_name = None
        try:
            response = whmapi1("domainuserdata", {"domain": domain_name})
            userdata = (response.get("data", {}) or {}).get("userdata", {}) or {}
            owner_name = userdata.get("user") or None
            if owner_name:
                logger.info("cPanel domain owner for %s via domainuserdata: %s", domain_name, owner_name)
        except Exception:
            logger.exception("WHM API call 'domainuserdata' failed while looking up owner for %s", domain_name)
        if owner_name:
            return owner_name
        try:
            whoowns_output = subprocess.check_output(
                ["whoowns", domain_name],
                text=True,
                stderr=subprocess.DEVNULL,
                timeout=5,
            ).strip()
            # First whitespace-separated token of the output is the account name.
            owner_name = (whoowns_output.split() or [None])[0]
            if owner_name:
                logger.info("cPanel domain owner for %s via whoowns: %s", domain_name, owner_name)
        except FileNotFoundError:
            logger.debug("whoowns binary not found on system")
        except Exception:
            logger.exception("'whoowns' fallback failed while looking up owner for %s", domain_name)
        return owner_name
    def cpanel_users():
        # Returns (count, [usernames]); count becomes an error string if the
        # count API fails, while the list is collected independently.
        users_count = 0
        users_list = []
        try:
            response = whmapi1("get_current_users_count")
            users_count = (response.get("data", {}) or {}).get("users", 0)
            logger.info("Total cPanel users: %s", users_count)
        except Exception as exc:
            users_count = f"[Error] Unexpected exception: {str(exc)}"
            logger.exception("WHM API call to get user count failed")
        try:
            response = whmapi1("listaccts")
            account_list = (response.get("data", {}) or {}).get("acct", []) or []
            users_list = [acct.get("user") for acct in account_list if acct.get("user")]
            logger.info("Collected cPanel user list: %d users", len(users_list))
        except Exception:
            logger.exception("WHM API call to get full user list failed for cPanel")
        return users_count, users_list
    def cwp_user_list():
        # Preferred: CWP's /scripts/list_users (skipping its two header lines);
        # fallback: enumerate account symlinks under the CWP services directory.
        try:
            output = subprocess.check_output(
                ["/scripts/list_users"],
                text=True,
                timeout=10,
                stderr=subprocess.DEVNULL,
            )
            lines = output.strip().splitlines()
            return [line.split()[0] for line in lines[2:] if line.strip()]
        except Exception:
            output = subprocess.check_output(
                [
                    "find",
                    "/usr/local/cwpsrv/var/services/users",
                    "-maxdepth",
                    "1",
                    "-type",
                    "l",
                    "-printf",
                    "%f\n",
                ],
                text=True,
                timeout=10,
                stderr=subprocess.DEVNULL,
            )
            return [line.strip() for line in output.splitlines() if line.strip()]
    def cwp_domain_owner(domain_name):
        # Preferred: `whoowns`; fallback: query the root_cwp database across
        # main domains, addon domains, and subdomains.
        try:
            owner_name = subprocess.check_output(
                ["whoowns", domain_name],
                text=True,
                timeout=5,
                stderr=subprocess.DEVNULL,
            ).strip()
            return owner_name or None
        except Exception:
            logger.warning("'whoowns' failed for %s, falling back to SQL query", domain_name)
        query = """
SELECT owner FROM (
SELECT username AS owner, domain FROM user
UNION ALL
SELECT user AS owner, domain FROM domains
UNION ALL
SELECT user AS owner, CONCAT(subdomain, '.', domain) AS domain FROM subdomains
) AS all_owners
WHERE domain = %s
LIMIT 1;
"""
        with get_mysql_connection("root_cwp") as conn:
            with conn.cursor() as cursor:
                cursor.execute(query, (domain_name,))
                result = cursor.fetchone()
        if not result:
            return None
        # Rows come back as dicts; the single selected column is the owner.
        return next(iter(result.values()))
    def baremetal_users():
        # Count login-capable users: UID >= UID_MIN (from /etc/login.defs,
        # default 1000) with an interactive shell.
        minimum_uid = 1000
        try:
            with open("/etc/login.defs", encoding="utf-8") as login_defs_file:
                for line in login_defs_file:
                    if line.strip().startswith("UID_MIN"):
                        parts = line.split()
                        if len(parts) >= 2:
                            minimum_uid = int(parts[1])
                        break
        except Exception:
            logger.warning("Failed to parse UID_MIN from /etc/login.defs")
        invalid_shells = {"/sbin/nologin", "/usr/sbin/nologin", "/bin/false"}
        login_users = [
            entry.pw_name
            for entry in pwd.getpwall()
            if entry.pw_uid >= minimum_uid and entry.pw_shell not in invalid_shells
        ]
        return len(login_users), login_users
    panel_type = (getattr(report, "panel_type", "") or "").strip()
    managed_domain = (getattr(report, "managed_domain", "") or "").strip()
    user_list = []
    user_count = 0
    domain_owner = None
    if panel_type == "cPanel":
        domain_owner = cpanel_domain_owner(managed_domain)
        if not domain_owner:
            logger.error(
                "Unable to determine cPanel domain owner for %s; report.domain_owner will remain None",
                managed_domain,
            )
        user_count, user_list = cpanel_users()
    elif panel_type in {"Control Web Panel", "CWP"}:
        try:
            user_list = cwp_user_list()
            user_count = len(user_list)
            domain_owner = cwp_domain_owner(managed_domain)
            if domain_owner:
                logger.info("Domain owner for %s via CWP detection: %s", managed_domain, domain_owner)
            else:
                logger.warning("No domain owner found for %s via CWP detection", managed_domain)
        except Exception as exc:
            user_count = f"[Error] Unexpected exception: {str(exc)}\n"
            logger.exception("Failed to get CWP user data")
    elif panel_type == "UltraStack ONE":
        # UltraStack ONE images are single-account WordPress deployments.
        domain_owner = "wordpress"
        user_count = 1
        user_list = ["wordpress"]
    elif panel_type == "Baremetal":
        user_count, user_list = baremetal_users()
        domain_owner = None
    safe_setattr(report, "domain_owner", domain_owner)
    safe_setattr(report, "user_list", user_list)
    safe_setattr(report, "user_count", user_count)
    return getattr(report, "user_count", user_count)
def _reg_normalize_date(date_str):
value = (date_str or "").strip()
value = value.split("T", 1)[0].split(" ", 1)[0]
if re.match(r"^\d{4}\.\d{2}\.\d{2}$", value):
value = value.replace(".", "-")
return value
def _reg_unique_ordered(sequence):
seen = set()
for item in (sequence or []):
if item not in seen:
seen.add(item)
yield item
def _reg_build_ssl_context():
    """Return a default TLS context, loading a fallback CA bundle when the
    platform context has no CA certificates of its own."""
    def bundle_candidates():
        # An explicit override wins; then try common distro bundle locations.
        override = os.environ.get("SSL_CERT_FILE")
        if override:
            yield override
        yield from (
            "/etc/pki/tls/certs/ca-bundle.crt",
            "/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem",
            "/etc/ssl/certs/ca-bundle.crt",
        )
    context = ssl.create_default_context()
    if context.get_ca_certs():
        # System trust store is already populated; nothing more to do.
        return context
    for ca_path in bundle_candidates():
        try:
            if ca_path and Path(ca_path).is_file():
                context.load_verify_locations(cafile=ca_path)
                logger.debug("Loaded CA bundle for RDAP TLS verify: %s", ca_path)
                break
        except Exception as exc:
            logger.debug("Failed loading CA bundle %s for RDAP: %s", ca_path, exc)
    return context
def _reg_normalize_statuses(status_list):
    """Capitalize status strings, de-duplicating raw values while keeping order."""
    seen = set()
    normalized = []
    # De-duplication happens on the raw strings (before capitalize), matching
    # the previous behavior of filtering through a unique-ordered pass first.
    for status in (status_list or []):
        if status in seen:
            continue
        seen.add(status)
        normalized.append(status.capitalize())
    return normalized
def _reg_normalize_nameservers(ns_list):
cleaned = {(ns or "").rstrip(".").lower() for ns in (ns_list or []) if ns}
return sorted(cleaned, key=str.lower)
def _reg_extract_tld(domain):
if "." in domain:
return domain.split(".", 1)[1].lower()
return None
def _reg_fetch_json(url, timeout=15):
    """GET *url* expecting a JSON body; raises HTTPError for any non-200 status."""
    http_request = request.Request(url, headers={"Accept": "application/json"})
    tls_context = _reg_build_ssl_context()
    with request.urlopen(http_request, timeout=timeout, context=tls_context) as response:
        if response.status != 200:
            raise urlerror.HTTPError(url, response.status, f"HTTP {response.status}", response.headers, None)
        payload = response.read()
    return json.loads(payload.decode("utf-8", errors="replace"))
def _reg_rdap_endpoints(domain):
    """Build the ordered list of RDAP URLs to try for *domain*.

    Always starts with the rdap.org aggregator, then appends the TLD's
    authoritative endpoints from IANA's RDAP bootstrap registry, which is
    cached in /tmp for 24 hours. Duplicates are removed preserving order.
    """
    candidates = [f"https://rdap.org/domain/{domain}"]
    tld = _reg_extract_tld(domain)
    if not tld:
        return candidates
    cache_path = "/tmp/iana_rdap_dns.json"
    iana_data = None
    # Reuse the cached bootstrap file if it is younger than 24 hours.
    try:
        cache_stat = os.stat(cache_path)
        if cache_stat and (time.time() - cache_stat.st_mtime) < 86400:
            with open(cache_path, "r", encoding="utf-8") as handle:
                iana_data = json.load(handle)
    except Exception:
        iana_data = None
    if not iana_data:
        try:
            iana_data = _reg_fetch_json("https://data.iana.org/rdap/dns.json", timeout=10)
            # Cache write is best-effort; failure just means re-fetch next run.
            try:
                with open(cache_path, "w", encoding="utf-8") as handle:
                    json.dump(iana_data, handle)
            except Exception:
                pass
        except Exception:
            iana_data = None
    # Bootstrap format: "services" is a list of [[tlds...], [base_urls...]].
    services = iana_data.get("services") if isinstance(iana_data, dict) else None
    if isinstance(services, list):
        for service in services:
            if not isinstance(service, list) or len(service) < 2:
                continue
            tlds, bases = service[0], service[1]
            if not any(tld.lower() == str(t).lower() for t in (tlds or [])):
                continue
            for base in (bases or []):
                base = (base or "").rstrip("/")
                if base:
                    candidates.append(f"{base}/domain/{domain}")
    # De-duplicate while keeping first-seen order.
    seen = set()
    deduped = []
    for url in candidates:
        if url not in seen:
            seen.add(url)
            deduped.append(url)
    return deduped
def _reg_parse_rdap(rdap_data):
    """Extract registrar, expiry, statuses, and nameservers from an RDAP
    domain object, returning them as a normalized dict."""
    statuses = list(rdap_data.get("status", []) or [])
    registrar = None
    # The registrar's display name lives in the "fn" property of the vCard
    # attached to the entity carrying the "registrar" role.
    for entity in (rdap_data.get("entities", []) or []):
        entity_roles = entity.get("roles") or []
        if not any((role or "").lower() == "registrar" for role in entity_roles):
            continue
        vcard = entity.get("vcardArray", [])
        if isinstance(vcard, list) and len(vcard) >= 2:
            for prop in vcard[1]:
                if isinstance(prop, list) and len(prop) >= 4 and prop[0] == "fn":
                    registrar = str(prop[3]).strip() or None
                    break
        if registrar:
            break
    # The first "expiration" event holds the expiry timestamp.
    expiry_raw = None
    for event in (rdap_data.get("events", []) or []):
        if (event.get("eventAction", "") or "").lower() == "expiration":
            expiry_raw = event.get("eventDate")
            break
    nameservers = [
        ns_obj.get("ldhName") or ns_obj.get("unicodeName")
        for ns_obj in (rdap_data.get("nameservers", []) or [])
    ]
    return {
        "registrar": registrar or None,
        "expiry": _reg_normalize_date(expiry_raw) if expiry_raw else None,
        "statuses": _reg_normalize_statuses(statuses),
        "nameservers": _reg_normalize_nameservers(nameservers),
        "has_object": True,
    }
def _reg_run_cmd(command_args, timeout_seconds=15):
proc = subprocess.run(
command_args,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
text=True,
timeout=timeout_seconds,
check=False,
)
return proc.returncode, (proc.stdout or ""), (proc.stderr or "")
def _reg_whois_referral_server(domain, whois_path):
    """Ask IANA's WHOIS for the TLD's authoritative whois server.

    Returns the server hostname, or None on any failure (missing TLD,
    missing binary path, non-zero exit, or no "whois:" line in the output).
    """
    tld = _reg_extract_tld(domain)
    if not tld or not whois_path:
        return None
    try:
        returncode, stdout, _ = _reg_run_cmd(
            [whois_path, "-h", "whois.iana.org", tld], timeout_seconds=10
        )
        if returncode != 0 or not stdout:
            return None
        # IANA answers with a "whois: <server>" line for delegated TLDs.
        match = re.search(r"^\s*whois:\s*(\S+)\s*$", stdout, re.I | re.M)
        return match.group(1).strip() if match else None
    except Exception:
        return None
def _reg_parse_whois(whois_text):
    """Parse raw WHOIS output into registrar/expiry/status/nameserver fields.

    Returns a dict with keys: registrar, expiry (normalized date or None),
    statuses, nameservers, and saw_domain_name (whether a "Domain Name:"
    line appeared — used by the caller as a registration signal).
    """
    registrar_re = re.compile(r"^\s*(Registrar|Sponsoring Registrar|Registrar Name)\s*:\s*(.+)$", re.I)
    # Expiry labels vary by registry (gTLDs, ccTLDs, .ru's "paid-till", ...).
    expiry_re = re.compile(
        r"^\s*(Registry Expiry Date|Registrar Registration Expiration Date|Expiry Date|Expiration Time|paid-till|Expiration Date)\s*:\s*(.+)$",
        re.I,
    )
    ns_line_re = re.compile(r"^\s*Name Server\s*:\s*([A-Za-z0-9\.\-]+)", re.I)
    # Status values may carry a trailing ICANN URL; the regex strips it off.
    status_re = re.compile(r"^\s*(Domain Status|Status)\s*:\s*([^\s].*?)(?:\s+https?://\S+)?\s*$", re.I)
    domain_name_re = re.compile(r"^\s*Domain Name\s*:\s*(.+)$", re.I)
    registrar = None
    expiry_raw = None
    status_list = []
    nameservers = set()
    saw_domain_name = False
    for raw_line in (whois_text or "").splitlines():
        line = (raw_line or "").strip()
        if not line:
            continue
        # First registrar/expiry match wins; NS and status lines accumulate.
        if not registrar:
            match = registrar_re.match(line)
            if match:
                registrar = match.group(2).strip() or None
                continue
        if not expiry_raw:
            match = expiry_re.match(line)
            if match:
                expiry_raw = match.group(2).strip() or None
                continue
        match = ns_line_re.match(line)
        if match:
            nameservers.add(match.group(1).strip().rstrip(".").lower())
            continue
        match = status_re.match(line)
        if match:
            status_list.append(match.group(2).strip())
            continue
        if not saw_domain_name:
            match = domain_name_re.match(line)
            if match:
                saw_domain_name = True
    return {
        "registrar": registrar or None,
        "expiry": _reg_normalize_date(expiry_raw) if expiry_raw else None,
        "statuses": _reg_normalize_statuses(status_list),
        "nameservers": sorted(nameservers, key=str.lower),
        "saw_domain_name": saw_domain_name,
    }
def _reg_whois_not_found(whois_text):
low = (whois_text or "").lower()
signals = (
"no match for",
"not found",
"status: available",
"status: free",
"no data found",
"no entries found",
"query_status: 220",
"domain you requested is not known",
"this domain name has not been registered",
"object does not exist",
"no such domain",
)
return any(sig in low for sig in signals)
def _reg_reset_fields(report):
report.domain_registrar = None
report.domain_reg_expiry = None
report.domain_reg_status = None
report.whois_nameservers = []
report.is_registered = None
def get_registration_info(report):
    """Resolve registrar/expiry/status/nameserver data for report.managed_domain.

    Tries each RDAP endpoint first; on total RDAP failure falls back to the
    `whois` binary. Populates the report's registration fields (see
    _reg_reset_fields) and returns a formatted summary string, or an
    "[Error]"/"[Notice]" message on failure.
    """
    domain = (getattr(report, "managed_domain", "") or "").strip().lower()
    _reg_reset_fields(report)
    if not domain or "." not in domain:
        logger.error("get_registration_info called without a valid report.managed_domain")
        return "[Error] Managed domain is not set."
    # --- RDAP: the first endpoint returning parseable JSON wins.
    for url in _reg_rdap_endpoints(domain):
        try:
            logger.debug("RDAP lookup: %s", url)
            data = _reg_fetch_json(url, timeout=20)
        except Exception as exc:
            logger.debug("RDAP candidate failed: %s -> %s", url, exc)
            continue
        try:
            fields = _reg_parse_rdap(data)
        except Exception:
            # A response that fetched but won't parse is unlikely to parse
            # elsewhere — stop trying RDAP and go straight to WHOIS.
            logger.exception("RDAP parse error; falling back to WHOIS.")
            break
        report.domain_registrar = fields["registrar"]
        report.domain_reg_expiry = fields["expiry"]
        report.domain_reg_status = ", ".join(fields["statuses"]) if fields["statuses"] else None
        report.whois_nameservers = fields["nameservers"]
        report.is_registered = bool(fields["has_object"])
        logger.info("[RDAP] Registrar: %s", report.domain_registrar or "Unknown")
        logger.info("[RDAP] Expiration: %s", report.domain_reg_expiry or "Unknown")
        logger.info("[RDAP] Status: %s", report.domain_reg_status or "Unknown")
        logger.info(
            "[RDAP] Nameservers: %s",
            ", ".join(report.whois_nameservers) if report.whois_nameservers else "None",
        )
        return report.reg_format_summary(
            report.domain_registrar,
            report.whois_nameservers,
            report.domain_reg_expiry,
            report.domain_reg_status,
        )
    # --- WHOIS fallback.
    logger.info("Attempting WHOIS fallback.")
    whois_path = shutil.which("whois")
    if not whois_path:
        logger.warning("whois binary not found in PATH; cannot do WHOIS fallback.")
        report.is_registered = None
        return "[Notice] Unable to determine domain registration: RDAP failed and WHOIS is not installed."
    referral_server = _reg_whois_referral_server(domain, whois_path)
    whois_output = ""
    # Prefer the TLD's referral server, then a plain default-server query.
    for command_args in (
        ([whois_path, "-h", referral_server, domain] if referral_server else None),
        [whois_path, domain],
    ):
        if not command_args:
            continue
        try:
            logger.debug("WHOIS command: %s", " ".join(command_args))
            returncode, stdout, stderr = _reg_run_cmd(command_args, timeout_seconds=35)
            output = (stdout or "").strip()
            # Accept a non-zero exit if the output still looks substantive.
            if output and (returncode == 0 or len(output) > 50):
                whois_output = output
                break
            if stderr:
                logger.debug(
                    "WHOIS attempt returncode %s. stderr: %s",
                    returncode,
                    (stderr or "")[:200],
                )
        except subprocess.TimeoutExpired:
            logger.warning("WHOIS attempt timed out; trying next option.")
        except Exception as exc:
            logger.warning("WHOIS attempt failed: %s; trying next option.", exc)
    if not whois_output:
        report.is_registered = None
        return "[Error] WHOIS failed or returned no output."
    if _reg_whois_not_found(whois_output):
        logger.info("WHOIS indicates %s is not registered.", domain)
        _reg_reset_fields(report)
        report.is_registered = False
        return "[Notice] Domain is not registered."
    whois_lower = whois_output.lower()
    if "limit exceeded" in whois_lower or "quota exceeded" in whois_lower or "please try again later" in whois_lower:
        logger.warning("WHOIS rate limit or throttle indicated.")
        report.is_registered = None
        return "[Error] WHOIS rate limit exceeded. Try again later."
    fields = _reg_parse_whois(whois_output)
    report.domain_registrar = fields["registrar"]
    report.domain_reg_expiry = fields["expiry"]
    report.domain_reg_status = ", ".join(fields["statuses"]) if fields["statuses"] else None
    report.whois_nameservers = fields["nameservers"]
    # No registrar/expiry/NS/status and no "Domain Name:" line at all —
    # treat the domain as unregistered rather than guessing.
    if not any(
        [
            bool(report.domain_registrar),
            bool(report.domain_reg_expiry),
            bool(report.whois_nameservers),
            bool(report.domain_reg_status),
            bool(fields.get("saw_domain_name", False)),
        ]
    ):
        logger.info("WHOIS parsing produced no registration signals for %s; treating as unregistered.", domain)
        report.is_registered = False
        return "[Notice] Domain is not registered."
    report.is_registered = True
    logger.info("[WHOIS] Registrar: %s", report.domain_registrar or "Unknown")
    logger.info("[WHOIS] Expiration: %s", report.domain_reg_expiry or "Unknown")
    logger.info("[WHOIS] Status: %s", report.domain_reg_status or "Unknown")
    logger.info(
        "[WHOIS] Nameservers: %s",
        ", ".join(report.whois_nameservers) if report.whois_nameservers else "None",
    )
    return report.reg_format_summary(
        report.domain_registrar,
        report.whois_nameservers,
        report.domain_reg_expiry,
        report.domain_reg_status,
    )
def _dns_run_dig(name, rrtype, short=True, timeout_seconds=5):
    """Run dig for *name*/*rrtype*; return stripped stdout, or "" on any failure."""
    command = ["dig"]
    if short:
        command.append("+short")
    command.extend([rrtype, name])
    try:
        output = subprocess.check_output(command, text=True, timeout=timeout_seconds)
    except Exception as exc:
        logger.debug("dig failed for %s %s: %s", name, rrtype, exc)
        return ""
    return output.strip()
def _dns_a_records(name):
    """Resolve *name* to its IPv4 A records via dig.

    dig +short output may interleave CNAME targets with addresses, so every
    line is validated as a genuine IPv4 address before being kept. The old
    regex (\\d{1,3} per octet) accepted impossible addresses such as
    999.999.999.999; ipaddress enforces the real 0-255 octet range.
    """
    output_text = _dns_run_dig(name, "a", short=True)
    records = []
    for line in output_text.splitlines():
        candidate = line.strip()
        try:
            if isinstance(ipaddress.ip_address(candidate), ipaddress.IPv4Address):
                records.append(candidate)
        except ValueError:
            # Not an IP at all (e.g. a CNAME target line) — skip it.
            continue
    return records
def _dns_txt_records(name):
    """Return the TXT records for *name* with surrounding double quotes removed."""
    records = []
    for raw_line in _dns_run_dig(name, "txt", short=True).splitlines():
        entry = raw_line.strip()
        # dig wraps each TXT string in double quotes; unwrap them.
        if entry.startswith('"') and entry.endswith('"'):
            entry = entry[1:-1]
        if entry:
            records.append(entry)
    return records
def _dns_mx_records(domain_name):
    """Return the domain's MX records as [(priority, target), ...],
    sorted with the lowest (most preferred) priority first."""
    records = []
    full_output = _dns_run_dig(domain_name, "mx", short=False)
    for answer_line in (full_output or "").splitlines():
        # Answer-section lines look like:
        #   example.com.  300  IN\tMX\t10 mail.example.com.
        if "\tMX\t" not in answer_line:
            continue
        fields = re.split(r"\s+", answer_line.strip())
        try:
            records.append((int(fields[-2]), fields[-1].rstrip(".")))
        except Exception:
            continue
    if not records:
        # Fallback: parse "+short" output of the form "10 mail.example.com."
        short_output = _dns_run_dig(domain_name, "mx", short=True)
        for answer_line in short_output.splitlines():
            match = re.match(r"^\s*(\d+)\s+(\S+)\.?$", answer_line)
            if match:
                records.append((int(match.group(1)), match.group(2).rstrip(".")))
    return sorted(records, key=lambda record: record[0])
def _dns_spf_record(domain_name):
    """Return the first TXT record starting with "v=spf1", or None."""
    for txt in _dns_txt_records(domain_name):
        if txt.lower().startswith("v=spf1"):
            return txt
    return None
def _dns_spf_display(domain_name):
    """Return (display_text, color_style) describing the domain's SPF record."""
    spf_record = _dns_spf_record(domain_name)
    if spf_record:
        return spf_record, "white"
    return "Missing, which can cause deliverability issues.", "red"
def _dns_dmarc_policy_summary(domain_name):
    """Summarize the domain's DMARC policy (the p= tag) for display."""
    dmarc_record = None
    for txt in _dns_txt_records(f"_dmarc.{domain_name}"):
        if txt.lower().startswith("v=dmarc1"):
            dmarc_record = txt
            break
    if not dmarc_record:
        return "Missing, which can cause deliverability issues."
    # The policy is the p= tag, at the start of the record or after a ";".
    policy_match = re.search(r"(?:^|;)\s*p\s*=\s*([^;\s]+)", dmarc_record, flags=re.I)
    if not policy_match:
        return "Present (policy missing or unparseable)"
    policy_value = policy_match.group(1).strip().lower()
    if policy_value == "none":
        return "None, which can cause deliverability issues."
    return policy_value
def _dns_dmarc_display(domain_name, report):
    """Return (summary, style) for the DMARC row, styled by the report's rules."""
    summary = _dns_dmarc_policy_summary(domain_name)
    return summary, report.dns_dmarc_style(summary)
def _dns_dkim_exists(domain_name):
    """Best-effort DKIM probe: try common selectors, then the apex TXT records."""
    common_selectors = ("default", "selector1", "selector2", "google", "mail", "dkim")
    for selector in common_selectors:
        selector_records = _dns_txt_records(f"{selector}._domainkey.{domain_name}")
        for record in selector_records:
            # DKIM keys carry p= (public key) and often k= (key type) tags.
            if "p=" in record or "k=" in record or "dkim" in record.lower():
                return True
    return any("dkim" in record.lower() for record in _dns_txt_records(domain_name))
def _dns_dkim_display(domain_name):
    """Return (display_text, color_style) for the DKIM row."""
    if _dns_dkim_exists(domain_name):
        return "Detected", "green"
    return "Missing, which can cause deliverability issues.", "red"
def _dns_mail_routing_status(report, mx_records, mx_top_target_a):
    """Classify mail routing as Local/External by comparing the top MX
    target's A records against the server's outbound mail IP.

    Side effect: sets report.local_email to True (local), False (external),
    or None (undetermined). Returns a human-readable status string.
    """
    try:
        mail_ip = getattr(report, "mail_IP", None)
        if not mail_ip:
            try:
                mail_ip = get_mail_ip(report)
            except Exception:
                logger.exception("get_mail_ip() failed while trying to determine mail routing IP")
        if mx_top_target_a and mail_ip:
            # Mail is "local" when this server's mail IP is among the A
            # records of the highest-priority MX target.
            if mail_ip in mx_top_target_a:
                report.local_email = True
                return "Local"
            report.local_email = False
            return "External"
        if mx_top_target_a and not mail_ip:
            report.local_email = None
            return "Unknown (MX resolves but outbound mail IP is unknown)"
        if not mx_records:
            report.local_email = None
            return "No MX records found"
        report.local_email = None
        return "Unknown"
    except Exception:
        logger.exception("Failed while determining local_email from MX and mail IP")
        report.local_email = None
        return "Unknown (error while determining mail routing)"
def get_dns_info(report):
    """Collect NS/A/MX/SPF/DMARC/DKIM data for report.managed_domain and
    return a formatted, multi-line summary string.

    Side effects: sets report.domain_resolves, and (via the mail-routing
    helper) report.local_email.
    """
    # Normalize the domain: strip scheme, path, and trailing dot; lowercase.
    apex_domain = re.sub(
        r"^\s*https?://",
        "",
        (getattr(report, "managed_domain", "") or "").strip(),
        flags=re.I,
    ).split("/", 1)[0].strip().rstrip(".").lower()
    if not apex_domain:
        return "[Error] managed_domain is empty"
    ns_records = [
        line.rstrip(".")
        for line in _dns_run_dig(apex_domain, "ns", short=True).splitlines()
        if line.strip()
    ]
    a_records = _dns_a_records(apex_domain)
    report.domain_resolves = bool(a_records)
    mx_records = _dns_mx_records(apex_domain)
    # Only the highest-priority MX target is resolved; it decides routing.
    mx_top_target_a = _dns_a_records(mx_records[0][1]) if mx_records else []
    mail_routing_status = _dns_mail_routing_status(report, mx_records, mx_top_target_a)
    www_cname_output = _dns_run_dig(f"www.{apex_domain}", "cname", short=True)
    www_cname = www_cname_output.splitlines()[0].rstrip(".") if www_cname_output else None
    # Only look up www A records when www is not a CNAME.
    www_a_records = _dns_a_records(f"www.{apex_domain}") if not www_cname else []
    spf_display, spf_style = _dns_spf_display(apex_domain)
    dmarc_display, dmarc_style = _dns_dmarc_display(apex_domain, report)
    dkim_display, dkim_style = _dns_dkim_display(apex_domain)
    lines = [
        report.dns_format_label_value("NS", ", ".join(ns_records) if ns_records else "None found"),
        report.dns_format_label_value("A", ", ".join(a_records) if a_records else "None found"),
        report.dns_format_label_value(
            "MX",
            (
                ", ".join([f"Priority: {prio} Target: {tgt}" for prio, tgt in mx_records])
                if mx_records
                else "None found"
            ),
        ),
        report.dns_format_label_value(
            "Highest Priority MX Target",
            ", ".join(mx_top_target_a) if mx_top_target_a else "None found",
        ),
        report.dns_format_label_value("Mail Routing (Based on DNS)", mail_routing_status),
        report.dns_format_label_value("WWW CNAME", www_cname or "None found"),
    ]
    if not www_cname:
        lines.append(
            report.dns_format_label_value("WWW A", ", ".join(www_a_records) if www_a_records else "None found")
        )
    lines.append(
        report.dns_format_label_value(
            "SPF",
            spf_display,
            value_style=spf_style,
        )
    )
    lines.append(
        report.dns_format_label_value(
            "DMARC Policy",
            dmarc_display,
            value_style=dmarc_style,
        )
    )
    lines.append(
        report.dns_format_label_value(
            "DKIM",
            dkim_display,
            value_style=dkim_style,
        )
    )
    return "\n".join(lines)
def get_ip_info(report):
    """Gather the server's main IP, all bound IPv4 addresses, and the IP the
    managed domain resolves to; store them on *report* and return a short
    formatted summary block.

    Sets report.main_ip, report.all_ips, and report.managed_domain_ip.
    """
    def run_command(command_args):
        return subprocess.check_output(command_args, text=True, timeout=5, stderr=subprocess.DEVNULL)
    def bold_label(label_text):
        # Normalize to exactly one trailing colon, rendered bold + white.
        return report.apply_color("white", report.apply_color("bold", f"{str(label_text).rstrip(':')}:"))
    def get_main_ip():
        # The "src" hint on a route to a public address is the primary egress IP.
        try:
            output = run_command(["ip", "route", "get", "1.1.1.1"])
            match = re.search(r"\bsrc\s+(\d+\.\d+\.\d+\.\d+)", output)
            return match.group(1) if match else ""
        except Exception as err:
            logger.debug("main ip detection failed: %s", err)
            return ""
    def get_all_ipv4():
        # All non-loopback IPv4 addresses bound on any interface, sorted numerically.
        ipv4_addresses = set()
        try:
            output = run_command(["ip", "-4", "addr"])
            for line in output.splitlines():
                line = line.strip()
                match = re.match(r"inet\s+(\d+\.\d+\.\d+\.\d+)/\d+\s", line)
                if match:
                    ip_address = match.group(1)
                    if ip_address != "127.0.0.1":
                        ipv4_addresses.add(ip_address)
        except Exception as err:
            logger.debug("all ipv4 detection failed: %s", err)
        def ip_sort_key(ip_text):
            # Numeric octet-wise sort; unparseable entries sort last.
            try:
                return tuple(int(part) for part in ip_text.split("."))
            except Exception:
                return (999, 999, 999, 999)
        return sorted(ipv4_addresses, key=ip_sort_key)
    def normalize_domain(raw_domain):
        # Strip scheme, any "user@" prefix, path, and trailing dot; lowercase.
        domain_value = (raw_domain or "").strip()
        domain_value = re.sub(r"^\s*https?://", "", domain_value, flags=re.I)
        domain_value = re.sub(r"^.*@", "", domain_value)
        domain_value = domain_value.split("/", 1)[0].rstrip(".").lower()
        return domain_value
    def resolve_domain_ipv4(domain_name):
        # Prefer dig; fall back to the system resolver on failure.
        if not domain_name or "." not in domain_name:
            return ""
        try:
            output = run_command(["dig", "+short", "A", domain_name])
            for line in output.splitlines():
                line = line.strip()
                if re.match(r"^\d+\.\d+\.\d+\.\d+$", line):
                    return line
        except Exception:
            pass
        try:
            _hostname, _aliases, addresses = socket.gethostbyname_ex(domain_name)
            for ip_address in addresses:
                if re.match(r"^\d+\.\d+\.\d+\.\d+$", ip_address):
                    return ip_address
        except Exception as err:
            logger.debug("dns resolve fallback failed for %s: %s", domain_name, err)
        return ""
    main_ip = get_main_ip()
    all_ips = get_all_ipv4()
    normalized_domain = normalize_domain(getattr(report, "managed_domain", ""))
    managed_domain_ip = resolve_domain_ipv4(normalized_domain)
    report.main_ip = main_ip
    report.all_ips = all_ips
    report.managed_domain_ip = managed_domain_ip
    managed_domain_display = getattr(report, "managed_domain", "") or "managed domain"
    summary_lines = [
        f"{bold_label('Server Host IP:')} {main_ip or 'Unknown'}",
        f"{bold_label('IPs on Server')} {len(all_ips)}",
        f"{bold_label(f'IP Assigned to {managed_domain_display}')} {managed_domain_ip or 'None'}",
    ]
    return "\n".join(summary_lines)
def get_web_stack(report):
    """
    Determine the front-end service on :80 and the upstream chain.

    Sets:
        - report.web_stack (list): ordered front-to-back service names,
          e.g. ["nginx", "varnish", "apache"].
        - report.vhost_path: path to the *Apache* vhost when Apache is the
          terminal backend; for standalone NGINX, the NGINX vhost path.
    """
    logger.info("Determining the server's web stack and vhost path for %s", report.managed_domain)
    # Reset both outputs up front. The previous version appended to whatever
    # report.web_stack already held, leaving stale entries behind on repeat
    # calls (and raising AttributeError when the attribute was never set).
    report.web_stack = []
    report.vhost_path = None
    report.port_80_service = get_service_on_port(80)
    logger.debug("Service on port 80: %s", report.port_80_service)
    if not report.port_80_service:
        logger.warning("No service found on port 80.")
        return
    svc = report.port_80_service.lower()
    if "nginx" in svc:
        report.web_stack.append("nginx")
        proxy_port = get_nginx_proxy_pass(report)
        if proxy_port:
            backend = (get_service_on_port(proxy_port) or "").lower()
            logger.debug("Backend service behind NGINX: %s", backend or "UNKNOWN")
            if "cache-main" in backend or "varnish" in backend:
                report.web_stack.append("varnish")
                logger.info("Varnish detected behind NGINX. Checking for backend port in Varnish config.")
                varnish_port = get_varnish_backend_port(report)
                final_backend = (get_service_on_port(varnish_port) or "").lower()
                if "httpd" in final_backend:
                    report.web_stack.append("apache")
                    report.vhost_path = f"/usr/local/apache/conf.d/vhosts/{report.managed_domain}.conf"
                else:
                    logger.warning("Unknown final backend behind Varnish: %s", final_backend or "UNKNOWN")
            elif "httpd" in backend:
                report.web_stack.append("apache")
                report.vhost_path = f"/usr/local/apache/conf.d/vhosts/{report.managed_domain}.conf"
            else:
                # Unknown upstream: still surface the NGINX vhost so there is
                # a config file to inspect.
                logger.warning("Unknown backend behind NGINX: %s", backend or "UNKNOWN")
                report.vhost_path = get_nginx_vhost_path(report)
        else:
            logger.info("NGINX is standalone.")
            report.vhost_path = get_nginx_vhost_path(report)
    elif "httpd" in svc:
        report.web_stack.append("apache")
        report.vhost_path = f"/usr/local/apache/conf.d/vhosts/{report.managed_domain}.conf"
    else:
        report.web_stack.append(svc)
        logger.warning("Unrecognized port 80 service: %s", report.port_80_service)
    logger.info("Web stack detected: %s", report.web_stack)
    logger.info("Vhost path resolved: %s", report.vhost_path)
def _cpanel_try_api_docroot(domain):
    """Look up *domain*'s document root via the WHM "domainuserdata" API.

    Returns the stripped path, or "" on any failure (logged, never raised).
    """
    docroot = ""
    try:
        payload = whmapi1("domainuserdata", {"domain": domain})
        userdata = payload.get("data", {}).get("userdata", {}) or {}
        docroot = (userdata.get("documentroot") or "").strip()
    except Exception:
        logger.exception("WHM API domainuserdata failed for %s", domain)
    return docroot
def _cpanel_try_userdata_docroot(owner, domain):
    """Read the documentroot from the on-disk cPanel userdata files.

    Tries the bare, .yaml, and .json variants under
    /var/cpanel/userdata/<owner>/<domain>; returns "" when none yields one.
    """
    base_path = f"/var/cpanel/userdata/{owner}/{domain}"
    for candidate_path in (base_path, f"{base_path}.yaml", f"{base_path}.json"):
        if not os.path.exists(candidate_path):
            continue
        try:
            with open(candidate_path, "r", encoding="utf-8") as userdata_file:
                # .json files are JSON; the bare and .yaml files are YAML.
                if candidate_path.endswith(".json"):
                    userdata = json.load(userdata_file)
                else:
                    userdata = yaml.safe_load(userdata_file)
        except Exception:
            logger.exception("Failed reading cPanel userdata file: %s", candidate_path)
            continue
        if isinstance(userdata, dict):
            docroot_value = (userdata.get("documentroot") or "").strip()
            if docroot_value:
                return docroot_value
    return ""
def _cwp_guess_docroot_from_db(domain):
    """Guess a CWP document root by querying the root_cwp database.

    Checks main domains (/home/<user>/public_html), addon domains (stored
    path), and subdomains in turn; returns "" on any failure or no match.
    """
    try:
        with get_mysql_connection("root_cwp") as conn, conn.cursor() as cursor:
            cursor.execute(
                """
SELECT username, CONCAT('/home/', username, '/public_html') AS docroot
FROM user WHERE domain = %s
UNION
SELECT user AS username, path AS docroot FROM domains WHERE domain = %s
UNION
SELECT user AS username, CONCAT('/home/', user, '/public_html/', subdomain) AS docroot
FROM subdomains WHERE CONCAT(subdomain, '.', domain) = %s
LIMIT 1;
""",
                (domain, domain, domain),
            )
            row = cursor.fetchone()
        # Rows come back as dicts; missing/NULL docroot degrades to "".
        return (row.get("docroot") or "").strip() if row else ""
    except Exception:
        logger.exception("Failed querying CWP root_cwp for docroot guess (%s)", domain)
        return ""
def _cwp_try_apache_vhost_docroot(vhost_path):
if not vhost_path or not os.path.exists(vhost_path):
return ""
try:
with open(vhost_path, "r", encoding="utf-8", errors="replace") as vhost_file:
vhost_content = vhost_file.read()
match = re.search(r'DocumentRoot\s+("?)(\/[^\s"]+)\1', vhost_content)
return match.group(2).strip() if match else ""
except Exception:
logger.exception("Failed parsing Apache vhost for DocumentRoot: %s", vhost_path)
return ""
def get_docroot(report):
    """Resolve the document root for report.managed_domain by panel type.

    Sets and returns report.docroot; on failure the value is an
    "[Error] ..." string rather than a raised exception.
    """
    logger.info("Starting docroot resolution for %s on [%s]", report.managed_domain, report.panel_type)
    panel_type = (getattr(report, "panel_type", "") or "").strip()
    if panel_type == "cPanel":
        domain = (getattr(report, "managed_domain", "") or "").strip()
        owner = (getattr(report, "domain_owner", "") or "").strip()
        if not domain:
            report.docroot = "[Error] No managed_domain set on report"
            logger.error(report.docroot)
            return report.docroot
        if not owner:
            report.docroot = "[Error] Unable to resolve cPanel domain owner. Docroot lookup skipped."
            logger.error(report.docroot)
            return report.docroot
        # Prefer the WHM API; fall back to the on-disk userdata files.
        api_docroot = _cpanel_try_api_docroot(domain)
        if api_docroot:
            report.docroot = api_docroot
            logger.info("cPanel docroot from WHM API: %s", report.docroot)
            return report.docroot
        fs_docroot = _cpanel_try_userdata_docroot(owner, domain)
        if fs_docroot:
            report.docroot = fs_docroot
            logger.info("cPanel docroot from userdata file: %s", report.docroot)
            return report.docroot
        report.docroot = "[Error] No userdata file found or 'documentroot' missing"
        logger.warning("cPanel docroot not found for %s under owner %s", domain, owner)
        return report.docroot
    if panel_type == "Control Web Panel":
        # DB-derived guess is the fallback for every CWP path below.
        guessed_docroot = _cwp_guess_docroot_from_db(getattr(report, "managed_domain", "") or "")
        web_stack = getattr(report, "web_stack", None) or []
        try:
            if "apache" in web_stack and getattr(report, "vhost_path", None):
                apache_docroot = _cwp_try_apache_vhost_docroot(report.vhost_path)
                report.docroot = apache_docroot or guessed_docroot or "[Error] Apache vhost missing DocumentRoot"
                if apache_docroot:
                    logger.info("Apache DocumentRoot: %s", report.docroot)
                else:
                    logger.warning("Apache DocumentRoot not found. Using fallback docroot: %s", report.docroot)
                return report.docroot
            if "nginx" in web_stack and "apache" not in web_stack:
                report.docroot = get_nginx_docroot(report) or guessed_docroot or "[Error] Could not determine NGINX docroot"
                logger.info("Nginx-only docroot: %s", report.docroot)
                return report.docroot
            report.docroot = guessed_docroot or "[Error] Unsupported or missing service in stack"
            logger.warning("Could not resolve docroot. Stack: %s", web_stack)
            return report.docroot
        except Exception:
            report.docroot = guessed_docroot or "[Error] Unexpected exception during CWP docroot resolution"
            logger.exception("Failed to resolve docroot for CWP")
            return report.docroot
    if panel_type == "UltraStack ONE":
        # Fixed layout on the UltraStack ONE image.
        report.docroot = "/home/wordpress/doc_root"
        logger.info("UltraStack ONE: docroot set to %s", report.docroot)
        return report.docroot
    report.docroot = "[Error] Unsupported panel type"
    logger.warning("Unsupported panel type: %s", report.panel_type)
    return report.docroot
def get_cms(report):
    """Detect which CMS (if any) lives in report.docroot by probing marker files.

    Rules are evaluated in order, most specific first (e.g. WooCommerce before
    plain WordPress), and the first matching rule wins.  Sets report.cms_type
    and returns the CMS display name, or "Unknown" when nothing matches or the
    docroot is missing/invalid.
    """
    logger.info("Detecting CMS for: %s", report.managed_domain)
    document_root = report.docroot
    # Bail out early when docroot resolution failed (None or an "[Error] ..." string).
    if not document_root or not os.path.isdir(document_root):
        report.cms_type = "Unknown"
        return report.cms_type
    def exists(rel_path, mode="any"):
        # mode: "f" = must be a regular file, "d" = must be a directory, else either.
        path = os.path.join(document_root, rel_path)
        if mode == "f":
            return os.path.isfile(path)
        if mode == "d":
            return os.path.isdir(path)
        return os.path.exists(path)
    def read_file(rel_path, max_bytes=200000):
        # Best-effort bounded read of a file under the docroot; "" on any failure.
        try:
            path = os.path.join(document_root, rel_path)
            if os.path.isfile(path):
                with open(path, "r", encoding="utf-8", errors="ignore") as fh:
                    return fh.read(max_bytes)
        except Exception:
            return ""
        return ""
    # Flatten composer.json "require" keys plus the raw JSON into one lowercase
    # blob so composer-distributed CMSes (Craft, Shopware, Sylius, ...) match.
    composer_raw = read_file("composer.json")
    composer_blob = ""
    if composer_raw:
        try:
            required = json.loads(composer_raw).get("require", {}) or {}
            composer_blob = (" ".join(required.keys()) + " " + composer_raw).lower()
        except Exception:
            # Malformed JSON: fall back to substring matching on the raw text.
            composer_blob = composer_raw.lower()
    # Ordered detection table: (display name, predicate). First hit wins, so
    # keep supersets (WooCommerce) ahead of their base CMS (WordPress).
    rules = [
        ("WordPress (WooCommerce)", lambda: exists("wp-config.php", "f") and exists("wp-content", "d") and exists("wp-includes", "d") and exists("wp-content/plugins/woocommerce", "d")),
        ("WordPress", lambda: exists("wp-config.php", "f") and exists("wp-content", "d") and exists("wp-includes", "d")),
        ("Joomla", lambda: exists("configuration.php", "f") and exists("administrator", "d") and exists("components", "d")),
        ("Drupal", lambda: exists("core", "d") and exists("sites", "d")),
        ("Magento", lambda: exists("app/etc/env.php", "f") and exists("bin/magento", "f")),
        ("PrestaShop", lambda: exists("config/settings.inc.php", "f") and exists("classes", "d")),
        ("OpenCart", lambda: exists("catalog", "d") and exists("admin", "d") and exists("system", "d")),
        ("MediaWiki", lambda: exists("LocalSettings.php", "f") and exists("includes", "d")),
        ("Moodle", lambda: exists("version.php", "f") and exists("lib/moodlelib.php", "f")),
        ("TYPO3", lambda: exists("typo3", "d") and exists("typo3conf/LocalConfiguration.php", "f")),
        ("Concrete CMS", lambda: exists("concrete", "d") and exists("application", "d")),
        ("MODX", lambda: exists("core", "d") and exists("manager", "d") and exists("connectors", "d")),
        ("OctoberCMS", lambda: exists("modules/system", "d") and exists("config/cms.php", "f")),
        ("Craft CMS", lambda: exists("craft", "d") or "craftcms/cms" in composer_blob),
        ("SilverStripe", lambda: "silverstripe/framework" in composer_blob or exists("app/_config", "d") or exists("mysite/_config", "d")),
        ("ExpressionEngine", lambda: exists("system/ee", "d")),
        ("phpBB", lambda: exists("config.php", "f") and exists("styles/prosilver", "d")),
        ("XenForo", lambda: exists("src/XF", "d") and exists("internal_data", "d")),
        ("MyBB", lambda: exists("inc/config.php", "f") and exists("inc/settings.php", "f")),
        ("Grav", lambda: exists("system", "d") and exists("user/config", "d") and "grav" in read_file("system/defines.php").lower()),
        ("DokuWiki", lambda: exists("doku.php", "f") and exists("conf/dokuwiki.php", "f")),
        ("Kirby", lambda: exists("kirby", "d") and exists("site/config", "d")),
        ("Shopware", lambda: "shopware" in composer_blob),
        ("Sylius", lambda: "sylius" in composer_blob),
        ("Ghost", lambda: exists("content", "d") and "ghost" in read_file("package.json").lower()),
    ]
    try:
        for cms_name, check in rules:
            if check():
                report.cms_type = cms_name
                return cms_name
        report.cms_type = "Unknown"
        return "Unknown"
    except Exception:
        # A single broken predicate shouldn't take down the whole report.
        logger.exception("Unhandled CMS detection error")
        report.cms_type = "Unknown"
        return "Unknown"
def _site_stats_prepare_temp(temp_directory="/root/SPARTA_tmp/"):
    """Create the scratch directory for a site-stats run and return its artifact paths."""
    header_path = os.path.join(temp_directory, "curl_headers.txt")
    trace_path = os.path.join(temp_directory, "curl_trace.txt")
    bundle = SiteStatsArtifacts(
        temp_directory=temp_directory,
        header_dump_path=header_path,
        trace_output_path=trace_path,
    )
    os.makedirs(bundle.temp_directory, exist_ok=True)
    return bundle
def _site_stats_curl_format_string():
return (
"\n--- Performance Metrics ---\n"
"URL: %{url_effective}\n"
"HTTP Response Code: %{http_code}\n"
"Redirects Followed: %{num_redirects}\n"
"TLS Handshake: %{time_appconnect}s\n"
"Time to First Byte: %{time_starttransfer}s\n"
"Total Time: %{time_total}s\n"
"Download Size: %{size_download} bytes\n"
"Download Speed: %{speed_download} B/s\n"
)
def _site_stats_build_curl_command(url, header_dump_path):
    """Assemble the curl argv for the performance probe against *url*.

    Follows redirects (-L), discards the body (-o /dev/null), dumps response
    headers to *header_dump_path*, and emits timing metrics via -w.
    """
    command = ["curl", "-s", "-o", "/dev/null"]
    command += ["-A", "IMH"]
    command += ["-H", "Accept-Encoding: gzip"]
    command += ["-L", "-w", _site_stats_curl_format_string()]
    command += ["--dump-header", header_dump_path]
    command += ["-v", url]
    return command
def _site_stats_coerce_text(value):
if value is None:
return ""
if isinstance(value, str):
return value
if isinstance(value, bytes):
return value.decode("utf-8", errors="replace")
if isinstance(value, (list, tuple)):
return "\n".join(_site_stats_coerce_text(item) for item in value)
return str(value)
def _site_stats_write_text_file(path, content):
    """Write *content* (coerced to text) to *path*; True on success, False otherwise."""
    try:
        text = _site_stats_coerce_text(content)
        with open(path, "w", encoding="utf-8", errors="replace") as out_handle:
            out_handle.write(text if text else "")
        return True
    except Exception:
        logger.debug("Failed to write %s", path, exc_info=True)
        return False
def _site_stats_run_curl(curl_command, url, trace_output_path, timeout_seconds=25):
try:
return subprocess.run(
curl_command,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
text=True,
encoding="utf-8",
errors="replace",
timeout=timeout_seconds,
check=False,
)
except subprocess.TimeoutExpired as timeout_error:
stderr_text = _site_stats_coerce_text(
getattr(timeout_error, "stderr", None)
or getattr(timeout_error, "output", None)
or ""
)
_site_stats_write_text_file(trace_output_path, stderr_text)
logger.error(
"curl timed out after %ss for %s",
getattr(timeout_error, "timeout", "unknown"),
url,
)
if stderr_text:
stderr_lines = stderr_text.splitlines()
logger.debug(
"curl stderr tail (timeout):\n%s",
"\n".join(stderr_lines[-40:] if len(stderr_lines) > 40 else stderr_lines),
)
return f"[Error] curl timed out after {getattr(timeout_error, 'timeout', 'unknown')}s"
except FileNotFoundError:
logger.exception("curl executable not found when attempting to run.")
return "[Error] curl executable not found."
except Exception as exc:
logger.exception("Unexpected exception running curl: %s", exc)
return "[Error] Failed to execute curl."
def _site_stats_parse_metrics(stdout_text):
parsed_metrics = {}
for line in (stdout_text or "").splitlines():
if ": " not in line:
continue
key, value = line.split(": ", 1)
parsed_metrics[key.strip()] = value.strip()
return parsed_metrics
def _site_stats_parse_redirects(stderr_text):
redirect_entries = []
http_code = "???"
http_protocol = "HTTP/1.1"
for line in (stderr_text or "").splitlines():
if line.startswith("< HTTP"):
parts = line.split()
if len(parts) >= 3:
http_protocol, http_code = parts[1], parts[2]
elif line.startswith("< Location:"):
location = line.split(": ", 1)[1].strip()
redirect_entries.append(f"{http_code} {location} ({http_protocol})")
return redirect_entries
def _site_stats_final_headers_from_dump(header_dump_path):
headers_by_request = {}
header_index = -1
try:
with open(header_dump_path, "r", encoding="utf-8", errors="replace") as header_file:
for line in header_file:
if line.lower().startswith("http/"):
header_index += 1
headers_by_request[header_index] = []
if header_index >= 0:
headers_by_request[header_index].append(line.strip())
except Exception:
logger.warning("Failed to read/parse dumped headers for final response.")
return []
return headers_by_request.get(header_index, [])
def _site_stats_has_gzip(final_headers):
return any("content-encoding: gzip" in (header or "").lower() for header in final_headers)
def _site_stats_has_cache_hit(final_headers):
for header in final_headers:
header_lower = (header or "").lower()
if "cache" not in header_lower:
continue
if "miss" in header_lower:
continue
if "hit" in header_lower or "hit from" in header_lower or "cache hit" in header_lower:
return True
return False
def _site_stats_cleanup(paths):
for path in paths:
try:
if path and os.path.exists(path):
os.remove(path)
except Exception:
logger.warning("Failed to remove %s", path)
def _site_stats_parse_seconds(value_text):
if value_text is None:
return None
raw_text = str(value_text).strip()
if raw_text.endswith("s"):
raw_text = raw_text[:-1]
try:
return float(raw_text)
except Exception:
return None
def get_site_stats(report):
    """Collect site performance metrics for report.managed_domain via curl.

    Runs a redirect-following curl probe, then parses timing metrics, the
    redirect chain, and gzip / cache-hit indicators from its output.  Sets
    report.http_status, report.has_gzip, and report.has_cache_hit, and
    returns the rendered summary string (or an error/notice string on
    failure).  Scratch files live under /root/SPARTA_tmp/ and are removed
    on every exit path.
    """
    logger.info("Gathering site performance stats for: %s", report.managed_domain)
    curl_path = shutil.which("curl")
    if not curl_path:
        logger.error("curl binary not found in PATH. Cannot gather site stats.")
        return "[Error] curl not found on this system."
    try:
        artifacts = _site_stats_prepare_temp()
    except Exception:
        logger.exception("[Error] Failed to create temp directory for site stats.")
        return "[Error] Failed to create temp directory for site stats."
    url = getattr(report, "managed_domain", "") or ""
    curl_command = _site_stats_build_curl_command(url, artifacts.header_dump_path)
    logger.debug("Using curl at: %s", curl_path)
    logger.debug("curl command: %s", " ".join(curl_command))
    logger.debug("Dump headers -> %s ; verbose/err -> %s", artifacts.header_dump_path, artifacts.trace_output_path)
    process_or_error = _site_stats_run_curl(curl_command, url, artifacts.trace_output_path, timeout_seconds=25)
    # A str return means curl could not be executed (timeout / missing binary).
    if isinstance(process_or_error, str):
        _site_stats_cleanup([artifacts.header_dump_path, artifacts.trace_output_path])
        return process_or_error
    process = process_or_error
    # Persist the verbose trace for post-mortem debugging before parsing.
    _site_stats_write_text_file(artifacts.trace_output_path, process.stderr or "")
    if process.returncode != 0:
        logger.error("curl exited with non-zero status %s for %s", process.returncode, url)
        stderr_tail = (process.stderr or "").splitlines()[-40:]
        logger.debug("curl stderr tail:\n%s", "\n".join(stderr_tail))
        _site_stats_cleanup([artifacts.header_dump_path, artifacts.trace_output_path])
        return "Unable to retrieve performance metrics at this time due to error:\n" + "\n".join(stderr_tail)
    try:
        parsed_metrics = _site_stats_parse_metrics(process.stdout)
        report.http_status = parsed_metrics.get("HTTP Response Code")
        redirect_entries = _site_stats_parse_redirects(process.stderr)
        # Only the final response's headers matter for gzip/cache checks.
        final_headers = _site_stats_final_headers_from_dump(artifacts.header_dump_path)
        report.has_gzip = _site_stats_has_gzip(final_headers)
        report.has_cache_hit = _site_stats_has_cache_hit(final_headers)
        logger.info("HTTP Response Code: %s", report.http_status)
        logger.info("GZIP Compression: %s", report.has_gzip)
        logger.info("Cache Hit Detected: %s", report.has_cache_hit)
        output_text = report.site_stats_render_summary(parsed_metrics, redirect_entries)
        _site_stats_cleanup([artifacts.header_dump_path, artifacts.trace_output_path])
        return output_text
    except Exception as parse_error:
        logger.exception("Failed to parse curl output for site stats")
        _site_stats_cleanup([artifacts.header_dump_path, artifacts.trace_output_path])
        return f"[Error] Failed to parse site stats: {parse_error}"
def account_build_section(report):
    """Build the "Account, Website, and Domain Summary" report section.

    Orchestrates the per-topic collectors (users, IP, registration, DNS, web
    stack, docroot, CMS, site stats), advancing the shared progress bar one
    tick per step, and returns the fully formatted section text.  Individual
    collector failures are downgraded to colored notices so one bad probe
    never loses the whole section.
    """
    logger.debug("Building 'Account & Website' section")
    section_progress = SectionProgress(
        report.progress,
        (report.section_index, report.sections_total),
        8,  # number of tick() steps below
        report.section_name,
    )
    try:
        section_outputs = {}
        section_progress.tick("Gathering user information")
        report.user_count = get_user_stats(report)
        section_progress.tick("Finding server IP information")
        section_outputs["ip_summary"] = get_ip_info(report)
        section_progress.tick("Checking domain registration")
        section_outputs["registration_summary"] = get_registration_info(report)
        # Highlight registration notices (e.g. expiring domains) in red.
        if isinstance(section_outputs["registration_summary"], str) and section_outputs["registration_summary"].lstrip().startswith("[Notice]"):
            section_outputs["registration_summary"] = report.apply_color("red", section_outputs["registration_summary"])
        section_progress.tick("Building DNS summary")
        try:
            section_outputs["dns_summary"] = get_dns_info(report)
        except Exception:
            logger.exception("DNS summary build failed")
            section_outputs["dns_summary"] = report.apply_color(
                "red",
                "[Error] Failed to retrieve DNS information via dig.",
            )
            # Without DNS data, assume the domain doesn't resolve so the
            # site-stats step below is skipped instead of timing out.
            report.domain_resolves = False
        section_progress.tick("Detecting web stack")
        get_web_stack(report)
        section_progress.tick("Finding document root")
        get_docroot(report)
        section_progress.tick("Detecting CMS")
        report.cms_name = get_cms(report)
        section_progress.tick("Collecting site performance statistics")
        if not getattr(report, "domain_resolves", False):
            section_outputs["site_stats_summary"] = report.apply_color(
                "red",
                f"{report.managed_domain} does not resolve to any IP address on the internet.\n"
                "Unable to determine website performance statistics",
            )
        else:
            try:
                section_outputs["site_stats_summary"] = get_site_stats(report)
            except subprocess.TimeoutExpired:
                logger.error("Site stats curl timed out for %s", report.managed_domain)
                section_outputs["site_stats_summary"] = report.apply_color(
                    "red",
                    "[Warning] curl timed out while collecting site performance stats.\n"
                    "The server may be under heavy load or not responding quickly.\n"
                    "Skipping detailed performance metrics for this run.",
                )
            except Exception:
                logger.exception("Site stats collection failed")
                section_outputs["site_stats_summary"] = report.apply_color(
                    "red",
                    "[Error] Failed to collect site performance statistics.\n"
                    "Please review curl or site availability manually.",
                )
        # Assemble the Markdown section from the collected summaries.
        return (
            f"{report.format_heading('## Account, Website, and Domain Summary')}\n"
            f"{report.format_label_value('Managed Domain', report.managed_domain)}\n"
            f"{report.format_label_value('Account Owner', getattr(report, 'domain_owner', None))}\n"
            f"{report.format_label_value('Panel', report.panel_type)}\n"
            f"{report.format_label_value('User Accounts on Server', getattr(report, 'user_count', 'Unknown'))}\n"
            f"{report.format_label_value('Domains on Server', report.domain_count)}\n"
            f"{report.format_label_value('Web Server Stack', (', '.join(report.web_stack) if report.web_stack else 'Not detected'))}\n"
            f"{report.format_label_value('Document Root', getattr(report, 'docroot', 'Unknown'))}\n"
            f"{report.format_label_value('CMS Detected', getattr(report, 'cms_name', None) or 'None')}\n\n"
            f"{report.format_subheading('### Site Performance Stats:')}\n"
            f"{report.format_block(section_outputs.get('site_stats_summary', ''))}\n\n"
            f"{report.format_subheading('### Domain Registration:')}\n"
            f"{report.format_block(section_outputs.get('registration_summary', ''))}\n\n"
            f"{report.format_subheading('### DNS:')}\n"
            f"{report.format_block(section_outputs.get('dns_summary', ''))}\n\n"
            f"{report.format_subheading('### IP Address Info:')}\n"
            f"{report.format_block_dim(section_outputs.get('ip_summary', ''))}"
        )
    except Exception:
        logger.exception("Failed to build 'Account & Website' section")
        error_text = (
            "[Error] Failed to build account section. Please do a manual review for "
            "account, DNS, IP, and site stats. Please report this bug."
        )
        return report.apply_color("red", error_text)
# --- Traffic
def get_apache_version(report):
    """Probe common Apache control binaries for a version string.

    Tries apachectl/httpd/apache2 variants in order, parses "Apache/X.Y[.Z]"
    from their `-v` banner, stores the result on report.apache_version and
    returns it (or a "detect manually" message when every probe fails).
    """
    candidates = [
        ["apachectl", "-v"],
        ["/usr/local/apache/bin/apachectl", "-v"],
        ["httpd", "-v"],
        ["/usr/local/apache/bin/httpd", "-v"],
        ["apache2ctl", "-v"],
        ["apache2", "-v"],
    ]
    version_re = re.compile(r"Apache/?(\d+\.\d+(?:\.\d+)?)", re.IGNORECASE)
    for candidate in candidates:
        executable = candidate[0]
        # Absolute paths must exist and be executable; bare names must be on PATH.
        if os.path.isabs(executable):
            runnable = os.path.isfile(executable) and os.access(executable, os.X_OK)
        else:
            runnable = shutil.which(executable) is not None
        if not runnable:
            continue
        try:
            probe = subprocess.run(
                candidate,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                text=True,
                timeout=5,
                check=False,
            )
        except subprocess.TimeoutExpired:
            logger.debug("Apache version probe timed out for %s", executable)
            continue
        except Exception as exc:
            logger.debug("Apache version probe failed for %s: %s", executable, exc)
            continue
        # Some builds print the banner on stderr instead of stdout.
        banner = (probe.stdout or probe.stderr or "").strip()
        if not banner:
            continue
        found = version_re.search(banner)
        if not found:
            logger.debug("No Apache version string found in output from %s", executable)
            continue
        report.apache_version = f"Apache {found.group(1)}"
        logger.info("Detected Apache version '%s' using %s", report.apache_version, executable)
        return report.apache_version
    logger.warning("Apache version could not be determined from available binaries.")
    report.apache_version = "Unable to detect, do this manually."
    return report.apache_version
def _apache_run_command(command_args, timeout=5):
try:
return subprocess.check_output(
command_args,
text=True,
stderr=subprocess.DEVNULL,
timeout=timeout,
)
except Exception:
return ""
def _apache_read_first_existing(report, candidate_paths):
for candidate_path in candidate_paths:
if os.path.isfile(candidate_path):
try:
with open(candidate_path, "r", encoding="utf-8", errors="ignore") as config_file:
config_text = config_file.read()
report.apache_mpm_conf_path = candidate_path
return config_text
except Exception:
continue
return ""
def _apache_extract_int(directive_name, source_text):
directive_match = re.search(
rf"(?mi)^\s*{re.escape(directive_name)}\s+(\d+)",
source_text or "",
)
if not directive_match:
return None
try:
return int(directive_match.group(1))
except Exception:
return None
def _apache_detect_mpm_from_modules(modules_output):
for output_line in (modules_output or "").splitlines():
module_match = re.match(r"^mpm_(\w+)_module\b", output_line.strip())
if module_match:
return module_match.group(1).lower()
return None
def _apache_detect_mpm_from_config(config_text):
for candidate_mpm in ("event", "worker", "prefork"):
if re.search(rf"LoadModule\s+mpm_{candidate_mpm}_module", config_text or "", re.I):
return candidate_mpm
return None
def _apache_extract_mpm_block(config_text, mpm_name):
if not config_text or not mpm_name:
return ""
block_match = re.search(
rf"<IfModule\s+mpm_{re.escape(mpm_name)}_module>(.*?)</IfModule>",
config_text,
re.I | re.S,
)
return block_match.group(1) if block_match else ""
def get_apache_conf(report):
    """Summarize the Apache MPM configuration (MPM type and tuning directives).

    Detects the active MPM via `apachectl -M` (falling back to LoadModule
    lines in the panel-specific config files), extracts the common worker
    tuning directives onto the report, and returns a formatted multi-line
    summary.  Returns a short notice string when Apache is not in the stack.
    """
    def _format_label_value_bold_label(label, value):
        # Local formatter: bold white label, plain white value, "Unknown" fallback.
        label_text = f"{str(label).rstrip(':')}:"
        label_bold = report.apply_color("bold", label_text)
        label_colored = report.apply_color("white", label_bold)
        value_text = "" if value is None else str(value)
        if not value_text:
            value_text = "Unknown"
        value_colored = report.apply_color("white", value_text)
        return f"{label_colored} {value_colored}"
    web_stack = getattr(report, "web_stack", []) or []
    if isinstance(web_stack, str):
        web_stack = [web_stack]
    port_80_service = (getattr(report, "port_80_service", "") or "").lower()
    # Apache counts as present when named in the stack OR serving port 80 directly.
    has_apache = any(
        any(term in str(item).lower() for term in ("apache", "httpd"))
        for item in web_stack
    ) or port_80_service in ("apache", "httpd")
    if not has_apache:
        logger.info("Apache not in web stack")
        return "Apache not detected in web stack."
    panel_type = (getattr(report, "panel_type", "") or "").strip().lower()
    # Prefer the live module list; fall back to config parsing below.
    apache_modules_output = _apache_run_command(["apachectl", "-M"]) or _apache_run_command(["httpd", "-M"])
    report.apache_mpm = _apache_detect_mpm_from_modules(apache_modules_output)
    # Panel-specific config locations (cPanel EA4, CWP, then generic RHEL/Debian).
    config_paths = (
        (
            "/etc/apache2/conf.modules.d/00-mpm.conf",
            "/etc/apache2/conf/httpd.conf",
            "/etc/apache2/conf/extra/httpd-mpm.conf",
        )
        if panel_type == "cpanel"
        else (
            "/usr/local/apache/conf/extra/httpd-mpm.conf",
            "/usr/local/apache/conf/httpd.conf",
        )
        if panel_type in ("cwp", "control web panel")
        else (
            "/etc/httpd/conf.modules.d/00-mpm.conf",
            "/etc/httpd/conf/httpd.conf",
            "/etc/apache2/conf.modules.d/00-mpm.conf",
            "/etc/apache2/conf/httpd.conf",
        )
    )
    config_text = _apache_read_first_existing(report, config_paths)
    if config_text and not report.apache_mpm:
        report.apache_mpm = _apache_detect_mpm_from_config(config_text)
    # Prefer directives scoped inside the active MPM's <IfModule> block.
    source_text = _apache_extract_mpm_block(config_text, report.apache_mpm) or config_text
    report.apache_maxworkers = _apache_extract_int("MaxRequestWorkers", source_text)
    if report.apache_maxworkers is None:
        # Pre-2.4 name for the same directive.
        report.apache_maxworkers = _apache_extract_int("MaxClients", source_text)
    report.apache_maxconnections = _apache_extract_int("MaxConnectionsPerChild", source_text)
    if report.apache_maxconnections is None:
        # Pre-2.4 name for the same directive.
        report.apache_maxconnections = _apache_extract_int("MaxRequestsPerChild", source_text)
    report.apache_serverlimit = _apache_extract_int("ServerLimit", source_text)
    report.apache_threadsperchild = _apache_extract_int("ThreadsPerChild", source_text)
    report.apache_startservers = _apache_extract_int("StartServers", source_text)
    report.apache_minsparethreads = _apache_extract_int("MinSpareThreads", source_text)
    report.apache_maxsparethreads = _apache_extract_int("MaxSpareThreads", source_text)
    report.apache_minspareservers = _apache_extract_int("MinSpareServers", source_text)
    report.apache_maxspareservers = _apache_extract_int("MaxSpareServers", source_text)
    # Always show MPM/workers/connections; the rest only when actually configured.
    summary_lines = [
        _format_label_value_bold_label("Apache MPM", report.apache_mpm.capitalize() if report.apache_mpm else "Unknown"),
        _format_label_value_bold_label("MaxRequestWorkers", report.apache_maxworkers),
        _format_label_value_bold_label("MaxConnectionsPerChild", report.apache_maxconnections),
    ]
    if report.apache_serverlimit is not None:
        summary_lines.append(_format_label_value_bold_label("ServerLimit", report.apache_serverlimit))
    if report.apache_threadsperchild is not None:
        summary_lines.append(_format_label_value_bold_label("ThreadsPerChild", report.apache_threadsperchild))
    if report.apache_startservers is not None:
        summary_lines.append(_format_label_value_bold_label("StartServers", report.apache_startservers))
    if report.apache_minsparethreads is not None:
        summary_lines.append(_format_label_value_bold_label("MinSpareThreads", report.apache_minsparethreads))
    if report.apache_maxsparethreads is not None:
        summary_lines.append(_format_label_value_bold_label("MaxSpareThreads", report.apache_maxsparethreads))
    if report.apache_minspareservers is not None:
        summary_lines.append(_format_label_value_bold_label("MinSpareServers", report.apache_minspareservers))
    if report.apache_maxspareservers is not None:
        summary_lines.append(_format_label_value_bold_label("MaxSpareServers", report.apache_maxspareservers))
    if getattr(report, "apache_mpm_conf_path", None):
        summary_lines.append(_format_label_value_bold_label("Config Source", report.apache_mpm_conf_path))
    logger.info("Apache config summary built")
    return "\n".join(summary_lines)
def get_nginx_version(report):
    """Detect the NGINX version when NGINX is the HTTP service on port 80.

    Caches the port-80 service on the report, runs `nginx -v`, stores the
    parsed version on report.nginx_version, and returns a human-readable
    status string.
    """
    service = getattr(report, "port_80_service", None)
    if not service:
        service = get_service_on_port(80)
        report.port_80_service = service
    if not service or "nginx" not in str(service).lower():
        logger.info("NGINX is not serving on port 80; skipping version detection.")
        report.nginx_version = None
        return "Not detected (NGINX is not serving HTTP on port 80)"
    binary = shutil.which("nginx")
    if not binary:
        # PATH lookup failed; fall back to well-known install locations.
        for fallback in ("/usr/sbin/nginx", "/usr/local/sbin/nginx", "/usr/bin/nginx"):
            if os.path.isfile(fallback) and os.access(fallback, os.X_OK):
                binary = fallback
                break
    if not binary:
        logger.warning("NGINX binary not found in PATH or common locations.")
        report.nginx_version = None
        return "Unable to detect NGINX version (binary not found, check manually)."
    try:
        probe = subprocess.run(
            [binary, "-v"],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            text=True,
            timeout=5,
            check=False,
        )
    except Exception as exc:
        logger.exception("Failed to execute %s -v: %s", binary, exc)
        report.nginx_version = None
        return "Unable to detect NGINX version (nginx -v failed, check manually)."
    # nginx historically prints its banner on stderr, so merge both streams.
    chunks = [part for part in (probe.stdout, probe.stderr) if part]
    banner = "\n".join(chunks).strip()
    logger.debug("Raw NGINX version output: %s", banner or "<empty>")
    found = re.search(r"nginx[^0-9]*([\d.]+\d)", banner or "")
    if not found:
        logger.warning("Could not parse NGINX version from output.")
        report.nginx_version = None
        return "Unable to detect NGINX version (unparseable output, check manually)."
    version_string = found.group(1)
    report.nginx_version = version_string
    logger.info("Detected NGINX version: %s", version_string)
    return f"NGINX {version_string}"
def get_nginx_vhost_path(report):
    """Locate the NGINX vhost config file for report.managed_domain.

    Resolution is panel-specific: cPanel uses owner__domain naming under
    /etc/nginx/vhosts (with a server_name scan as fallback), CWP and
    UltraStack ONE use fixed per-domain paths.  For cPanel the conventional
    path is returned even when no file was found; returns None for
    unsupported panel types.
    """
    domain = (getattr(report, "managed_domain", "") or "").strip().lower()
    # cPanel vhost filenames replace dots with underscores.
    domain_underscored = domain.replace(".", "_")
    panel = (getattr(report, "panel_type", "") or "").strip()
    if panel == "cPanel":
        owner = (getattr(report, "domain_owner", "") or "").strip()
        # Fast path: the conventional owner__domain filename.
        if owner:
            p = f"/etc/nginx/vhosts/{owner}__{domain_underscored}.conf"
            if os.path.isfile(p):
                return p
        def match_domain(path, target):
            # True when any server_name token equals the domain, its www
            # variant, or a subdomain of it.
            try:
                with open(path, encoding="utf-8") as f:
                    for line in f:
                        if "server_name" not in line:
                            continue
                        cleaned = re.sub(r"\s+", " ", line.replace("server_name", "")).strip(" ;\n")
                        tokens = [t.lower() for t in cleaned.split()]
                        for t in tokens:
                            if (
                                t == target
                                or t == f"www.{target}"
                                or t.endswith(f".{target}")
                            ):
                                return True
            except Exception:
                logger.exception("Error scanning NGINX vhost %s", path)
            return False
        # Slow path: scan every .conf in the usual vhost directories.
        search_dirs = [
            "/etc/nginx/vhosts",
            "/etc/nginx/conf.d/vhosts",
            "/etc/nginx/conf.d",
        ]
        for d in search_dirs:
            if not os.path.isdir(d):
                continue
            for path in sorted(glob.glob(os.path.join(d, "*.conf"))):
                if match_domain(path, domain):
                    return path
        # Last resort: any owner's file for this domain, else the
        # conventional (possibly nonexistent) path for error messages.
        fallback = glob.glob(f"/etc/nginx/vhosts/*__{domain_underscored}.conf")
        if fallback:
            return fallback[0]
        return f"/etc/nginx/vhosts/{owner or 'UNKNOWN'}__{domain_underscored}.conf"
    if panel == "Control Web Panel":
        return f"/etc/nginx/conf.d/vhosts/{domain}.conf"
    if panel == "UltraStack ONE":
        return f"/etc/nginx/conf.d/{domain}.conf"
    # Unsupported panel: caller must handle None.
    return None
def get_nginx_proxy_pass(report):
    """Find the backend port NGINX proxies to for report.managed_domain.

    Scans vhost configs for the server block matching the domain, extracts
    its `location /` block, and parses the proxy_pass target: a literal
    host:port yields the port directly; a named upstream is resolved to a
    port via upstream blocks (UltraStack panels only).  Records the matched
    vhost on report.nginx_matched_vhost.  Returns the port as a string, or
    None when nothing could be determined.
    """
    def resolve_upstream_port(upstream_name):
        # Look up `upstream NAME { server host:port; }` across conf.d and
        # return the port of its first server entry (None for unix sockets).
        search_paths = ["/etc/nginx/conf.d/upstream.conf"]
        search_paths.extend(sorted(glob.glob("/etc/nginx/conf.d/*.conf")))
        upstream_block_re = re.compile(
            rf'upstream\s+{re.escape(upstream_name)}\s*\{{(.*?)\}}',
            re.S
        )
        server_re = re.compile(r'^\s*server\s+([^;#]+);', re.M)
        for config_path in search_paths:
            if not os.path.isfile(config_path):
                continue
            try:
                with open(config_path, encoding="utf-8") as fh:
                    text = fh.read()
                block_match = upstream_block_re.search(text)
                if not block_match:
                    continue
                block_body = block_match.group(1)
                server_match = server_re.search(block_body)
                if not server_match:
                    continue
                target = server_match.group(1).strip()
                if target.startswith("unix:"):
                    # Socket-backed upstream has no TCP port to report.
                    return None
                port_match = re.search(r':(\d+)$', target)
                if port_match:
                    return port_match.group(1)
            except Exception:
                logger.exception("Error reading upstream %s in %s", upstream_name, config_path)
        return None
    def block_matches(block_lines, domain_name):
        # True when a server_name token equals the domain, its www variant,
        # or a subdomain of it (same matching rule as get_nginx_vhost_path).
        lowered = domain_name.lower()
        for raw in block_lines:
            if "server_name" not in raw:
                continue
            cleaned = re.sub(r"\s+", " ", raw.replace("server_name", "")).strip(" ;\n")
            for token in cleaned.split():
                token = token.lower()
                if (
                    token == lowered
                    or token == f"www.{lowered}"
                    or token.endswith(f".{lowered}")
                ):
                    return True
        return False
    def iter_vhosts(report):
        # Yield candidate vhost files: the panel's expected path first,
        # then every .conf under the common vhost directories, de-duplicated.
        seen = set()
        initial = get_nginx_vhost_path(report)
        if initial and os.path.isfile(initial):
            seen.add(initial)
            yield initial
        dirs = [
            "/etc/nginx/vhosts",
            "/etc/nginx/conf.d/vhosts",
            "/etc/nginx/conf.d",
        ]
        for directory in dirs:
            if not os.path.isdir(directory):
                continue
            for p in sorted(glob.glob(os.path.join(directory, "*.conf"))):
                if p not in seen and os.path.isfile(p):
                    seen.add(p)
                    yield p
    def extract_server_blocks(lines):
        # Split a config into server { ... } blocks by brace counting.
        # NOTE(review): brace counting ignores braces inside strings/comments;
        # acceptable for machine-generated vhost files.
        blocks = []
        cur = []
        depth = 0
        in_server = False
        for raw in lines:
            stripped = raw.strip()
            if not in_server and stripped.startswith("server"):
                in_server = True
                cur = [raw]
                depth = raw.count("{") - raw.count("}")
                continue
            if in_server:
                cur.append(raw)
                depth += raw.count("{") - raw.count("}")
                if depth <= 0:
                    blocks.append(cur)
                    in_server = False
                    cur = []
        return blocks
    def extract_location_block(block_lines):
        # Return the lines of the first `location / { ... }` block, if any.
        in_loc = False
        depth = 0
        collected = []
        for raw in block_lines:
            stripped = raw.strip()
            if not in_loc and re.match(r'location\s+/\s*\{?', stripped):
                in_loc = True
                collected = [raw]
                depth = raw.count("{") - raw.count("}")
                continue
            if in_loc:
                collected.append(raw)
                depth += raw.count("{") - raw.count("}")
                if depth <= 0:
                    break
        return collected
    def parse_proxy_pass(report, location_lines):
        # Literal "proxy_pass http(s)://host:port;" wins; otherwise treat the
        # target as a named upstream (resolved only on UltraStack panels).
        for raw in location_lines:
            stripped = raw.strip()
            if "proxy_pass" not in stripped:
                continue
            literal = re.search(r'proxy_pass\s+https?://[^\s:;]+:(\d+)\s*;', stripped)
            if literal:
                return literal.group(1)
            # NOTE(review): the second alternative is redundant — `https?://`
            # in the first pattern already matches plain http:// targets.
            upstream = (
                re.search(r'proxy_pass\s+https?://([A-Za-z0-9_.-]+)\s*;', stripped)
                or re.search(r'proxy_pass\s+http://([A-Za-z0-9_.-]+)\s*;', stripped)
            )
            if upstream:
                upstream_name = upstream.group(1)
                panel = (getattr(report, "panel_type", "") or "").lower()
                if "ultrastack" in panel:
                    return resolve_upstream_port(upstream_name)
                return None
        return None
    domain = (getattr(report, "managed_domain", "") or "").strip().lower()
    try:
        matched_info = None
        for conf_path in iter_vhosts(report):
            try:
                with open(conf_path, encoding="utf-8") as fh:
                    lines = fh.readlines()
            except Exception:
                logger.exception("Cannot read vhost %s", conf_path)
                continue
            for block in extract_server_blocks(lines):
                if block_matches(block, domain):
                    matched_info = (block, conf_path)
                    break
            if matched_info:
                break
        if not matched_info:
            return None
        report.nginx_matched_vhost = matched_info[1]
        logger.debug("NGINX vhost match found in %s", matched_info[1])
        location = extract_location_block(matched_info[0])
        if not location:
            return None
        return parse_proxy_pass(report, location)
    except Exception:
        logger.exception("NGINX parsing error for %s", report.managed_domain)
        return None
def get_nginx_docroot(report):
    """Resolve the document root for report.managed_domain from its NGINX vhost.

    Verifies the vhost's server_name actually covers the managed domain,
    prefers a `root` directive inside the `location /` block, and falls back
    to any `root` in the file.  Stores the result on report.docroot.  Returns
    None when the vhost is missing, serves a different domain, has no root
    directive, or cannot be parsed.
    """
    path = get_nginx_vhost_path(report)
    # Fix: get_nginx_vhost_path returns None for unsupported panel types, and
    # os.path.isfile(None) raises TypeError (isfile only swallows OSError /
    # ValueError) — guard explicitly instead of crashing.
    if not path or not os.path.isfile(path):
        return None
    try:
        with open(path, encoding="utf-8") as f:
            content = f.read()
        dom = getattr(report, "managed_domain", "")
        # Make sure this vhost really serves the managed domain (bare or www).
        if not re.search(rf'\bserver_name\b[^\n;]*\b({re.escape(dom)}|www\.{re.escape(dom)})\b', content):
            return None
        # Prefer a root directive scoped to the top-level `location /` block.
        loc_block = re.search(r'location\s+/\s*\{([^{}]|\{[^{}]*\})*\}', content, re.S)
        if loc_block:
            m = re.search(r'root\s+([^\s;]+);', loc_block.group(0))
            if m:
                report.docroot = m.group(1).strip()
                return report.docroot
        # Fall back to the first root directive anywhere in the file.
        m = re.search(r'root\s+([^\s;]+);', content)
        if m:
            report.docroot = m.group(1).strip()
            logger.info("Found NGINX docroot for %s: %s", dom, report.docroot)
            return report.docroot
    except Exception:
        logger.exception("Failed to parse NGINX docroot for %s", report.managed_domain)
        return None
def get_varnish_backend_port(report):
    """Read the backend port from the domain's Varnish vhost config, or None."""
    vhost_path = f"/etc/varnish/conf.d/vhosts/{report.managed_domain}.conf"
    if not os.path.isfile(vhost_path):
        logger.warning("Varnish vhost not found: %s", vhost_path)
        return None
    try:
        with open(vhost_path, encoding="utf-8") as handle:
            text = handle.read()
    except Exception:
        logger.exception("Failed to parse Varnish vhost for %s", report.managed_domain)
        return None
    match = re.search(r'\.port\s*=\s*"(\d+)"', text)
    if not match:
        logger.warning("No backend port found in Varnish vhost: %s", vhost_path)
        return None
    backend_port = match.group(1)
    logger.debug("Found Varnish backend port %s in %s", backend_port, vhost_path)
    return backend_port
def check_for_ultrastack(report):
    """
    Decide whether UltraStack is present, based on panel type.

    - panel_type contains "Ultrastack ONE" -> True
    - panel_type is Control Web Panel (CWP) -> False (not installable)
    - panel_type is cPanel -> True if imh-ultrastack-ded is installed
    - anything else -> False

    Stores the result on report.has_ultrastack and returns it.
    """
    raw_panel = getattr(report, "panel_type", "") or ""
    panel = raw_panel.strip().lower()
    if not panel:
        logger.info("Panel type unknown while checking for UltraStack; assuming not installed.")
        report.has_ultrastack = False
        return report.has_ultrastack
    if "ultrastack one" in panel:
        logger.info("Panel type indicates UltraStack ONE, marking UltraStack as present.")
        report.has_ultrastack = True
        return report.has_ultrastack
    if "control web panel" in panel or panel == "cwp":
        logger.info("Panel type is Control Web Panel; UltraStack is not installable.")
        report.has_ultrastack = False
        return report.has_ultrastack
    if "cpanel" not in panel:
        logger.info("Unrecognized panel_type '%s'; assuming UltraStack is not installed.", raw_panel)
        report.has_ultrastack = False
        return report.has_ultrastack
    # cPanel: ask the system package manager whether the UltraStack package exists.
    installed = False
    try:
        if shutil.which("rpm"):
            query = subprocess.run(
                ["rpm", "-q", "imh-ultrastack-ded"],
                stdout=subprocess.DEVNULL,
                stderr=subprocess.DEVNULL,
                text=True,
                check=False,
            )
            installed = query.returncode == 0
        elif shutil.which("dpkg-query"):
            query = subprocess.run(
                ["dpkg-query", "-W", "-f=${Status}", "imh-ultrastack-ded"],
                stdout=subprocess.PIPE,
                stderr=subprocess.DEVNULL,
                text=True,
                check=False,
            )
            installed = "install ok installed" in (query.stdout or "").lower()
        else:
            logger.info("No known package manager found while checking for UltraStack.")
    except Exception:
        logger.exception("Failed to detect UltraStack package via system package manager.")
    report.has_ultrastack = installed
    return report.has_ultrastack
def get_web_stats(report):
    """Tally today's HTTP status codes and bot hits from Apache/NGINX access logs.

    Log locations are chosen from report.panel_type (CWP, cPanel, UltraStack
    ONE, or generic fallbacks); only servers present in report.web_stack /
    report.port_80_service are scanned.  Returns a rendered text summary, or
    a "no entries" message when nothing matched today's date.
    """
    # Case-insensitive User-Agent substrings counted as bot traffic.
    bot_names = (
        "bot", "crawl", "crawler", "spider", "slurp", "bingpreview", "bingbot", "googlebot",
        "google-inspectiontool", "googleother", "duckduckbot", "yandexbot", "yandeximages",
        "baiduspider", "sogou", "seznambot", "semrushbot", "mj12bot", "ahrefs", "ahrefsbot",
        "applebot", "amazonbot", "petalbot", "bytedance", "tiktokbot", "claudebot",
        "opensiteexplorer", "blexbot", "uptimerobot", "pingdom", "site24x7", "netcraft",
        "dotbot", "linkchecker", "integritybot", "facebookexternalhit", "facebot", "twitterbot",
        "discordbot", "linkedinbot", "slackbot", "whatsapp", "telegrambot", "redditbot",
        "ia_archiver", "archive.org_bot", "wayback", "python-requests", "aiohttp", "curl",
        "wget", "libwww-perl", "go-http-client", "okhttp", "gptbot", "chatgpt-user", "chatgpt",
        "bard", "google-extended", "anthropic-ai", "cohere-ai", "perplexitybot",
        "perplexityai", "openai", "openai-shared", "meta-externalagent", "facebookbot",
        "miuibot", "zoominfo", "majestic", "majestic12", "monitis", "newrelicpinger",
        "datadog", "cloudflarehealthchecks", "cloudflare-verifiedbot", "cloudflarebot",
        "statuscake",
    )
    # Shared tallies mutated by process_log_file:
    #   status -> (source, code) counts; totals -> per-source counted lines;
    #   bots   -> (source, bot_name) hit counts.
    state = {
        "status": Counter(),
        "totals": Counter(),
        "bots": Counter(),
    }
    # Common-log-format date prefix for "today" (e.g. "12/Jan/2025:"); lines
    # from other days are skipped.
    date_pattern = dt.datetime.now().strftime("%d/%b/%Y:")
    def process_log_file(path, source_name, domain_filter=None, max_lines=100000):
        # Tally one access log into `state`.  NOTE(review): this reads the
        # FIRST max_lines lines of the file, so on very large logs today's
        # newest entries (near the end) may be missed — confirm intent.
        if not path or not os.path.exists(path):
            return
        try:
            with open(path, "rt", encoding="utf-8", errors="replace") as handle:
                for line_number, line in enumerate(handle, start=1):
                    if max_lines and line_number > max_lines:
                        break
                    if date_pattern and date_pattern not in line:
                        continue
                    if domain_filter and domain_filter not in line:
                        continue
                    # Status code: the 3-digit field after the quoted request.
                    status_match = re.search(r'"\s+([0-9]{3})\s+', line)
                    if status_match:
                        status_code = status_match.group(1)
                        state["status"][(source_name, status_code)] += 1
                        state["totals"][source_name] += 1
                    # One line may match several bot substrings; each counts.
                    lower_line = line.lower()
                    for bot_name in bot_names:
                        if bot_name in lower_line:
                            state["bots"][(source_name, bot_name)] += 1
        except Exception:
            logger.debug("Failed processing %s log", source_name)
    def resolve_log_plan():
        # Decide which Apache/NGINX log files to scan based on panel type.
        panel_type = (getattr(report, "panel_type", "") or "").strip()
        managed_domain = (getattr(report, "managed_domain", "") or "").strip()
        apache_log_path = None
        nginx_log_path = None
        nginx_domain_filter = None
        if panel_type == "Control Web Panel":
            # CWP keeps a single combined domlog used for both servers.
            if managed_domain:
                apache_log_path = f"/usr/local/apache/domlogs/{managed_domain}.log"
                nginx_log_path = apache_log_path
        elif panel_type == "cPanel":
            if managed_domain:
                # Prefer the SSL domlog; fall back to the plain-HTTP one.
                primary = f"/usr/local/apache/logs/domlogs/{managed_domain}-ssl_log"
                alt = f"/usr/local/apache/logs/domlogs/{managed_domain}"
                if os.path.exists(primary):
                    apache_log_path = primary
                elif os.path.exists(alt):
                    apache_log_path = alt
                # NGINX logs all vhosts together; filter lines by domain.
                nginx_log_path = "/var/log/nginx/access.log"
                nginx_domain_filter = managed_domain
        elif panel_type == "UltraStack ONE":
            if managed_domain:
                ssl_log = f"/var/log/httpd/ssl-{managed_domain}-access.log"
                non_ssl_log = f"/var/log/httpd/{managed_domain}-access.log"
                if os.path.exists(ssl_log):
                    apache_log_path = ssl_log
                elif os.path.exists(non_ssl_log):
                    apache_log_path = non_ssl_log
                nginx_log_path = "/var/log/nginx/access.log"
                nginx_domain_filter = managed_domain
        else:
            # Unknown panel: fall back to distro-default log locations.
            if os.path.exists("/var/log/httpd/access_log"):
                apache_log_path = "/var/log/httpd/access_log"
            elif os.path.exists("/var/log/apache2/access.log"):
                apache_log_path = "/var/log/apache2/access.log"
            if os.path.exists("/var/log/nginx/access.log"):
                nginx_log_path = "/var/log/nginx/access.log"
                nginx_domain_filter = managed_domain
        return {
            "apache_log": apache_log_path,
            "nginx_log": nginx_log_path,
            "nginx_domain_filter": nginx_domain_filter,
        }
    log_plan = resolve_log_plan()
    # Only scan logs for servers actually present in the detected stack.
    web_stack = getattr(report, "web_stack", None) or []
    port_80_service = (getattr(report, "port_80_service", "") or "").lower()
    stack_text = " ".join(str(item).lower() for item in web_stack)
    has_apache = ("apache" in stack_text or "httpd" in stack_text or port_80_service in ("apache", "httpd"))
    has_nginx = ("nginx" in stack_text or port_80_service == "nginx")
    if has_apache and log_plan.get("apache_log"):
        process_log_file(log_plan["apache_log"], "apache")
    if has_nginx and log_plan.get("nginx_log") and os.path.exists(log_plan["nginx_log"]):
        process_log_file(
            log_plan["nginx_log"],
            "nginx",
            domain_filter=log_plan.get("nginx_domain_filter"),
        )
    if not state["totals"]:
        return "No recent access log entries detected for today.\n\n"
    def render_source_summary(source_name):
        # Render one server's tallies as a text block; "" when it saw no lines.
        total = state["totals"].get(source_name, 0)
        if not total:
            return ""
        lines = []
        lines.append(f"### {source_name.upper()} Access Log Summary\nStatus Codes:\n")
        codes = sorted(
            {code for (src, code) in state["status"].keys() if src == source_name},
            key=int,
        )
        for code in codes:
            lines.append(f" {code + ':':<6} {state['status'][(source_name, code)]:7d}\n")
        lines.append(f" {'Total:':<6} {total:7d}\n\n")
        lines.append("Bot Activity by User-Agent:\n")
        bot_keys = [key for key in state["bots"].keys() if key[0] == source_name]
        if not bot_keys:
            # Early return: the "(none)" block ends this source's summary.
            lines.append(" (none)\n\n\n")
            return "".join(lines)
        for _, bot_name in sorted(bot_keys, key=lambda item: item[1]):
            lines.append(f" {bot_name + ':':<22} {state['bots'][(source_name, bot_name)]:7d}\n")
        lines.append("\n\n")
        return "".join(lines)
    return render_source_summary("apache") + render_source_summary("nginx")
def get_cdn_stats(report):
    """Probe the managed domain over HTTP(S) and report CDN/proxy details.

    Uses `curl -I -L` against robots.txt (then the site root), inspects the
    final response's headers for Cloudflare/BunnyCDN signatures, and records
    report.has_cloudflare, report.cdn_provider, report.cdn_cache_status and
    report.cdn_cache_hit.  Returns a small colorized label/value block, or an
    error string when curl is unavailable or every probe fails.
    """
    def _apply_first_style(text: str, styles):
        # Wrap `text` in the first style the report's palette supports
        # (apply_color returns the input unchanged for unknown styles).
        for style in styles:
            colored = report.apply_color(style, text)
            if colored != text:
                return colored
        return text
    def _format_label_value_bold_label(label, value, none_token="none detected"):
        # Render "Label: value": bold white label; green for real values,
        # amber/yellow for the "None detected" placeholder.
        label_text = f"{str(label).rstrip(':')}:"
        label_bold = report.apply_color("bold", label_text)
        label_colored = report.apply_color("white", label_bold)
        raw_value = "" if value is None else str(value).strip()
        if not raw_value:
            raw_value = "None detected"
        is_none = raw_value.lower() == none_token
        if is_none:
            value_colored = _apply_first_style(raw_value, ("amber", "yellow"))
        else:
            value_colored = report.apply_color("green", raw_value)
        return f"{label_colored} {value_colored}"
    def _extract_final_header_block(raw_header_text: str) -> str:
        # With -L curl prints one header block per redirect hop; keep only
        # the last complete block (the final response).
        current_block = []
        final_block = []
        for raw_line in (raw_header_text or "").splitlines():
            line = (raw_line or "").rstrip("\r")
            if line.lower().startswith("http/"):
                # A status line starts a new hop's block.
                current_block = [line]
                continue
            if not current_block:
                continue
            if not line.strip():
                # A blank line terminates the current block.
                if current_block:
                    final_block = current_block
                current_block = []
                continue
            current_block.append(line)
        if current_block:
            final_block = current_block
        return "\n".join(final_block)
    def _curl_head(url: str):
        # HEAD request following redirects; HTTP errors don't raise
        # (check=False), only a hard timeout does.
        cmd = [
            "curl",
            "-sS",
            "-I",
            "-L",
            "--max-time",
            "15",
            "-A",
            "Mozilla/5.0 (compatible; SPARTA)",
            url,
        ]
        return subprocess.run(
            cmd,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            text=True,
            timeout=20,
            check=False,
        )
    domain = (getattr(report, "managed_domain", "") or "").strip()
    if not domain:
        return "[Error] No managed_domain set"
    if not shutil.which("curl"):
        return "[Error] curl not found"
    # robots.txt first, then the site root; HTTPS before HTTP.  The first URL
    # that yields a parsable header block wins.
    urls_to_try = [
        f"https://{domain}/robots.txt",
        f"http://{domain}/robots.txt",
        f"https://{domain}",
        f"http://{domain}",
    ]
    last_proc = None
    final_headers_block = ""
    raw_headers = ""
    stderr_text = ""
    for url in urls_to_try:
        try:
            proc = _curl_head(url)
            last_proc = proc
            raw_headers = proc.stdout or ""
            stderr_text = proc.stderr or ""
            final_headers_block = _extract_final_header_block(raw_headers)
            if final_headers_block:
                break
        except Exception:
            continue
    if not last_proc:
        return "[Error] CDN check failed"
    if last_proc.returncode != 0 and not final_headers_block:
        # Surface curl's own diagnostics (last 20 stderr lines) when nothing
        # could be parsed.
        stderr_tail = (stderr_text or "").splitlines()[-20:]
        return "CDN check failed:\n" + "\n".join(stderr_tail) if stderr_tail else "[Error] CDN check failed"
    headers_lower = (final_headers_block or raw_headers or "").lower()
    # Provider fingerprints found in response headers.
    is_cloudflare = (
        "server: cloudflare" in headers_lower
        or "cf-ray:" in headers_lower
        or "cf-cache-status:" in headers_lower
        or "cf-mitigated:" in headers_lower
    )
    is_bunny = (
        "server: bunnycdn" in headers_lower
        or "x-bunnycdn-request-id:" in headers_lower
        or "cdn-cache:" in headers_lower
    )
    report.has_cloudflare = bool(is_cloudflare)
    report.cdn_provider = "Cloudflare" if is_cloudflare else ("BunnyCDN" if is_bunny else None)
    report.cdn_cache_status = None
    report.cdn_cache_hit = None
    # First cache-status header in the final block wins (CF or Bunny form).
    for line in (final_headers_block or "").splitlines():
        lower_line = line.lower()
        if lower_line.startswith("cf-cache-status:"):
            report.cdn_cache_status = lower_line.split(":", 1)[1].strip().upper()
            break
        if lower_line.startswith("cdn-cache:"):
            report.cdn_cache_status = lower_line.split(":", 1)[1].strip().upper()
            break
    if not report.cdn_cache_status and "cf-mitigated: challenge" in headers_lower:
        # Cloudflare challenge page: no meaningful cache verdict.
        report.cdn_cache_status = "CHALLENGE"
        report.cdn_cache_hit = None
    elif report.cdn_cache_status:
        report.cdn_cache_hit = "HIT" in report.cdn_cache_status
    provider_str = report.cdn_provider or "None detected"
    status_str = report.cdn_cache_status or "None detected"
    hit_str = "Hit" if report.cdn_cache_hit else ("Not hit" if report.cdn_cache_hit is False else "None detected")
    return (
        f"{_format_label_value_bold_label('Provider', provider_str)}\n"
        f"{_format_label_value_bold_label('Cache Status', status_str)}\n"
        f"{_format_label_value_bold_label('Cache Hit', hit_str)}\n"
    )
def get_largest_traffic(report):
    """List the ten largest Apache domlogs (rough proxy for traffic volume).

    Picks the domlog directory from report.panel_type, skips rotated (.gz),
    bytes and error logs, and records the winners on
    report.largest_traffic_results / report.largest_traffic_log_dir.
    Returns a human-readable summary or an explanatory message string.
    """
    stack = getattr(report, "web_stack", []) or []
    if isinstance(stack, str):
        stack = [stack]
    apache_present = any(
        "apache" in str(entry).lower() or "httpd" in str(entry).lower()
        for entry in stack
    )
    if not apache_present:
        logger.info("Apache not detected in web stack")
        return "Apache not detected in web stack. Skipping traffic log analysis."
    panel = (getattr(report, "panel_type", "") or "").strip().lower()
    fallback_dirs = ("/usr/local/apache/logs/domlogs", "/usr/local/apache/domlogs")
    if panel == "cpanel":
        domlog_dir = "/usr/local/apache/logs/domlogs"
    elif panel in ("cwp", "control web panel"):
        domlog_dir = "/usr/local/apache/domlogs"
    elif panel in ("ultrastack one", "ultrastack"):
        # UltraStack prefers /var/log/httpd, then the panel-style locations.
        if os.path.isdir("/var/log/httpd"):
            domlog_dir = "/var/log/httpd"
        else:
            domlog_dir = next((d for d in fallback_dirs if os.path.isdir(d)), None)
    else:
        domlog_dir = next(
            (d for d in fallback_dirs + ("/var/log/httpd",) if os.path.isdir(d)),
            None,
        )
    if not domlog_dir or not os.path.isdir(domlog_dir):
        logger.warning("Domlog directory not found for panel_type=%s", panel)
        return "Apache domlog directory not found."
    def human_size(byte_count):
        # du-style rendering: no decimals, 1024 steps, capped at terabytes.
        amount = float(byte_count)
        for suffix in ("B", "K", "M", "G", "T"):
            if amount < 1024 or suffix == "T":
                return f"{amount:.0f}{suffix}"
            amount /= 1024.0
    sized_logs = []
    try:
        for dir_entry in os.scandir(domlog_dir):
            if not dir_entry.is_file():
                continue
            lowered = dir_entry.name.lower()
            # Rotated archives and bytes/error logs are not access logs.
            if lowered.endswith(".gz") or "bytes" in lowered or "error" in lowered:
                continue
            try:
                sized_logs.append((dir_entry.stat().st_size, dir_entry.name))
            except OSError:
                continue
    except Exception as exc:
        logger.exception("Failed scanning domlogs in %s", domlog_dir)
        return f"[Error] Failed scanning Apache domlogs: {exc}"
    if not sized_logs:
        return f"No usable Apache domlogs found in {domlog_dir}"
    sized_logs.sort(key=lambda pair: pair[0], reverse=True)
    biggest = sized_logs[:10]
    report.largest_traffic_results = biggest
    report.largest_traffic_log_dir = domlog_dir
    listing = "\n".join(f"- {human_size(size)} {log_name}" for size, log_name in biggest)
    return f"Top Apache traffic logs (by size) in {domlog_dir}:\n" + listing
def traffic_build_section(report):
    """Build the Markdown 'Traffic & Web Server Summary' report section.

    Orchestrates the traffic collectors (web stats, server versions, Apache
    config, UltraStack check, CDN detection, largest domlogs), advancing the
    shared progress bar one tick per sub-step.  Returns the rendered section
    text; on any failure returns a formatted error block instead of raising.
    """
    def _format_label_value_bold_label(label, value):
        # Render "Label: value" with a bold white label; plain white value,
        # "Unknown" when the value is empty/None.
        label_text = f"{str(label).rstrip(':')}:"
        label_bold = report.apply_color("bold", label_text)
        label_colored = report.apply_color("white", label_bold)
        value_text = "" if value is None else str(value)
        if not value_text:
            value_text = "Unknown"
        value_colored = report.apply_color("white", value_text)
        return f"{label_colored} {value_colored}"
    logger.debug("Building 'Traffic' section")
    # Six ticks, one per sub-step below.
    section_progress = SectionProgress(
        report.progress,
        (report.section_index, report.sections_total),
        6,
        report.section_name,
    )
    try:
        section_outputs = {}
        section_progress.tick("Detecting web stack")
        # Populate web_stack lazily if an earlier section hasn't already.
        if not (getattr(report, "web_stack", []) or []) and not (getattr(report, "port_80_service", "") or ""):
            try:
                logger.debug("traffic: web_stack empty, calling get_web_stack")
                get_web_stack(report)
            except Exception:
                logger.exception("get_web_stack failed while building Traffic section")
        section_progress.tick("Aggregating status codes")
        logger.debug("traffic: about to call get_web_stats")
        section_outputs["web_stats_summary"] = get_web_stats(report)
        logger.debug("traffic: finished get_web_stats")
        section_progress.tick("Detecting web server versions")
        logger.debug("traffic: determining Apache/NGINX versions")
        current_web_stack = getattr(report, "web_stack", []) or []
        if isinstance(current_web_stack, str):
            current_web_stack = [current_web_stack]
        port_80_service = (getattr(report, "port_80_service", "") or "").lower()
        has_apache = any(
            any(term in str(item).lower() for term in ("apache", "httpd"))
            for item in current_web_stack
        ) or port_80_service in ("apache", "httpd")
        has_nginx = any("nginx" in str(item).lower() for item in current_web_stack) or port_80_service == "nginx"
        # Only query versions for servers that are actually present.
        report.apache_version_str = get_apache_version(report) if has_apache else None
        report.nginx_version_str = get_nginx_version(report) if has_nginx else None
        section_outputs["web_version_block"] = ""
        if report.apache_version_str or report.nginx_version_str:
            version_lines = []
            if report.apache_version_str:
                version_lines.append(_format_label_value_bold_label("Apache", report.apache_version_str))
            if report.nginx_version_str:
                version_lines.append(_format_label_value_bold_label("NGINX", report.nginx_version_str))
            section_outputs["web_version_block"] = (
                f"{report.format_subheading('### Web Server Versions')}\n"
                + "\n".join(version_lines)
                + "\n"
            )
        section_progress.tick("Collecting Apache configuration variables")
        logger.debug("traffic: about to call get_apache_conf")
        section_outputs["apache_conf_summary"] = get_apache_conf(report)
        logger.debug("traffic: finished get_apache_conf")
        section_progress.tick("Checking for UltraStack")
        logger.debug("traffic: about to call check_for_ultrastack")
        report.has_ultrastack_flag = check_for_ultrastack(report)
        logger.debug("traffic: finished check_for_ultrastack")
        section_progress.tick("Detecting CDN/proxy")
        # CDN probing needs a registered, resolving domain; backfill those
        # checks if earlier sections haven't run them yet.
        if getattr(report, "is_registered", None) is None:
            try:
                get_registration_info(report)
            except Exception:
                logger.exception("traffic: get_registration_info() failed")
        if getattr(report, "domain_resolves", None) is None:
            try:
                get_dns_info(report)
            except Exception:
                logger.exception("traffic: get_dns_info() failed")
        if getattr(report, "domain_resolves", None) is not True:
            logger.info(
                "Skipping CDN/proxy detection for %s (is_registered=%r, domain_resolves=%r)",
                getattr(report, "managed_domain", ""),
                getattr(report, "is_registered", None),
                getattr(report, "domain_resolves", None),
            )
            # Explain exactly why detection was skipped (resolution state
            # first, then registration state).
            section_outputs["cdn_status_summary"] = (
                (
                    f"{report.managed_domain} does not resolve to an IP."
                    if getattr(report, "domain_resolves", None) is False
                    else f"{report.managed_domain} DNS resolution status is unknown."
                )
                + "\n"
                + (
                    "Domain registration check indicates the domain is not registered."
                    if getattr(report, "is_registered", None) is False
                    else (
                        "Domain registration check indicates the domain is registered."
                        if getattr(report, "is_registered", None) is True
                        else "Domain registration status is unknown."
                    )
                )
                + "\nCDN/proxy detection skipped."
            )
        else:
            logger.debug("traffic: about to call get_cdn_stats")
            section_outputs["cdn_status_summary"] = get_cdn_stats(report)
            logger.debug("traffic: finished get_cdn_stats")
        section_progress.tick("Finding highest traffic logs")
        logger.debug("traffic: about to call get_largest_traffic")
        section_outputs["largest_traffic_summary"] = get_largest_traffic(report)
        logger.debug("traffic: finished get_largest_traffic")
        ultrastack_value = "Yes" if getattr(report, "has_ultrastack_flag", False) else "No"
        return (
            f"{report.format_heading('## Traffic & Web Server Summary')}\n\n"
            f"{report.format_subheading('### Web Server Status Codes and Bot Traffic Stats')}\n"
            "Based on 100,000 most-recent lines in access log.\n\n"
            f"{report.format_block_dim(section_outputs.get('web_stats_summary', ''))}\n"
            f"{section_outputs.get('web_version_block', '')}"
            f"{_format_label_value_bold_label('Has UltraStack', ultrastack_value)}\n\n"
            f"{report.format_subheading('### Apache Web Server Configuration:')}\n"
            f"{section_outputs.get('apache_conf_summary', '')}\n\n"
            f"{report.format_subheading('### CDN/Proxy Detection')}\n"
            f"{section_outputs.get('cdn_status_summary', '')}\n\n"
            f"{report.format_subheading('### Highest Traffic Apache Logs')}\n"
            f"{report.format_block_dim(section_outputs.get('largest_traffic_summary', ''))}"
        )
    except Exception:
        logger.exception("Failed to build 'Traffic' section")
        error_text = (
            "[Error] Failed to build traffic section. Please perform a manual review of "
            "web server status codes, traffic legitimacy, Apache configuration, CDN/proxy "
            "configuration, and Apache traffic logs. Please report this as a bug."
        )
        return report.format_error(error_text)
# --- PHP
# Matches PHP-FPM log lines mentioning the pm.max_children limit (either the
# full "reached max_children setting" warning or any bare "max_children").
_PHPFPM_MAX_CHILDREN_REGEX = re.compile(
    r"reached\s+max_children\s+setting|max_children",
    re.I,
)
# PHP-FPM log timestamp shapes; group 1 captures the date portion in each:
#   "[12-Jan-2025 03:04:05]"  (bracketed legacy format)
#   "[2025-01-12 03:04:05]"   (bracketed ISO format)
#   "2025-01-12 03:04:05 ..." (bare ISO at line start)
_PHPFPM_TIMESTAMP_BRACKETED = re.compile(r"\[(\d{2}-[A-Za-z]{3}-\d{4})\s+\d{2}:\d{2}:\d{2}\]")
_PHPFPM_TIMESTAMP_ISO_BRACKETED = re.compile(r"\[(\d{4}-\d{2}-\d{2})\s+\d{2}:\d{2}:\d{2}\]")
_PHPFPM_TIMESTAMP_ISO = re.compile(r"^(\d{4}-\d{2}-\d{2})\s+\d{2}:\d{2}:\d{2}\b")
def _php_short_to_semver(two_digit_version: str) -> str:
    """Turn a compact two-digit PHP version ("81") into dotted form ("8.1").

    Any value that is not exactly two digits is returned unchanged (as str).
    """
    compact = str(two_digit_version)
    if re.fullmatch(r"\d{2}", compact) is None:
        return compact
    major, minor = compact
    return f"{major}.{minor}"
def _php_try_cli_version() -> str:
    """Ask the first working PHP CLI binary for PHP_VERSION.

    Returns the dotted version string, or "[Unknown]" when no binary is
    available, one errors/times out, or its output doesn't look like a
    version number.
    """
    for candidate in ("php", "/usr/bin/php", "/usr/local/bin/php"):
        if shutil.which(candidate) is None:
            continue
        try:
            reported = subprocess.check_output(
                [candidate, "-r", "echo PHP_VERSION;"],
                text=True,
                timeout=3,
                stderr=subprocess.STDOUT,
            ).strip()
        except Exception:
            continue
        # Accept only output shaped like a dotted version number.
        if re.match(r"^\d+\.\d+\.\d+", reported):
            return reported
    return "[Unknown]"
def _php_write_probe_file(probe_path: str) -> bool:
    """Create a tiny PHP file that prints PHP_VERSION; True on success."""
    try:
        with open(probe_path, "w", encoding="utf-8") as probe_handle:
            probe_handle.write("<?php echo PHP_VERSION; ?>")
    except Exception as exc:
        logger.exception("Failed writing PHP probe file %s: %s", probe_path, exc)
        return False
    return True
def _php_remove_file(path_to_remove: str) -> None:
    """Best-effort delete of a temporary file; logs instead of raising."""
    if not path_to_remove:
        return
    try:
        if os.path.exists(path_to_remove):
            os.remove(path_to_remove)
    except Exception as exc:
        logger.warning("Failed to remove temporary file %s: %s", path_to_remove, exc)
def _php_cwp_http_probe(managed_domain: str, document_root: str) -> Optional[str]:
    """Detect the live PHP version by serving a probe file over HTTP.

    Drops a temporary PHP file (echoing PHP_VERSION) into the docroot, then
    requests it through the local web server via several curl strategies.
    Returns the version string on success, None otherwise; the probe file is
    always removed.
    """
    if not managed_domain or not document_root or not os.path.isdir(document_root):
        return None
    # PID-suffixed name avoids collisions between concurrent runs.
    temp_filename = f".spar_phpdetect_{os.getpid()}.php"
    temp_filepath = os.path.join(document_root, temp_filename)
    if not _php_write_probe_file(temp_filepath):
        return None
    try:
        # Three strategies, tried in order: --resolve pins the vhost name to
        # loopback on port 80; a Host header sent straight to 127.0.0.1; and
        # HTTPS pinned to loopback with certificate checks disabled (-k).
        http_probe_commands = [
            [
                "curl",
                "-4",
                "-sS",
                "-m",
                "4",
                "--fail",
                "--resolve",
                f"{managed_domain}:80:127.0.0.1",
                f"http://{managed_domain}/{temp_filename}",
            ],
            [
                "curl",
                "-4",
                "-sS",
                "-m",
                "4",
                "--fail",
                "-H",
                f"Host: {managed_domain}",
                f"http://127.0.0.1/{temp_filename}",
            ],
            [
                "curl",
                "-4",
                "-sS",
                "-m",
                "4",
                "--fail",
                "-k",
                "--resolve",
                f"{managed_domain}:443:127.0.0.1",
                f"https://{managed_domain}/{temp_filename}",
            ],
        ]
        for probe_command in http_probe_commands:
            try:
                probe_output = subprocess.check_output(
                    probe_command,
                    text=True,
                    stderr=subprocess.STDOUT,
                ).strip()
                # Accept only output shaped like a dotted version number.
                if re.match(r"^\d+\.\d+\.\d+", probe_output):
                    return probe_output
            except subprocess.CalledProcessError as cpe:
                logger.debug(
                    "HTTP probe failed: %s -> %s", " ".join(probe_command), cpe
                )
            except Exception as exc:
                logger.debug(
                    "HTTP probe exception: %s -> %s", " ".join(probe_command), exc
                )
    finally:
        # Always clean up the probe file, even on early return.
        _php_remove_file(temp_filepath)
    return None
def _php_cwp_nginx_vhost_probe(report) -> Optional[str]:
    """Infer the PHP version from the fastcgi_pass socket in an NGINX-only vhost.

    Applies only when the stack is NGINX without Apache.  Returns e.g. "8.1",
    or None when no vhost file or matching fastcgi_pass line exists.
    """
    stack = getattr(report, "web_stack", []) or []
    if "apache" in stack or "nginx" not in stack:
        return None
    vhost_file = getattr(report, "vhost_path", "") or get_nginx_vhost_path(report)
    if not vhost_file or not os.path.isfile(vhost_file):
        return None
    try:
        with open(vhost_file, "r", encoding="utf-8", errors="ignore") as handle:
            contents = handle.read()
    except Exception as exc:
        logger.exception("Failed parsing NGINX vhost for PHP version: %s", exc)
        return None
    # The two-digit version is embedded in the FPM socket path.
    socket_match = re.search(
        r"fastcgi_pass\s+unix:/opt/(?:alt/)?php-fpm(\d{2})/usr/var/sockets/[^\s;]+\.sock\s*;",
        contents,
        re.I,
    )
    return _php_short_to_semver(socket_match.group(1)) if socket_match else None
def _php_cwp_apache_vhost_probe(report) -> Optional[str]:
    """Infer the PHP version from an Apache CWP vhost file.

    Checks, in order: proxy:unix SetHandler FPM sockets, bare FPM socket
    paths, php-cgi binary paths, and application/x-httpd-phpNN handlers.
    Returns e.g. "7.4", or None when nothing matches.
    """
    stack = getattr(report, "web_stack", []) or []
    if "apache" not in stack:
        return None
    vhost_file = getattr(report, "vhost_path", "") or ""
    if not vhost_file or not os.path.isfile(vhost_file):
        return None
    try:
        with open(vhost_file, "r", encoding="utf-8", errors="ignore") as handle:
            contents = handle.read()
        # Most specific pattern first; each captures a two-digit version.
        version_patterns = (
            r'SetHandler\s+"?proxy:unix:/opt/alt/php-fpm(\d{2})/usr/var/sockets/[^"|]+\.sock\|fcgi://localhost"?',
            r"/opt/alt/php-fpm(\d{2})/usr/var/sockets/\S+\.sock",
            r"/opt/alt/php(\d{2})/usr/bin/php-cgi",
            r"application/x-httpd-php(\d{2})",
        )
        for pattern in version_patterns:
            found = re.search(pattern, contents, re.I)
            if found:
                return _php_short_to_semver(found.group(1))
        return None
    except Exception as exc:
        logger.exception("Failed parsing Apache vhost for PHP version: %s", exc)
        return None
def _php_cwp_pool_probe(report) -> Optional[str]:
    """Infer the PHP version from CWP per-user PHP-FPM pool files.

    Scans /opt/alt/php-fpm*/usr/etc/php-fpm.d/users/<owner>.conf.  When the
    NGINX vhost (or its .ssl.conf twin) names an FPM socket, the pool whose
    version matches that socket is preferred; otherwise the first pool found
    is used.  Returns e.g. "8.1", or None.
    """
    owner = getattr(report, "domain_owner", "") or ""
    if not owner:
        return None
    try:
        pool_files = sorted(
            glob.glob(f"/opt/alt/php-fpm*/usr/etc/php-fpm.d/users/{owner}.conf")
        )
        if not pool_files:
            return None
        versions_from_sockets: Set[str] = set()
        # Best-effort: any failure reading vhosts just leaves the set empty.
        try:
            vhost = getattr(report, "vhost_path", "") or get_nginx_vhost_path(report)
            ssl_variant = vhost.replace(".conf", ".ssl.conf") if vhost else ""
            for vhost_candidate in (vhost, ssl_variant):
                if not vhost_candidate or not os.path.isfile(vhost_candidate):
                    continue
                with open(vhost_candidate, "r", encoding="utf-8", errors="ignore") as handle:
                    vhost_text = handle.read()
                for socket_match in re.finditer(
                    r"/opt/(?:alt/)?php-fpm(\d{2})/usr/var/sockets/[^\s;]+\.sock",
                    vhost_text,
                    re.I,
                ):
                    versions_from_sockets.add(socket_match.group(1))
        except Exception:
            pass
        selected = None
        if versions_from_sockets:
            for pool_file in pool_files:
                pool_match = re.search(r"/opt/alt/php-fpm(\d{2})/", pool_file)
                if pool_match and pool_match.group(1) in versions_from_sockets:
                    selected = pool_file
                    break
        if not selected:
            selected = pool_files[0]
        version_match = re.search(r"/opt/alt/php-fpm(\d{2})/", selected)
        return _php_short_to_semver(version_match.group(1)) if version_match else None
    except Exception as exc:
        logger.exception("Error scanning CWP PHP-FPM pool files: %s", exc)
        return None
def _php_cwp_json_probe(report) -> Optional[str]:
    """Read the PHP-FPM version recorded in CWP's per-domain JSON config.

    Advisory source: reflects what CWP has recorded, not necessarily what is
    serving traffic right now.  Returns e.g. "8.1", or None.
    """
    owner = getattr(report, "domain_owner", "") or ""
    domain = getattr(report, "managed_domain", "") or ""
    if not owner or not domain:
        return None
    cwp_json_path = f"/home/{owner}/.conf/webservers/{domain}.conf"
    if not os.path.isfile(cwp_json_path):
        return None
    try:
        with open(cwp_json_path, "r", encoding="utf-8", errors="ignore") as handle:
            recorded = json.load(handle).get("php-fpm_ver")
        return _php_short_to_semver(str(recorded)) if recorded else None
    except Exception as exc:
        logger.exception("Failed reading CWP JSON %s: %s", cwp_json_path, exc)
        return None
def _php_cli_docroot_probe(docroot_path: str) -> Optional[str]:
    """Run a PHP_VERSION-printing probe file inside the docroot via the PHP CLI.

    Returns the dotted version string on success, None on any failure; the
    probe file is always removed.
    """
    if not docroot_path or not os.path.isdir(docroot_path):
        return None
    probe_name = f".spar_phpdetect_{os.getpid()}.php"
    probe_path = os.path.join(docroot_path, probe_name)
    if not _php_write_probe_file(probe_path):
        return None
    try:
        for candidate in ("php", "/usr/bin/php", "/usr/local/bin/php"):
            if not shutil.which(candidate):
                continue
            try:
                cli_result = subprocess.check_output(
                    [candidate, "-f", probe_path],
                    text=True,
                    timeout=3,
                    stderr=subprocess.STDOUT,
                ).strip()
            except subprocess.CalledProcessError as cli_cpe:
                logger.debug("%s failed: %s", candidate, cli_cpe)
                continue
            except Exception as cli_exception:
                logger.debug("%s exception: %s", candidate, cli_exception)
                continue
            # Accept only output shaped like a dotted version number.
            if re.match(r"^\d+\.\d+\.\d+", cli_result):
                return cli_result
        return None
    finally:
        # Always clean up the probe file, even on early return.
        _php_remove_file(probe_path)
def get_php_version(report):
    """Detect the PHP version serving report.managed_domain.

    Always records the system CLI version first (report.system_php_version).
    On CWP a cascade of probes is tried — live HTTP probe, NGINX vhost,
    Apache vhost, FPM pool file, CWP JSON — before falling back to the CLI
    docroot probe used for all other panels.  Sets report.php_version and
    returns it (an "[Error] ..." string on failure).
    """
    logger.info("Detecting PHP version for: %s", report.managed_domain)
    report.system_php_version = _php_try_cli_version()
    logger.info("System PHP Version (CLI): %s", report.system_php_version)
    if report.panel_type == "Control Web Panel":
        managed_domain = (getattr(report, "managed_domain", "") or "").strip()
        document_root = getattr(report, "docroot", "") or ""
        # Probe order: most authoritative (live HTTP response) first, then
        # progressively weaker config-derived sources.
        detected = _php_cwp_http_probe(managed_domain, document_root)
        if detected:
            report.php_version = detected
            logger.info("Detected PHP version %s via HTTP probe", detected)
            return report.php_version
        detected = _php_cwp_nginx_vhost_probe(report)
        if detected:
            report.php_version = detected
            logger.info("Detected PHP %s from NGINX vhost fastcgi_pass", detected)
            return report.php_version
        detected = _php_cwp_apache_vhost_probe(report)
        if detected:
            report.php_version = detected
            logger.info("Detected PHP %s from Apache vhost", detected)
            return report.php_version
        detected = _php_cwp_pool_probe(report)
        if detected:
            report.php_version = detected
            logger.info("Detected PHP %s from CWP pool file", detected)
            return report.php_version
        detected = _php_cwp_json_probe(report)
        if detected:
            report.php_version = detected
            logger.info("Detected PHP %s from CWP JSON (advisory)", detected)
            return report.php_version
        logger.debug("CWP detection fell through; using CLI probe")
    # Non-CWP panels (and CWP fall-through) use the CLI probe in the docroot.
    docroot_path = getattr(report, "docroot", "") or ""
    if not docroot_path or not os.path.isdir(docroot_path):
        error_message = "[Error] Docroot is invalid or not set. Do this manually."
        logger.error(error_message)
        report.php_version = error_message
        return error_message
    detected = _php_cli_docroot_probe(docroot_path)
    if detected:
        report.php_version = detected
        # The docroot probe doubles as a system-version source when the
        # plain CLI check failed earlier.
        if report.system_php_version == "[Unknown]":
            report.system_php_version = detected
            logger.info("System PHP Version (CLI): %s", report.system_php_version)
        logger.info("Detected PHP version %s via CLI probe", detected)
        return report.php_version
    report.php_version = "[Error] CLI detection failed, try manual method."
    logger.warning(report.php_version)
    return report.php_version
def get_php_handler(report):
    """Determine the PHP handler for the managed domain.

    Sets report.php_handler and report.has_php_fpm (True/False, or None when
    undeterminable) and returns the handler string.  Panel-specific logic:
    CWP inspects the per-domain FPM config then the Apache/NGINX vhost;
    cPanel checks the per-domain FPM userdata YAML then whmapi1
    php_get_handlers; UltraStack ONE is always php-fpm; Baremetal is skipped.
    """
    logger.info("Detecting PHP handler for: %s", report.managed_domain)
    if report.panel_type == "Control Web Panel":
        # A per-domain FPM config file is definitive proof of php-fpm.
        config_path = f"/home/{report.domain_owner}/.conf/webservers/{report.managed_domain}.conf"
        if os.path.isfile(config_path):
            report.php_handler = "php-fpm"
            report.has_php_fpm = True
            logger.info("Detected PHP-FPM in use via config file: %s", config_path)
            return report.php_handler
        report.has_php_fpm = False
        logger.info("PHP-FPM config not found. Checking active web service to determine handler...")
        try:
            if not report.web_stack:
                report.php_handler = "[Error] Web stack not detected"
                logger.error(report.php_handler)
                return report.php_handler
            if "nginx" in report.web_stack:
                # CWP + NGINX implies PHP-FPM
                report.php_handler = "php-fpm"
                report.has_php_fpm = True
                logger.info("NGINX configuration on CWP. Only php-fpm can be used in this case.")
                return report.php_handler
            if "apache" in report.web_stack:
                if not report.vhost_path or not os.path.isfile(report.vhost_path):
                    report.php_handler = "[Error] Apache vhost file not found"
                    report.has_php_fpm = False
                    logger.warning(report.php_handler)
                    return report.php_handler
                try:
                    with open(report.vhost_path, "r", encoding="utf-8", errors="replace") as vhost_file:
                        vhost_content = vhost_file.read()
                    # Handler fingerprints in the vhost, most specific first:
                    # suPHP, then DSO (SetHandler), then CGI (AddHandler).
                    if "mod_suphp.c" in vhost_content or "suPHP_UserGroup" in vhost_content:
                        report.php_handler = "suPHP"
                        report.has_php_fpm = False
                        logger.info("Detected suPHP handler from Apache vhost.")
                        return report.php_handler
                    if "SetHandler" in vhost_content and "application/x-httpd-php" in vhost_content:
                        report.php_handler = "dso"
                        report.has_php_fpm = False
                        logger.info("Detected DSO handler from Apache vhost.")
                        return report.php_handler
                    if "AddHandler" in vhost_content and "application/x-httpd-php" in vhost_content:
                        report.php_handler = "cgi"
                        report.has_php_fpm = False
                        logger.info("Detected CGI handler from Apache vhost.")
                        return report.php_handler
                    report.php_handler = "[Unknown] Handler not matched in Apache vhost"
                    report.has_php_fpm = False
                    logger.warning(report.php_handler)
                    return report.php_handler
                except Exception:
                    report.php_handler = "[Error] Failed to read Apache vhost"
                    report.has_php_fpm = False
                    logger.exception(report.php_handler)
                    return report.php_handler
            report.php_handler = f"[Unknown] Web service '{(report.port_80_service or '').lower()}' not handled"
            report.has_php_fpm = False
            logger.warning(report.php_handler)
            return report.php_handler
        except Exception:
            report.php_handler = "[Error] Exception during handler detection"
            report.has_php_fpm = False
            logger.exception(report.php_handler)
            return report.php_handler
    if report.panel_type == "cPanel":
        logger.debug("Retrieving PHP handler for cPanel system")
        # The whmapi1 lookup below needs php_version to derive the EA name.
        if not report.php_version:
            report.php_handler = "PHP version not set before handler detection, find this info manually."
            report.has_php_fpm = None
            logger.error("PHP version not set before handler detection, unable to continue with PHP Handler detection.")
            return report.php_handler
        try:
            # A per-domain FPM userdata YAML existing means php-fpm is active.
            fpm_config_path = f"/var/cpanel/userdata/{report.domain_owner}/{report.managed_domain}.php-fpm.yaml"
            if os.path.isfile(fpm_config_path):
                report.has_php_fpm = True
                report.php_handler = "php-fpm"
                logger.info("Detected PHP-FPM in use based on existence of: %s", fpm_config_path)
                return report.php_handler
            report.has_php_fpm = False
            logger.info("PHP-FPM not detected.")
            # Map e.g. "8.1" -> "ea-php81" to match whmapi1's version naming.
            version_match = re.match(r"(\d+)\.(\d+)", report.php_version)
            if version_match:
                ea_version = f"ea-php{version_match.group(1)}{version_match.group(2)}"
            else:
                report.php_handler = "Invalid PHP version format detected. You must find this information manually."
                report.has_php_fpm = None
                logger.error("Invalid PHP version format detected.")
                return report.php_handler
            logger.debug("Converted PHP version string to EA format %s", ea_version)
            whmapi_output = subprocess.check_output(
                ["whmapi1", "php_get_handlers", "--output=json"],
                text=True,
                stderr=subprocess.STDOUT,
                timeout=30,
            )
            whmapi_data = json.loads(whmapi_output)
            version_handlers = whmapi_data.get("data", {}).get("version_handlers", [])
            for handler_entry in version_handlers:
                if handler_entry.get("version") == ea_version:
                    current_handler = handler_entry.get("current_handler")
                    if current_handler:
                        report.php_handler = current_handler
                        report.has_php_fpm = current_handler.lower() == "php-fpm"
                        logger.info("Detected PHP handler for %s: %s", ea_version, report.php_handler)
                        return report.php_handler
                    break  # Version found, but no handler assigned
            report.php_handler = f"[Error] Handler not found for PHP version: {ea_version}"
            report.has_php_fpm = None
            logger.warning(report.php_handler)
            return report.php_handler
        except subprocess.CalledProcessError:
            report.php_handler = "[Error] Failed to run whmapi1 php_get_handlers"
            report.has_php_fpm = None
            logger.exception(report.php_handler)
            return report.php_handler
        except json.JSONDecodeError:
            report.php_handler = "[Error] Failed to parse JSON output from whmapi1"
            report.has_php_fpm = None
            logger.exception(report.php_handler)
            return report.php_handler
    if report.panel_type == "UltraStack ONE":
        report.php_handler = "php-fpm"
        report.has_php_fpm = True
        logger.info("Detected PHP handler for UltraStack ONE: php-fpm")
        return report.php_handler
    if report.panel_type == "Baremetal":
        report.php_handler = "[Unknown]"
        report.has_php_fpm = None
        logger.warning("Panel type is Baremetal - PHP handler detection skipped.")
        return report.php_handler
    # NOTE(review): this consistency fix-up only runs for unhandled panel
    # types — every recognized panel returned above — so it appears dead for
    # normal flows; confirm whether it was meant to run earlier.
    if isinstance(getattr(report, "php_handler", None), str) and report.php_handler.lower() == "php-fpm" and not report.has_php_fpm:
        logger.warning("Handler is php-fpm but has_php_fpm=False; correcting boolean for consistency.")
        report.has_php_fpm = True
    report.php_handler = "Unknown PHP Handler"
    logger.error("Unhandled panel type: %s", report.panel_type)
    return report.php_handler
def _phpfpm_read_yaml(path_to_yaml):
try:
if not path_to_yaml or not os.path.isfile(path_to_yaml):
return {}
with open(path_to_yaml, "r", encoding="utf-8", errors="ignore") as file_handle:
return yaml.safe_load(file_handle) or {}
except Exception:
logger.exception("Failed reading YAML: %s", path_to_yaml)
return {}
def _phpfpm_read_text(path_to_file):
try:
if not path_to_file or not os.path.isfile(path_to_file):
return ""
with open(path_to_file, "r", encoding="utf-8", errors="ignore") as file_handle:
return file_handle.read()
except Exception:
logger.exception("Failed reading file: %s", path_to_file)
return ""
def _phpfpm_get_any(mapping, *keys):
for key_name in keys:
if isinstance(mapping, dict) and key_name in mapping:
return mapping.get(key_name)
return None
def _phpfpm_extract_int(pattern, text_value):
try:
match = re.search(pattern, text_value or "")
return int(match.group(1)) if match else None
except Exception:
return None
def _phpfpm_cwp_effective(report):
    """Collect effective PHP-FPM pool settings for a CWP site plus server-wide totals.

    Returns (summary_text, total_max_children, total_fpm_pools); the totals are
    None whenever they cannot be computed.
    """
    children_total = None
    pools_total = None
    if not getattr(report, "php_version", None) or not getattr(report, "domain_owner", None):
        logger.error("PHP version or domain owner not set - cannot locate CWP PHP-FPM config.")
        return "PHP version or domain's user owner not set.", children_total, pools_total
    short_ver = _cwp_php_short_ver(getattr(report, "php_version"))
    conf_path = f"/opt/alt/php-fpm{short_ver}/usr/etc/php-fpm.d/users/{report.domain_owner}.conf"
    conf_text = _phpfpm_read_text(conf_path)
    if not conf_text:
        logger.error("CWP PHP-FPM config not found or unreadable: %s", conf_path)
        return f"PHP-FPM config file not found at {conf_path}", children_total, pools_total
    report.php_fpm_max_children = _phpfpm_extract_int(r"pm\.max_children\s*=\s*(\d+)", conf_text)
    report.php_fpm_max_requests = _phpfpm_extract_int(r"pm\.max_requests\s*=\s*(\d+)", conf_text)
    report.php_fpm_max_idle_timeout = _phpfpm_extract_int(r"pm\.process_idle_timeout\s*=\s*(\d+)", conf_text)
    summary = (
        f"Max Children: {report.php_fpm_max_children}\n"
        f"Max Requests: {report.php_fpm_max_requests}\n"
        f"Process Idle Timeout: {report.php_fpm_max_idle_timeout}"
    )
    accounts = getattr(report, "user_list", []) or []
    if not accounts:
        logger.warning("CWP user_list is empty on report; PHP-FPM totals may be incomplete.")
        return summary, children_total, pools_total
    # Sum pm.max_children across every user's pool file, any PHP build.
    running_sum = 0
    seen_pools = 0
    for account in accounts:
        for pool_conf in glob.glob(f"/opt/alt/php-fpm*/usr/etc/php-fpm.d/users/{account}.conf"):
            pool_body = _phpfpm_read_text(pool_conf)
            if not pool_body:
                continue
            children_setting = _phpfpm_extract_int(r"pm\.max_children\s*=\s*(\d+)", pool_body)
            if children_setting is None:
                continue
            running_sum += int(children_setting)
            seen_pools += 1
    if seen_pools:
        children_total = running_sum
        pools_total = seen_pools
        logger.info("CWP PHP-FPM totals: pools=%d, total pm.max_children=%d", seen_pools, running_sum)
    else:
        logger.warning("No CWP PHP-FPM pools detected while computing totals.")
    return summary, children_total, pools_total
def _phpfpm_cpanel_resolve(domain_mapping, pool_yaml, global_yaml):
    """Resolve FPM directives: domain YAML first, then pool defaults, then globals."""
    def cascade(key_underscore, key_dotted=None):
        # Accept both underscore and dotted key spellings; first non-None wins.
        lookup_keys = (key_underscore, key_dotted) if key_dotted else (key_underscore,)
        for source in (domain_mapping, pool_yaml, global_yaml):
            found = _phpfpm_get_any(source, *lookup_keys)
            if found is not None:
                return found
        return None
    return (
        cascade("pm_max_children", "pm.max_children"),
        cascade("pm_max_requests", "pm.max_requests"),
        cascade("pm_process_idle_timeout", "pm.process_idle_timeout"),
    )
def _phpfpm_cpanel_effective(report):
    """Resolve effective PHP-FPM settings for a cPanel domain plus server-wide totals.

    Reads the per-domain php-fpm YAML, falling back to the system pool defaults
    and then the global system YAML for each directive. Also walks every
    /var/cpanel/userdata/*/*.php-fpm.yaml pool to total pm.max_children.

    Returns:
        (summary_text, total_max_children, total_fpm_pools); the totals are
        None when they cannot be determined.
    """
    domain_owner = getattr(report, "domain_owner", None)
    managed_domain = getattr(report, "managed_domain", None)
    if not domain_owner or not managed_domain:
        return (
            "Domain owner or domain not set; cannot locate cPanel PHP-FPM config.",
            getattr(report, "total_max_children", None),
            getattr(report, "total_fpm_pools", None),
        )
    report.phpfpm_domain_yaml_path = f"/var/cpanel/userdata/{domain_owner}/{managed_domain}.php-fpm.yaml"
    report.phpfpm_domain_yaml = _phpfpm_read_yaml(report.phpfpm_domain_yaml_path)
    report.phpfpm_pool_defaults_yaml = _phpfpm_read_yaml("/var/cpanel/ApachePHPFPM/system_pool_defaults.yaml")
    report.phpfpm_globals_yaml = _phpfpm_read_yaml("/var/cpanel/ApachePHPFPM/system.yaml")
    if not report.phpfpm_domain_yaml and not report.phpfpm_pool_defaults_yaml and not report.phpfpm_globals_yaml:
        logger.error("No PHP-FPM YAML sources found (domain, pool defaults, or system).")
        return (
            f"PHP-FPM config file not found at {report.phpfpm_domain_yaml_path}",
            getattr(report, "total_max_children", None),
            getattr(report, "total_fpm_pools", None),
        )
    report.php_fpm_max_children, report.php_fpm_max_requests, report.php_fpm_max_idle_timeout = _phpfpm_cpanel_resolve(
        report.phpfpm_domain_yaml or {},
        report.phpfpm_pool_defaults_yaml,
        report.phpfpm_globals_yaml,
    )
    # Walk every pool on the server to compute aggregate capacity.
    report.phpfpm_total_children_sum = 0
    report.phpfpm_pool_count = 0
    for pool_path in glob.glob("/var/cpanel/userdata/*/*.php-fpm.yaml"):
        report.phpfpm_pool_domain_yaml = _phpfpm_read_yaml(pool_path)
        report.phpfpm_pool_max_children, _, _ = _phpfpm_cpanel_resolve(
            report.phpfpm_pool_domain_yaml or {},
            report.phpfpm_pool_defaults_yaml,
            report.phpfpm_globals_yaml,
        )
        if report.phpfpm_pool_max_children is None:
            continue
        try:
            report.phpfpm_total_children_sum += int(report.phpfpm_pool_max_children)
            report.phpfpm_pool_count += 1
        except Exception:
            # Non-numeric pm.max_children in a pool file; skip that pool.
            continue
    if report.phpfpm_pool_count > 0:
        report.total_max_children = report.phpfpm_total_children_sum
        report.total_fpm_pools = report.phpfpm_pool_count
        logger.info(
            "cPanel PHP-FPM totals: pools=%d, total pm.max_children=%d",
            report.phpfpm_pool_count,
            report.phpfpm_total_children_sum,
        )
    else:
        logger.warning("No cPanel PHP-FPM pools detected while computing totals.")
    summary = (
        f"Max Children: {report.php_fpm_max_children}\n"
        f"Max Requests: {report.php_fpm_max_requests}\n"
        f"Process Idle Timeout: {report.php_fpm_max_idle_timeout}"
    )
    # BUG FIX: previously returned report.total_max_children / total_fpm_pools
    # directly, which raised AttributeError when no pools were found and the
    # attributes had never been assigned; use getattr with a None default.
    return summary, getattr(report, "total_max_children", None), getattr(report, "total_fpm_pools", None)
def _phpfpm_ultrastack_effective(report):
    """Read the UltraStack ONE per-domain PHP-FPM pool file and summarize it.

    Returns (summary_text, total_max_children, total_fpm_pools); UltraStack
    hosts a single pool, so the pool total is 1 when a value is found.
    """
    pool_conf = f"/etc/php-fpm.d/{report.managed_domain}.conf"
    pool_text = _phpfpm_read_text(pool_conf)
    if not pool_text:
        logger.error("UltraStack PHP-FPM config not found: %s", pool_conf)
        return f"[Error] PHP-FPM config file not found at {pool_conf}", None, None
    report.php_fpm_max_children = _phpfpm_extract_int(r"pm\.max_children\s*=\s*(\d+)", pool_text)
    report.php_fpm_max_requests = _phpfpm_extract_int(r"pm\.max_requests\s*=\s*(\d+)", pool_text)
    report.php_fpm_max_idle_timeout = _phpfpm_extract_int(r"pm\.process_idle_timeout\s*=\s*(\d+)", pool_text)
    summary = (
        f"Max Children: {report.php_fpm_max_children}\n"
        f"Max Requests: {report.php_fpm_max_requests}\n"
        f"Process Idle Timeout: {report.php_fpm_max_idle_timeout}"
    )
    if report.php_fpm_max_children is None:
        return summary, None, None
    return summary, int(report.php_fpm_max_children), 1
def get_php_fpm_config(report):
    """Return a human-readable PHP-FPM configuration summary for the managed site.

    Dispatches to the panel-specific collector, records server-wide totals on
    the report when available, and returns a summary or notice/error string.
    """
    logger.info(
        "Gathering PHP-FPM configuration for %s on [%s]",
        report.managed_domain,
        report.panel_type,
    )
    handler_name = getattr(report, "php_handler", None)
    if (
        isinstance(handler_name, str)
        and handler_name.lower() == "php-fpm"
        and not getattr(report, "has_php_fpm", False)
    ):
        # Keep the boolean flag in sync with the detected handler string.
        logger.warning("php-fpm handler detected but has_php_fpm is False/None; correcting to True.")
        report.has_php_fpm = True
    if not getattr(report, "has_php_fpm", False):
        logger.warning("PHP-FPM is not in use for this site - skipping config retrieval.")
        return "[Notice] PHP-FPM not detected for this account."
    children_total = None
    pools_total = None
    try:
        panel = getattr(report, "panel_type", "") or ""
        if panel == "Control Web Panel":
            summary, children_total, pools_total = _phpfpm_cwp_effective(report)
        elif panel == "cPanel":
            summary, children_total, pools_total = _phpfpm_cpanel_effective(report)
        elif panel == "UltraStack ONE":
            summary, children_total, pools_total = _phpfpm_ultrastack_effective(report)
        elif panel == "Baremetal":
            logger.warning("Panel type is Baremetal - PHP-FPM configuration detection skipped.")
            summary = "[Notice] Baremetal PHP-FPM config detection not implemented."
        else:
            logger.error("Unknown panel type: %s", panel)
            summary = f"[Error] Unsupported panel type: {panel}"
    except Exception as exc:
        logger.exception("Failed to retrieve PHP-FPM configuration.")
        summary = f"[Error] Exception while retrieving PHP-FPM config: {exc}"
    # Only overwrite report totals when a collector produced real numbers.
    if children_total is not None:
        report.total_max_children = children_total
    if pools_total is not None:
        report.total_fpm_pools = pools_total
    return summary
def _phpfpm_extract_date_from_log_line(log_line: str) -> str:
    """Pull a date stamp from a PHP-FPM log line; 'unknown-date' when none matches."""
    text = log_line or ""
    # Try each known timestamp layout in order of likelihood.
    for compiled in (_PHPFPM_TIMESTAMP_BRACKETED, _PHPFPM_TIMESTAMP_ISO_BRACKETED, _PHPFPM_TIMESTAMP_ISO):
        found = compiled.search(text)
        if found:
            return found.group(1)
    return "unknown-date"
def _phpfpm_cpanel_package_from_version(version_string: str):
version_string = (version_string or "").strip()
if not version_string:
return None
if version_string.startswith("ea-php"):
return version_string
match_mm = re.match(r"(\d)\.(\d)", version_string)
if match_mm:
return f"ea-php{match_mm.group(1)}{match_mm.group(2)}"
match_dd = re.match(r"^(\d{2})$", version_string)
if match_dd:
return f"ea-php{match_dd.group(1)}"
return None
def _phpfpm_list_log_files(glob_patterns):
return sorted(
{
path
for pattern in (glob_patterns or [])
for path in glob.glob(pattern)
if os.path.isfile(path) and not path.endswith(".gz")
}
)
def _phpfpm_scan_error_logs(log_file_paths, pool_regex, domain_regex, date_to_hit_count):
    """Count pm.max_children events per date across the given log files.

    Mutates date_to_hit_count in place and returns the total matching events.
    A line counts when it mentions max_children AND matches either the pool
    regex (if provided) or the domain regex.
    """
    matched_events = 0
    for path in (log_file_paths or []):
        try:
            logger.info("Scanning file: %s", path)
            with open(path, "r", encoding="utf-8", errors="ignore") as handle:
                for raw_line in handle:
                    if not _PHPFPM_MAX_CHILDREN_REGEX.search(raw_line):
                        continue
                    pool_hit = bool(pool_regex and pool_regex.search(raw_line))
                    if not pool_hit and not domain_regex.search(raw_line):
                        continue
                    stamp = _phpfpm_extract_date_from_log_line(raw_line)
                    date_to_hit_count[stamp] = date_to_hit_count.get(stamp, 0) + 1
                    matched_events += 1
        except Exception:
            logger.exception("Failed reading %s", path)
    return matched_events
def get_php_fpm_errors(report):
    """Scan PHP-FPM error logs for pm.max_children saturation events on the managed domain.

    Returns:
        None when PHP-FPM is not in use, a notice/error string when logs are
        missing or no events matched, or a Markdown block with total hits and
        per-date counts (sorted by count descending, then date).
    """
    try:
        if not getattr(report, "has_php_fpm", False):
            return None
        managed_domain = (getattr(report, "managed_domain", "") or "").strip()
        if not managed_domain:
            return "[Error] managed_domain not set."
        php_version = (getattr(report, "php_version", "") or "").strip()
        panel = getattr(report, "panel_type", "") or ""
        owner = (getattr(report, "domain_owner", "") or "").strip()
        # Pool names in FPM logs are commonly the account user or the domain
        # with dots replaced by underscores (sometimes "www_"-prefixed).
        underscored = managed_domain.replace(".", "_")
        pool_candidates = {candidate for candidate in (owner, underscored, f"www_{underscored}") if candidate}
        pool_regex = (
            re.compile(r"\[pool\s+(?:%s)\]" % "|".join(map(re.escape, pool_candidates)), re.I)
            if pool_candidates
            else None
        )
        domain_regex = re.compile(re.escape(managed_domain), re.I)
        date_to_hit_count = {}
        log_patterns = []
        if panel == "cPanel":
            # cPanel keeps one error log per EA-PHP package; derive the package
            # name (e.g. ea-php81) from the detected site PHP version.
            report.phpfpm_package_stem = _phpfpm_cpanel_package_from_version(php_version)
            if not report.phpfpm_package_stem:
                logger.warning("PHP version not available; cannot resolve cPanel PHP-FPM log path precisely.")
                return f"No PHP-FPM error logs found for {managed_domain}."
            log_patterns = [
                f"/opt/cpanel/{report.phpfpm_package_stem}/root/usr/var/log/php-fpm/error.log",
                f"/opt/cpanel/{report.phpfpm_package_stem}/root/usr/var/log/php-fpm/error.log*",
            ]
            logger.info("PHP-FPM error log patterns (cPanel, %s): %s", report.phpfpm_package_stem, log_patterns)
        elif panel == "Control Web Panel":
            log_patterns = [
                "/opt/alt/php-fpm*/usr/var/log/php-fpm.log",
                "/opt/alt/php-fpm*/usr/var/log/php-fpm.log*",
            ]
            logger.info("PHP-FPM error log patterns (Control Web Panel): %s", log_patterns)
        elif panel == "UltraStack ONE":
            log_patterns = [
                "/var/log/php-fpm/error.log",
                "/var/log/php-fpm/error.log*",
            ]
            logger.info("PHP-FPM error log patterns (UltraStack ONE): %s", log_patterns)
        else:
            # Unknown panel: probe every known log location.
            log_patterns = [
                "/opt/cpanel/ea-php*/root/usr/var/log/php-fpm/error.log",
                "/opt/cpanel/ea-php*/root/usr/var/log/php-fpm/error.log*",
                "/opt/alt/php-fpm*/usr/var/log/php-fpm.log",
                "/opt/alt/php-fpm*/usr/var/log/php-fpm.log*",
                "/var/log/php-fpm/error.log",
                "/var/log/php-fpm/error.log*",
            ]
            logger.info("PHP-FPM error log patterns (fallback): %s", log_patterns)
        report.phpfpm_error_log_files = _phpfpm_list_log_files(log_patterns)
        logger.info(
            "Scanning %d PHP-FPM error log file(s) for %s",
            len(report.phpfpm_error_log_files),
            managed_domain,
        )
        if not report.phpfpm_error_log_files:
            return f"No PHP-FPM error logs found for {managed_domain}."
        report.phpfpm_total_max_children_events = _phpfpm_scan_error_logs(
            report.phpfpm_error_log_files,
            pool_regex,
            domain_regex,
            date_to_hit_count,
        )
        logger.info(
            "Total PHP-FPM pm.max_children events for %s: %d",
            managed_domain,
            report.phpfpm_total_max_children_events,
        )
        if report.phpfpm_total_max_children_events == 0:
            return f"No PHP-FPM Max Children events for {managed_domain} in current or rotated logs."
        # Sort by hit count (descending), then date, for the report listing.
        report.phpfpm_sorted_date_counts = sorted(date_to_hit_count.items(), key=lambda kv: (-kv[1], kv[0]))
        output_lines = [
            "### PHP-FPM Max Children Events",
            f"Total hits: {report.phpfpm_total_max_children_events}",
            "By date:",
        ]
        output_lines.extend(
            f"- {date_string}: {count_value}"
            for date_string, count_value in report.phpfpm_sorted_date_counts
        )
        return "\n".join(output_lines)
    except Exception:
        logger.exception("Unhandled exception in get_php_fpm_errors()")
        return "Unable to evaluate PHP-FPM errors from error logs; please review manually."
def get_php_fpm_max(report):
    """Estimate a safe pm.max_children value from total RAM and live FPM RSS usage.

    Budgets 60% of system memory for PHP-FPM, divides by the average resident
    size of running php-fpm processes, stores the result on
    report.max_fpm_children, and returns a multi-line summary string.
    """
    logger.info("Estimating safe PHP-FPM pm.max_children from memory usage")
    try:
        ram_mb = None
        if getattr(report, "mem_total_gb", None):
            ram_mb = float(report.mem_total_gb) * 1024.0
        if ram_mb is None:
            # Fall back to /proc/meminfo when the report has no memory figure.
            with open("/proc/meminfo", encoding="utf-8") as meminfo_file:
                pairs = {}
                for raw_line in meminfo_file:
                    if ":" in raw_line:
                        key, _, value = raw_line.strip().partition(":")
                        pairs[key] = value
            ram_mb = int(pairs.get("MemTotal", "0 kB").split()[0]) / 1024.0
        fpm_budget_mb = ram_mb * 0.60
        process_names = [
            "php-fpm",
            "php-fpm80",
            "php-fpm81",
            "php-fpm82",
            "php-fpm83",
            "php-fpm84",
        ]
        rss_samples = []
        try:
            ps_stdout = subprocess.check_output(
                ["ps", "--no-headers", "-o", "rss", "-C", ",".join(process_names)],
                text=True,
                stderr=subprocess.STDOUT,
                timeout=5,
            )
            rss_samples = [int(token) for token in ps_stdout.split() if token.isdigit()]
        except (subprocess.CalledProcessError, FileNotFoundError, subprocess.TimeoutExpired):
            # ps exits nonzero when no matching processes exist; treat as zero usage.
            logger.debug(
                "ps command failed or no PHP-FPM processes found while "
                "estimating pm.max_children"
            )
        avg_rss_mb = (sum(rss_samples) / len(rss_samples) / 1024.0) if rss_samples else 0.0
        report.max_fpm_children = int(fpm_budget_mb / avg_rss_mb) if avg_rss_mb > 0 else None
        recommended = "N/A" if report.max_fpm_children is None else str(report.max_fpm_children)
        return "\n".join(
            [
                f"Total RAM: {ram_mb:.2f} MB",
                f"Budget for PHP-FPM (60 percent): {fpm_budget_mb:.2f} MB",
                f"Average PHP-FPM process memory: {avg_rss_mb:.2f} MB",
                f"Recommended pm.max_children (single busy site): {recommended}",
            ]
        )
    except Exception:
        logger.exception("Failed to calculate PHP-FPM max children")
        return "[Error] Exception while estimating safe pm.max_children."
def _phpini_command_exists(command_name: str) -> bool:
return shutil.which(command_name) is not None
def _phpini_run_shell(command_text: str, timeout: int = 6) -> subprocess.CompletedProcess:
return subprocess.run(
command_text,
shell=True,
text=True,
capture_output=True,
timeout=timeout,
check=False,
)
def _phpini_detect_server_ip(report) -> Optional[str]:
server_ip_value = getattr(report, "server_ip", None)
if server_ip_value:
return server_ip_value
try:
output_text = _phpini_run_shell(
"ip route get 1.1.1.1 | awk -F'src ' 'NR==1{split($2,a,\" \");print a[1]}'",
timeout=3,
).stdout.strip()
return output_text or None
except Exception:
logger.debug("get_php_ini(): failed to detect server IP", exc_info=True)
return None
def _phpini_parse_tsv(tsv_content: str, directives: List[str]):
effective_values, meta = {}, {}
for line_text in (tsv_content or "").splitlines():
if "\t" not in line_text:
continue
key, value = line_text.split("\t", 1)
if key in ("php_ini_loaded_file", "user_ini.filename", "sapi"):
meta[key] = value
elif key in directives:
effective_values[key] = value
return effective_values, meta
def _phpini_nearest_user_ini(start_dir: str) -> Optional[str]:
current_dir = os.path.abspath(start_dir)
while True:
candidate_path = os.path.join(current_dir, ".user.ini")
if os.path.isfile(candidate_path):
return candidate_path
parent_dir = os.path.dirname(current_dir)
if parent_dir == current_dir:
return None
current_dir = parent_dir
def _phpini_htaccess_sets_php_values(htaccess_path: str) -> bool:
if not os.path.isfile(htaccess_path):
return False
try:
with open(htaccess_path, "r", encoding="utf-8", errors="ignore") as htaccess_file:
for line_text in htaccess_file:
if re.search(r"^\s*php_(?:value|flag)\s+\S+", line_text):
return True
except Exception:
logger.debug("get_php_ini(): failed reading .htaccess at %s", htaccess_path, exc_info=True)
return False
def _phpini_build_probe_body(directives: List[str]) -> str:
keys_php = ", ".join(f'"{directive}"' for directive in directives)
return textwrap.dedent(
f"""\
<?php
@header('Content-Type: text/plain');
function norm($k, $v) {{
if ($v === false || $v === '') return 'Off';
$lv = strtolower(trim((string)$v));
if ($lv === '1' || $lv === 'on' || $lv === 'true' || $lv === 'yes') return 'On';
if ($lv === '0' || $lv === 'off' || $lv === 'false' || $lv === 'no') return 'Off';
return (string)$v;
}}
$keys = array({keys_php});
foreach ($keys as $k) {{
$raw = ini_get($k);
echo $k, "\\t", norm($k, $raw), "\\n";
}}
echo "php_ini_loaded_file\\t", (function_exists('php_ini_loaded_file') ? (php_ini_loaded_file() ?: "N/A") : "N/A"), "\\n";
echo "user_ini.filename\\t", (ini_get('user_ini.filename') !== false && ini_get('user_ini.filename') !== '' ? ini_get('user_ini.filename') : "N/A"), "\\n";
echo "sapi\\t", PHP_SAPI, "\\n";
"""
)
def _phpini_write_probe(probe_path: str, probe_body: str) -> Optional[str]:
try:
with open(probe_path, "w", encoding="utf-8") as probe_file:
probe_file.write(probe_body)
os.chmod(probe_path, 0o644)
return None
except Exception as exc:
return str(exc)
def _phpini_fetch_over_http(domain: str, probe_name: str, user_agent: str, server_ip: Optional[str]):
    """Fetch the probe script via curl, trying HTTPS first, then HTTP.

    Returns (body_text, http_code): body_text is "" when both schemes fail;
    http_code is the last observed HTTP status string, or None.
    """
    runtime_raw = ""
    http_code = None
    if not domain or not _phpini_command_exists("curl"):
        return runtime_raw, http_code
    for scheme in ("https", "http"):
        try:
            resolve_flag = ""
            if server_ip:
                # Pin DNS resolution to this server so the probe hits the local
                # vhost even if the domain's public DNS points elsewhere.
                resolve_flag = f"--resolve {domain}:443:{server_ip} --resolve {domain}:80:{server_ip}"
            # NOTE(review): domain/user_agent are interpolated into a shell
            # string (shell=True downstream); values appear to come from the
            # report object — confirm they are trusted before reusing this
            # helper with externally supplied input.
            command_text = (
                f'curl -fsS -A "{user_agent}" '
                f'-H "Accept: text/plain,*/*;q=0.1" '
                f'-H "Referer: {scheme}://{domain}/" '
                f'-w "\\n__CODE__=%{{http_code}}" --connect-timeout 3 --max-time 6 {resolve_flag} '
                f'"{scheme}://{domain}/{probe_name}"'
            )
            curl_result = _phpini_run_shell(command_text, timeout=8)
            stdout_text = curl_result.stdout or ""
            # The -w marker separates the response body from the status code.
            if "\n__CODE__=" in stdout_text:
                body, code_line = stdout_text.rsplit("\n__CODE__=", 1)
                http_code = code_line.strip()
            else:
                body = stdout_text
            if body.strip():
                runtime_raw = body
                logger.debug("get_php_ini(): HTTP %s code=%s", scheme.upper(), http_code or "N/A")
                break
            logger.debug(
                "get_php_ini(): HTTP %s failed (code=%s, rc=%s, stderr=%r)",
                scheme.upper(),
                http_code,
                curl_result.returncode,
                (curl_result.stderr or "")[:200],
            )
        except Exception:
            logger.debug("get_php_ini(): HTTP %s probe threw", scheme.upper(), exc_info=True)
    return runtime_raw, http_code
def _phpini_guess_fpm_socket(report, docroot_path: str) -> Optional[str]:
fpm_socket_path = getattr(report, "fpm_socket", None)
if fpm_socket_path:
return fpm_socket_path
docroot_match = re.match(r"^/home/([^/]+)/", docroot_path)
user_name = docroot_match.group(1) if docroot_match else None
socket_candidates = []
socket_candidates += glob.glob("/opt/cpanel/ea-php*/root/usr/var/run/php-fpm/*.sock")
socket_candidates += glob.glob("/opt/cpanel/ea-php*/root/var/run/php-fpm/*.sock")
if user_name:
socket_candidates += glob.glob(f"/opt/alt/php-fpm*/usr/var/sockets/{user_name}.sock")
socket_candidates += glob.glob("/run/php-fpm/*.sock")
socket_candidates += glob.glob("/var/run/php-fpm/*.sock")
return socket_candidates[0] if socket_candidates else None
def _phpini_fetch_over_fcgi(probe_path: str, probe_name: str, fpm_socket_path: str) -> str:
if not fpm_socket_path or not _phpini_command_exists("cgi-fcgi"):
return ""
try:
env = os.environ.copy()
env.update(
{
"REQUEST_METHOD": "GET",
"SCRIPT_FILENAME": probe_path,
"SCRIPT_NAME": "/" + probe_name,
}
)
fcgi_result = subprocess.run(
["cgi-fcgi", "-bind", "-connect", fpm_socket_path],
text=True,
capture_output=True,
timeout=8,
env=env,
check=False,
)
body = fcgi_result.stdout or ""
if "\r\n\r\n" in body:
body = body.split("\r\n\r\n", 1)[1]
elif "\n\n" in body:
body = body.split("\n\n", 1)[1]
return body.strip()
except Exception:
logger.debug("get_php_ini(): FastCGI probe threw", exc_info=True)
return ""
def _phpini_cleanup_probe(probe_path: str) -> None:
    """Best-effort removal of the temporary probe script; failures are only logged."""
    try:
        os.remove(probe_path)
    except Exception:
        logger.debug("get_php_ini(): failed to remove probe file %s", probe_path, exc_info=True)
    else:
        logger.debug("get_php_ini(): removed probe file %s", probe_path)
def _phpini_determine_active_path(docroot_path: str, meta: dict) -> str:
    """Decide which config file actively governs PHP settings for this docroot."""
    sapi_name = (meta.get("sapi") or "").strip().lower()
    loaded_ini = meta.get("php_ini_loaded_file", "N/A")
    filename_setting = meta.get("user_ini.filename")
    per_dir_enabled = filename_setting not in (None, "", "N/A", "0")
    nearest_user_ini = _phpini_nearest_user_ini(docroot_path) if per_dir_enabled else None
    htaccess_file = os.path.join(docroot_path, ".htaccess")
    if sapi_name == "apache2handler":
        # mod_php honors php_value/php_flag in .htaccess; otherwise the master ini applies.
        if _phpini_htaccess_sets_php_values(htaccess_file):
            return htaccess_file
        return loaded_ini
    return nearest_user_ini or loaded_ini
def get_php_ini(report):
    """Collect effective php.ini values for the site by executing a temporary PHP probe.

    Writes a short PHP script into the docroot, fetches it over HTTP (falling
    back to FastCGI through the FPM socket), always removes it, then parses
    the TSV output and determines which config file actively governs the
    settings.

    Returns:
        The rendered summary string, or an "[Error] ..." string on failure.
    """
    docroot_path = getattr(report, "docroot", None)
    if not docroot_path or not os.path.isdir(docroot_path):
        logger.error("get_php_ini(): invalid report.docroot=%r", docroot_path)
        return "[Error] get_php_ini(): invalid report.docroot"
    domain_name = getattr(report, "managed_domain", None) or getattr(report, "domain", None)
    logger.debug(
        "get_php_ini(): start (docroot=%s, domain=%s)",
        docroot_path,
        domain_name,
    )
    directives = [
        "memory_limit",
        "post_max_size",
        "upload_max_filesize",
        "max_execution_time",
        "max_input_time",
        "max_input_vars",
        "display_errors",
        "zlib.output_compression",
        "allow_url_fopen",
    ]
    # PID-stamped name avoids collisions with concurrent runs.
    probe_name = f"spar_iniget_{os.getpid()}.php"
    probe_path = os.path.join(docroot_path, probe_name)
    write_error = _phpini_write_probe(probe_path, _phpini_build_probe_body(directives))
    if write_error:
        # BUG FIX: logger.exception() is only valid inside an exception
        # handler; the failure arrives here as a returned string, so log it as
        # a plain error instead of emitting a bogus "NoneType: None" traceback.
        logger.error("get_php_ini(): cannot write probe: %s", write_error)
        return f"[Error] get_php_ini(): cannot write probe: {write_error}"
    logger.debug("get_php_ini(): wrote probe file %s", probe_path)
    runtime_raw = ""
    http_code = None
    try:
        runtime_raw, http_code = _phpini_fetch_over_http(
            domain_name,
            probe_name,
            getattr(
                report,
                "http_user_agent",
                "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
                "AppleWebKit/537.36 (KHTML, like Gecko) "
                "Chrome/124.0.0.0 Safari/537.36",
            ),
            _phpini_detect_server_ip(report),
        )
        if not runtime_raw:
            # HTTP failed; try talking to PHP-FPM directly over its socket.
            fpm_socket_path = _phpini_guess_fpm_socket(report, docroot_path)
            if fpm_socket_path:
                logger.debug("get_php_ini(): trying FastCGI probe (socket=%s)", fpm_socket_path)
                runtime_raw = _phpini_fetch_over_fcgi(probe_path, probe_name, fpm_socket_path)
                if runtime_raw:
                    logger.debug("get_php_ini(): FastCGI probe succeeded")
                else:
                    logger.debug("get_php_ini(): FastCGI probe empty")
            else:
                logger.debug("get_php_ini(): skipping FastCGI (no socket)")
    finally:
        # Never leave an executable probe file behind in the docroot.
        _phpini_cleanup_probe(probe_path)
    if not runtime_raw:
        logger.warning("get_php_ini(): probe failed. http_code=%s domain=%s", http_code, domain_name)
        return "[Error] Unable to fetch effective php.ini values"
    effective_values, meta = _phpini_parse_tsv(runtime_raw, directives)
    logger.info(
        "get_php_ini(): got %d directives (sapi=%s, master_ini=%s)",
        len(effective_values),
        meta.get("sapi", "N/A"),
        meta.get("php_ini_loaded_file", "N/A"),
    )
    active_path = _phpini_determine_active_path(docroot_path, meta)
    logger.debug(
        "get_php_ini(): SAPI=%s; active=%s",
        ((meta.get("sapi") or "").strip().lower() or "unknown"),
        active_path or "N/A",
    )
    logger.debug("get_php_ini(): summary built")
    return report.phpini_render_summary(directives, effective_values, active_path)
def php_build_section(report):
    """Build the Markdown 'PHP' report section (version, handler, ini, FPM details).

    Advances the section progress bar as each sub-step completes and returns
    the fully formatted section text, or a formatted error string on failure.
    """
    logger.debug("Building 'PHP' section")
    # Five progress ticks when FPM details will be gathered, otherwise three.
    section_progress = SectionProgress(
        report.progress,
        (report.section_index, report.sections_total),
        5 if getattr(report, "has_php_fpm", False) else 3,
        report.section_name,
    )
    def _format_label_only(label_text: str) -> str:
        # Render "Label:" in bold white, normalizing any trailing colon first.
        label = f"{str(label_text).rstrip(':')}:"
        return report.apply_color("white", report.apply_color("bold", label))
    def _format_value(value, value_style: str) -> str:
        # Render a value with the given style; empty/None becomes "Unknown".
        value_text = "" if value is None else str(value)
        if not value_text:
            value_text = "Unknown"
        return report.apply_color(value_style, value_text)
    def _format_kv_line(line_text: str, value_style: str = "white") -> str:
        # Colorize one "Label: value" (or "Label  value") line, preserving any
        # leading list-bullet prefix; non-KV lines get only the value style.
        if line_text is None:
            return ""
        original = str(line_text).rstrip("\n")
        if not original.strip():
            return original
        prefix_match = re.match(r"^(\s*(?:[-*]\s+)?)", original)
        prefix = prefix_match.group(1) if prefix_match else ""
        remainder = original[len(prefix):]
        if ":" in remainder:
            left, right = remainder.split(":", 1)
            if right.strip():
                label_colored = _format_label_only(left)
                value_colored = _format_value(right.strip(), value_style)
                return f"{prefix}{label_colored} {value_colored}"
        # Fallback layout: "label<2+ spaces>value".
        spaced_match = re.match(r"^(\S+)\s{2,}(.+)$", remainder)
        if spaced_match:
            label_colored = report.apply_color("white", report.apply_color("bold", spaced_match.group(1)))
            value_colored = _format_value(spaced_match.group(2).strip(), value_style)
            return f"{prefix}{label_colored} {value_colored}"
        return report.apply_color(value_style, original)
    def _format_kv_block(block_text: str, value_style: str = "white") -> str:
        # Apply _format_kv_line to every line of a multi-line block.
        text = "" if block_text is None else str(block_text)
        return "\n".join(_format_kv_line(line, value_style=value_style) for line in text.splitlines())
    try:
        if not getattr(report, "docroot", None) or not os.path.isdir(getattr(report, "docroot")):
            logger.info("php_build_section: docroot not set or invalid, attempting discovery")
            # Best-effort discovery chain; each step is optional and logged on failure.
            try:
                if getattr(report, "panel_type", "") == "cPanel" and not getattr(report, "domain_owner", ""):
                    get_user_stats(report)
            except Exception:
                logger.exception("php_build_section: failed to gather user stats before docroot detection")
            try:
                get_web_stack(report)
            except Exception:
                logger.exception("php_build_section: get_web_stack() failed during PHP section")
            try:
                get_docroot(report)
            except Exception:
                logger.exception("php_build_section: get_docroot() failed during PHP section")
        section_progress.tick("Detecting PHP version")
        report.site_php_version = get_php_version(report)
        section_progress.tick("Detecting PHP handler")
        report.php_handler = get_php_handler(report)
        section_progress.tick("Fetching effective php.ini values")
        report.php_ini_summary = get_php_ini(report)
        section_outputs = {"php_fpm_block": ""}
        if getattr(report, "has_php_fpm", False):
            section_progress.tick("Fetching PHP-FPM config")
            report.php_fpm_config = get_php_fpm_config(report)
            section_progress.tick("Estimating safe pm.max_children")
            report.php_fpm_max_summary = get_php_fpm_max(report)
            # Server-wide summary lines are optional; omit any that are unset.
            report.php_fpm_server_summary_text = "\n".join(
                line
                for line in (
                    (
                        f"Total PHP-FPM pools on server: {getattr(report, 'total_fpm_pools', None)}"
                        if getattr(report, "total_fpm_pools", None) is not None
                        else ""
                    ),
                    (
                        f"Total Max Children: {getattr(report, 'total_max_children', None)}"
                        if getattr(report, "total_max_children", None) is not None
                        else ""
                    ),
                    (
                        f"Total system users: {getattr(report, 'user_count', None)}"
                        if getattr(report, "user_count", None) is not None
                        else ""
                    ),
                )
                if line
            ).strip()
            report.php_fpm_errors = get_php_fpm_errors(report) or ""
            # Drop the duplicate heading; this section adds its own below.
            if (report.php_fpm_errors or "").lstrip().startswith("### PHP-FPM Max Children Events"):
                report.php_fpm_errors = "\n".join(report.php_fpm_errors.splitlines()[1:]).lstrip()
            section_outputs["php_fpm_block"] = (
                "\n\n"
                + report.format_subheading("### PHP-FPM Configuration")
                + "\n"
                + _format_kv_block(getattr(report, "php_fpm_config", ""), value_style="white")
                + "\n\n"
                + report.format_subheading("### PHP-FPM Estimated Max Children")
                + "\n"
                + _format_kv_block(getattr(report, "php_fpm_max_summary", ""), value_style="white")
                + (
                    "\n\n"
                    + report.format_subheading("### PHP-FPM Server-wide Summary")
                    + "\n"
                    + _format_kv_block(getattr(report, "php_fpm_server_summary_text", ""), value_style="dim")
                    if getattr(report, "php_fpm_server_summary_text", "")
                    else ""
                )
                + (
                    "\n\n"
                    + report.format_subheading("### PHP-FPM Max Children Events")
                    + "\n"
                    + _format_kv_block(getattr(report, "php_fpm_errors", ""), value_style="dim")
                    if getattr(report, "php_fpm_errors", "")
                    else ""
                )
            )
        return (
            "\n".join(
                [
                    report.format_heading("## PHP Summary"),
                    report.format_label_value(
                        "System PHP Version",
                        getattr(report, "system_php_version", getattr(report, "site_php_version", "Unknown")),
                    ),
                    report.format_label_value(
                        "Site PHP Version",
                        getattr(report, "site_php_version", "Unknown"),
                    ),
                    report.format_label_value(
                        "PHP Handler",
                        getattr(report, "php_handler", "Unknown"),
                    ),
                    "",
                    report.format_subheading("### PHP INI Values"),
                    _format_kv_block(getattr(report, "php_ini_summary", ""), value_style="white"),
                ]
            )
            + section_outputs["php_fpm_block"]
        )
    except Exception:
        logger.exception("Failed to build PHP section")
        return report.format_error(
            "[Error] Failed to build PHP section. Please review PHP version, handler, "
            "PHP-FPM configuration, and php.ini values manually. Please report this as a bug."
        )
# --- Database
def get_dbms_version(report):
    """Detect the installed database server family and major.minor version.

    Tries the server daemon, then the client, then the admin tool, parsing the
    first version string found. Stores the result on report.dbms_version and
    returns it; returns a manual-check message when nothing can be parsed.
    """
    def find_first_binary(*candidate_binaries):
        # Return the first candidate name that resolves on PATH, else None.
        for binary_name in candidate_binaries:
            binary_path = shutil.which(binary_name)
            if binary_path:
                logger.debug("Found database binary: %s (%s)", binary_name, binary_path)
                return binary_name
        return None
    def execute_command(command, timeout=8):
        # Run a command and return its stdout (or stderr) text; "" on any failure.
        try:
            logger.debug("Running command: %s", " ".join(command))
            result = subprocess.run(
                command,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                text=True,
                timeout=timeout,
                check=False,
            )
            command_output = (result.stdout or result.stderr or "").strip()
            logger.debug("Command exit=%s, bytes=%s", result.returncode, len(command_output))
            return command_output
        except subprocess.TimeoutExpired:
            logger.warning("Command timed out: %s", " ".join(command))
            return ""
        except Exception as exc:
            logger.exception("Command failed: %s (%s)", " ".join(command), exc)
            return ""
    def extract_version_info(command_output):
        # Parse (family, "major.minor") out of a version banner, or None.
        if not command_output:
            return None
        dbms_family = "MariaDB" if re.search(r"mariadb", command_output, re.I) else "MySQL"
        version_match = re.search(r"(\d+\.\d+\.\d+|\d+\.\d+)", command_output)
        if not version_match:
            logger.debug("No version string found in output.")
            return None
        version_string = version_match.group(1)
        major_minor_version = ".".join(version_string.split(".")[:2])
        return dbms_family, major_minor_version
    # The three detection stanzas were copy-pasted; drive them from a table
    # instead (candidate binaries, version arguments, log label).
    probes = (
        (("mariadbd", "mysqld"), ["--version"], "server binary"),
        (("mariadb", "mysql"), ["--version"], "client binary"),
        (("mariadb-admin", "mysqladmin"), ["version"], "admin tool"),
    )
    for candidates, version_args, label in probes:
        binary = find_first_binary(*candidates)
        if not binary:
            continue
        version_info = extract_version_info(execute_command([binary] + version_args))
        if version_info:
            dbms_family, version = version_info
            report.dbms_version = f"{dbms_family} {version}"
            logger.info("Detected DBMS from %s '%s': %s", label, binary, report.dbms_version)
            return report.dbms_version
        logger.debug("No parsable version from '%s' output.", binary)
    logger.warning("Database server version could not be determined.")
    return "Unable to detect, do this manually."
def get_mysql_connection(database=None):
    """Open a pymysql connection using root credentials from /root/.my.cnf.

    Rows come back as dicts (DictCursor); ``database`` optionally selects
    a default schema.
    """
    logger.info("Attempting database connection to %s", database)
    connection_kwargs = {
        "read_default_file": "/root/.my.cnf",
        "database": database,
        "cursorclass": pymysql.cursors.DictCursor,
    }
    return pymysql.connect(**connection_kwargs)
def get_mysqltuner_opt_in(report):
    """
    Ask the operator whether MySQLTuner should be installed and/or run.

    The prompt wording depends on whether the tool is already present.
    Results are stored on:
        report.mysqltuner_opt_in (True/False)
        report.mysqltuner_install (True/False)
    """
    already_installed = find_mysqltuner_path() is not None
    if already_installed:
        prompt = "MySQLTuner is installed. Run it? [y/N]: "
    else:
        prompt = "MySQLTuner is not installed. Install and run it? [y/N]: "
    try:
        answer = input(prompt).strip().lower()
    except EOFError:
        # Non-interactive session: treat as a "no".
        answer = ""
    wants_run = answer in {"y", "yes"}
    # Install only when the tool is absent and the operator said yes.
    report.mysqltuner_install = wants_run and not already_installed
    report.mysqltuner_opt_in = wants_run
def find_mysqltuner_path():
    """Return an executable mysqltuner path, or None when not installed."""
    from_path = shutil.which("mysqltuner")
    if from_path:
        return from_path
    # PATH may be stripped (cron, sudo), so probe the usual install spots too.
    well_known_locations = (
        "/usr/local/bin/mysqltuner",
        "/usr/bin/mysqltuner",
        "/usr/local/sbin/mysqltuner",
        "/usr/sbin/mysqltuner",
    )
    for location in well_known_locations:
        if os.path.exists(location) and os.access(location, os.X_OK):
            return location
    return None
def _mysqltuner_strip_empty_sections(text):
ansi_re = re.compile(r"\x1b\[[0-9;]*m")
header_re = re.compile(r"^-{4,}\s*.+?\s*-{4,}\s*$")
lines = (text or "").splitlines()
kept = []
index = 0
while index < len(lines):
line = lines[index]
plain = ansi_re.sub("", line)
if header_re.match(plain):
next_index = index + 1
block = []
while next_index < len(lines):
next_plain = ansi_re.sub("", lines[next_index])
if header_re.match(next_plain):
break
block.append(lines[next_index])
next_index += 1
has_content = any(ansi_re.sub("", b).strip() for b in block)
if has_content:
kept.append(line)
kept.extend(block)
index = next_index
continue
kept.append(line)
index += 1
cleaned = []
blank_run = 0
for line in kept:
if ansi_re.sub("", line).strip() == "":
blank_run += 1
if blank_run <= 1:
cleaned.append(line)
else:
blank_run = 0
cleaned.append(line)
return "\n".join(cleaned).strip()
def get_mysql_tuner(report):
    """Install (optionally) and run MySQLTuner, returning its cleaned output.

    Honors the operator choices recorded by get_mysqltuner_opt_in():
    ``report.mysqltuner_install`` and ``report.mysqltuner_opt_in``. When an
    install is requested and no system copy exists, the script is fetched
    with curl to /usr/local/bin/sparta-mysqltuner. Returns either the tuner
    report text (empty sections stripped) or a human-readable status/error
    message.
    """
    install_requested = getattr(report, "mysqltuner_install", False)
    run_requested = getattr(report, "mysqltuner_opt_in", False)
    if not install_requested and not run_requested:
        return "MySQLTuner was skipped at user request."
    tuner_path = find_mysqltuner_path()
    sparta_tuner_path = "/usr/local/bin/sparta-mysqltuner"
    # Prefer a copy this tool installed on a previous run when PATH has none.
    if not tuner_path and os.path.isfile(sparta_tuner_path) and os.access(sparta_tuner_path, os.X_OK):
        tuner_path = sparta_tuner_path
    if install_requested and not tuner_path:
        pkg_manager_path = shutil.which("apt-get") or shutil.which("dnf") or shutil.which("yum")
        def install_curl_if_missing():
            # curl is needed to fetch the script; try the native package manager.
            if shutil.which("curl"):
                return True
            if not pkg_manager_path:
                return False
            try:
                if os.path.basename(pkg_manager_path) == "apt-get":
                    # Debian/Ubuntu: refresh the index and suppress interactive prompts.
                    apt_env = dict(os.environ, DEBIAN_FRONTEND="noninteractive")
                    subprocess.run(
                        ["apt-get", "update"],
                        text=True,
                        capture_output=True,
                        env=apt_env,
                        check=False,
                    )
                    subprocess.run(
                        ["apt-get", "install", "-y", "curl"],
                        text=True,
                        capture_output=True,
                        env=apt_env,
                        check=False,
                    )
                else:
                    subprocess.run(
                        [pkg_manager_path, "install", "-y", "curl"],
                        text=True,
                        capture_output=True,
                        check=False,
                    )
            except Exception:
                logger.exception("Failed while attempting to install curl")
            # Success is judged by curl actually being on PATH afterwards.
            return bool(shutil.which("curl"))
        if not install_curl_if_missing():
            return "curl not found and could not be installed; cannot install mysqltuner."
        try:
            url = "https://raw.githubusercontent.com/major/MySQLTuner-perl/master/mysqltuner.pl"
            curl_result = subprocess.run(
                ["curl", "-fsSL", url, "-o", sparta_tuner_path],
                text=True,
                capture_output=True,
                check=False,
            )
            if curl_result.returncode != 0:
                combined = (curl_result.stdout or "") + (curl_result.stderr or "")
                combined = combined.strip()
                return f"Failed to download mysqltuner script from GitHub.\n{combined}" if combined else "Failed to download mysqltuner script from GitHub."
            chmod_result = subprocess.run(
                ["chmod", "755", sparta_tuner_path],
                text=True,
                capture_output=True,
                check=False,
            )
            if chmod_result.returncode != 0:
                combined = (chmod_result.stdout or "") + (chmod_result.stderr or "")
                combined = combined.strip()
                return f"Failed to chmod mysqltuner to 755.\n{combined}" if combined else "Failed to chmod mysqltuner to 755."
            tuner_path = sparta_tuner_path
        except Exception as exc:
            return f"Error during mysqltuner installation: {exc}"
    if not tuner_path:
        # Re-check both locations in case an install just completed.
        tuner_path = find_mysqltuner_path()
        if not tuner_path and os.path.isfile(sparta_tuner_path) and os.access(sparta_tuner_path, os.X_OK):
            tuner_path = sparta_tuner_path
    if not tuner_path:
        return "MySQLTuner could not be installed."
    if not run_requested:
        return f"MySQLTuner is installed at {tuner_path}, but user chose not to run it."
    logger.info("Running mysqltuner")
    # Feed the tuner explicit RAM/swap figures (MB) taken from the report;
    # anything unparsable degrades to 0 rather than aborting the run.
    try:
        mem_mb = max(0, int(round(float(getattr(report, "mem_total_gb", 0) or 0) * 1024)))
    except Exception:
        mem_mb = 0
    try:
        swap_mb = max(0, int(round(float(getattr(report, "mem_swap_gb", 0) or 0) * 1024)))
    except Exception:
        swap_mb = 0
    run_result = subprocess.run(
        [
            tuner_path,
            "--forcemem",
            str(mem_mb),
            "--forceswap",
            str(swap_mb),
            "--nogood",
            "--noinfo",
            "--color",
        ],
        text=True,
        capture_output=True,
        input="\n",  # satisfies any interactive prompt the script may raise
        check=False,
    )
    combined_output = (run_result.stdout or "") + (run_result.stderr or "")
    if run_result.returncode != 0:
        combined_output = f"MySQLTuner exited with rc={run_result.returncode}\n{combined_output}"
    return _mysqltuner_strip_empty_sections(combined_output)
def get_largest_databases(report):
    """Measure the on-disk size of each user database under /var/lib/mysql.

    Populates ``report.database_sizes_mb`` (all databases, sorted
    descending) and ``report.largest_databases`` (top 10). Returns a
    formatted string of the top 10, or a bracketed status marker
    ("[data dir missing]" / "[none detected]" / "[error]").
    """
    report.largest_databases = []
    if not os.path.isdir("/var/lib/mysql"):
        logger.warning("MySQL data directory not found: %s", "/var/lib/mysql")
        return "[data dir missing]"
    # Internal schemas that should not count toward user data.
    system_schemas = {
        "performance_schema",
        "information_schema",
        "mysql",
        "sys",
        "#innodb_temp",
    }
    try:
        report.database_sizes_mb = []
        with os.scandir("/var/lib/mysql") as directory_entries:
            for entry in directory_entries:
                if not entry.is_dir(follow_symlinks=False):
                    continue
                if entry.name.startswith(".") or entry.name in system_schemas:
                    logger.debug("Skipping system/hidden database directory: %s", entry.name)
                    continue
                logger.debug("Measuring database directory: %s", entry.name)
                # Accumulate in a local variable: the previous version did one
                # attribute write on `report` per file, leaving loop scratch on
                # the report object.
                total_bytes = 0
                for root_path, _, file_names in os.walk(entry.path, followlinks=False):
                    for file_name in file_names:
                        try:
                            total_bytes += os.path.getsize(os.path.join(root_path, file_name))
                        except OSError:
                            logger.debug("Unreadable file skipped: %s", os.path.join(root_path, file_name))
                # Kept for backward compatibility with anything reading this attr.
                report.database_total_bytes = total_bytes
                report.database_sizes_mb.append(
                    (entry.name, round(total_bytes / (1024 * 1024), 2))
                )
        report.database_sizes_mb.sort(key=lambda item: item[1], reverse=True)
        report.largest_databases = report.database_sizes_mb[:10]
        if not report.largest_databases:
            return "[none detected]"
        return "\n".join(
            f" {database_name} ({size_mb} MB)"
            for database_name, size_mb in report.largest_databases
        )
    except Exception:
        logger.exception("Failed while scanning database sizes.")
        report.largest_databases = []
        return "[error]"
def get_dbms_config(report):
    """Collect key MySQL/MariaDB tuning variables plus current connections.

    Connects as root via /root/.my.cnf, stores each variable of interest on
    the report object (attribute name == variable name), and returns a
    human-readable multi-line summary. Returns a bracketed "[Error] ..."
    string when the server is unreachable or the queries fail.
    """
    def parse_size(value):
        # Convert "16M"/"1G"/"4096"-style values to bytes; None when unparsable.
        if not value:
            return None
        text = str(value).strip().upper()
        if text.isdigit():
            return int(text)
        multiplier = 1
        if text.endswith("K"):
            multiplier = 1024
            text = text[:-1]
        elif text.endswith("M"):
            multiplier = 1024 * 1024
            text = text[:-1]
        elif text.endswith("G"):
            multiplier = 1024 * 1024 * 1024
            text = text[:-1]
        return int(text) * multiplier if text.isdigit() else None

    def to_mb(value):
        # NOTE: a value of 0 (or None) renders as "N/A".
        return f"{round(value / 1024 / 1024, 2)} MB" if value else "N/A"

    def safe_int(value, label):
        try:
            return int(value)
        except (TypeError, ValueError):
            logger.warning("Unable to parse %s value: %s", label, value)
            return None

    # Declarative dispatch: variable name -> converter (None means store raw).
    # Replaces a 17-branch if/elif chain; the SQL filter below is generated
    # from the same table so the two can never drift apart.
    variable_handlers = {
        "key_buffer_size": parse_size,
        "innodb_buffer_pool_size": parse_size,
        "max_connections": lambda v: safe_int(v, "max_connections"),
        "query_cache_type": None,
        "query_cache_size": parse_size,
        "tmp_table_size": parse_size,
        "max_heap_table_size": parse_size,
        "table_open_cache": lambda v: safe_int(v, "table_open_cache"),
        "table_definition_cache": lambda v: safe_int(v, "table_definition_cache"),
        "innodb_log_file_size": parse_size,
        "join_buffer_size": parse_size,
        "sort_buffer_size": parse_size,
        "max_allowed_packet": parse_size,
        "slow_query_log": None,
        "slow_query_log_file": None,
        "long_query_time": None,
        "performance_schema": None,
    }
    try:
        connection = get_mysql_connection()
    except Exception as exc:
        logger.error("Database connection failed: %s", exc)
        return "[Error] Unable to connect to MySQL or MariaDB."
    try:
        with connection:
            with connection.cursor() as cursor:
                # Hard-coded variable names only; no untrusted input in the SQL.
                variable_list = ", ".join(f"'{name}'" for name in variable_handlers)
                cursor.execute(
                    f"SHOW GLOBAL VARIABLES WHERE Variable_name IN ({variable_list});"
                )
                rows = cursor.fetchall() or []
                for row in rows:
                    variable = (row.get("Variable_name") or "").lower()
                    if variable not in variable_handlers:
                        continue
                    converter = variable_handlers[variable]
                    value = row.get("Value")
                    setattr(report, variable, converter(value) if converter else value)
                cursor.execute("SHOW GLOBAL STATUS LIKE 'Threads_connected';")
                row = cursor.fetchone()
                if row:
                    report.current_connections = safe_int(row.get("Value"), "Threads_connected")
    except Exception as exc:
        logger.error("Failed to retrieve DBMS variables: %s", exc)
        return "[Error] Failed to retrieve DBMS variables."
    summary = (
        f"Max Key Buffer Size: {to_mb(getattr(report, 'key_buffer_size', None))}\n"
        f"Max InnoDB Buffer Pool Size: {to_mb(getattr(report, 'innodb_buffer_pool_size', None))}\n"
        f"Max Connections: {getattr(report, 'max_connections', None) or 'N/A'}\n"
        f"Current Connection Threads: {getattr(report, 'current_connections', None) or 'N/A'}\n\n"
        "Query Cache:\n"
        f" query_cache_type: {getattr(report, 'query_cache_type', None) or 'N/A'}\n"
        f" query_cache_size: {to_mb(getattr(report, 'query_cache_size', None))}\n\n"
        "Temporary Tables:\n"
        f" tmp_table_size: {to_mb(getattr(report, 'tmp_table_size', None))}\n"
        f" max_heap_table_size: {to_mb(getattr(report, 'max_heap_table_size', None))}\n\n"
        "Table Cache:\n"
        f" table_open_cache: {getattr(report, 'table_open_cache', None) or 'N/A'}\n"
        f" table_definition_cache: {getattr(report, 'table_definition_cache', None) or 'N/A'}\n\n"
        "InnoDB Log:\n"
        f" innodb_log_file_size: {to_mb(getattr(report, 'innodb_log_file_size', None))}\n\n"
        "Buffers:\n"
        f" join_buffer_size: {to_mb(getattr(report, 'join_buffer_size', None))}\n"
        f" sort_buffer_size: {to_mb(getattr(report, 'sort_buffer_size', None))}\n\n"
        "Limits:\n"
        f" max_allowed_packet: {to_mb(getattr(report, 'max_allowed_packet', None))}\n\n"
        "Slow Query Log:\n"
        f" slow_query_log: {getattr(report, 'slow_query_log', None) or 'N/A'}\n"
        f" slow_query_log_file: {getattr(report, 'slow_query_log_file', None) or 'N/A'}\n"
        f" long_query_time: {getattr(report, 'long_query_time', None) or 'N/A'}\n\n"
        "Instrumentation:\n"
        f" performance_schema: {getattr(report, 'performance_schema', None) or 'N/A'}\n"
    )
    return summary
def database_build_section(report):
    """Assemble the Markdown 'Database Summary' section of the report."""
    logger.debug("Building 'Database' section")
    step_count = 4
    section_progress = SectionProgress(
        report.progress,
        (report.section_index, report.sections_total),
        step_count,
        report.section_name,
    )
    try:
        section_progress.tick("Detecting DBMS version")
        version_text = get_dbms_version(report)
        section_progress.tick("Collecting DBMS configuration")
        config_text = get_dbms_config(report)
        section_progress.tick("Running MySQLTuner")
        tuner_text = get_mysql_tuner(report)
        section_progress.tick("Finding largest DBs")
        largest_text = get_largest_databases(report)
        # Each collected blob is fenced, dimmed, and stitched together with
        # the same separators as before.
        return "".join([
            report.format_heading("## Database Summary"), "\n",
            report.format_label_value("DBMS Version", version_text), "\n\n",
            report.format_subheading("### DBMS Configuration:"), "\n\n",
            report.format_block_dim(f"```\n{config_text}\n```"), "\n\n",
            report.format_subheading("### MySQLTuner:"), "\n\n",
            report.format_block_dim(f"```\n{tuner_text}\n```"), "\n\n",
            report.format_subheading("### Top 10 Largest Databases:"), "\n",
            report.format_block_dim(f"```\n{largest_text}\n```"),
        ])
    except Exception:
        logger.exception("Failed to build 'Database Tuning' section")
        return report.format_error(
            "[Error] Failed to build database section. Manually check for the DBMS version, "
            "install MySQL Tuner and run it for results, find the database name and the size "
            "of the database, and check to see if it can be optimized. Please report this as a bug."
        )
# --- Mail
# Loose matcher for the first email-address-shaped token in a mail-log line;
# used as the fallback when an MTA-specific field (e.g. postfix to=<...>)
# cannot be located.
_EMAIL_BOUNCE_ADDR_REGEX = re.compile(r"[\w\.\+\-]+@[\w\.\-]+", re.I)
def _mail_ip_from_cpanel_mailips(mailips_path, target_domain, fallback_ip):
    """Resolve the outbound SMTP IP for a domain from cPanel's mailips file.

    Preference order: exact domain entry, then the '*' wildcard entry,
    then *fallback_ip* (the server's main IP).
    """
    if not os.path.isfile(mailips_path):
        logger.debug("cPanel mailips file not present; using main server IP")
        return fallback_ip
    try:
        with open(mailips_path, "r", encoding="utf-8", errors="ignore") as mailips_file:
            raw_lines = mailips_file.readlines()
    except Exception:
        logger.exception("Failed reading %s; falling back to main IP", mailips_path)
        return fallback_ip
    wildcard_ip = None
    per_domain = {}
    for raw_line in raw_lines:
        if ":" not in raw_line:
            continue
        name_part, _, ip_part = raw_line.partition(":")
        name_part = name_part.strip().lower()
        ip_part = ip_part.strip()
        if not name_part or not ip_part:
            continue
        if name_part == "*":
            wildcard_ip = ip_part
        else:
            per_domain[name_part] = ip_part
    if target_domain in per_domain:
        matched_ip = per_domain[target_domain]
        logger.info("Found domain-specific SMTP IP for %s: %s", target_domain, matched_ip)
        return matched_ip
    if wildcard_ip:
        logger.debug("Using wildcard SMTP IP for cPanel: %s", wildcard_ip)
        return wildcard_ip
    logger.debug("mailips file had no matching or wildcard entry; using main server IP")
    return fallback_ip
def _mail_ip_from_cwp_master_cf(master_cf_path, target_domain, fallback_ip):
    """Resolve the outbound SMTP IP for *target_domain* from a CWP Postfix
    master.cf, falling back to *fallback_ip* (the server's main IP).

    The file is stitched into per-service blocks (a header line plus its
    indented continuation lines). A block "matches" when it declares both
    an smtp_bind_address and an smtp_helo_name equal to the target domain.
    """
    if not os.path.isfile(master_cf_path):
        logger.debug("Postfix master.cf not present; using main IP")
        return fallback_ip
    try:
        with open(master_cf_path, "r", encoding="utf-8", errors="ignore") as master_cf_file:
            master_cf_lines = master_cf_file.readlines()
    except Exception:
        logger.exception("Failed reading %s; using main IP fallback", master_cf_path)
        return fallback_ip
    def is_service_header(line_text):
        # Heuristic: service headers start in column 0 and look like
        # "name unix - - n - - smtp"; continuation lines are indented.
        return bool(line_text) and (not line_text.startswith((" ", "\t"))) and ("unix" in line_text) and ("-" in line_text)
    def extract_block_settings(block_lines):
        # Pull smtp_helo_name (lowercased) and smtp_bind_address from a block.
        block_helo_domain = None
        block_bind_ip = None
        for block_line in block_lines:
            if "smtp_bind_address=" in block_line:
                block_bind_ip = block_line.split("smtp_bind_address=", 1)[1].strip()
                continue
            if "smtp_helo_name=" in block_line:
                block_helo_domain = block_line.split("smtp_helo_name=", 1)[1].strip().lower()
                continue
        return block_helo_domain, block_bind_ip
    def block_matches_domain(block_lines):
        # Return the bind address only when the block's HELO name is our domain.
        block_helo_domain, block_bind_ip = extract_block_settings(block_lines)
        if block_bind_ip and block_helo_domain == target_domain:
            return block_bind_ip
        return None
    # Each new header closes the previous block, which is then tested.
    current_block_lines = []
    for line in master_cf_lines:
        if is_service_header(line):
            matched_ip = block_matches_domain(current_block_lines) if current_block_lines else None
            if matched_ip:
                logger.info("Found CWP Postfix smtp_bind_address for %s: %s", target_domain, matched_ip)
                return matched_ip
            current_block_lines = [line]
            continue
        if current_block_lines:
            current_block_lines.append(line)
    # The final block has no trailing header to close it; test it explicitly.
    matched_ip = block_matches_domain(current_block_lines) if current_block_lines else None
    if matched_ip:
        logger.info("Found CWP Postfix smtp_bind_address for %s: %s", target_domain, matched_ip)
        return matched_ip
    logger.info("No per-domain smtp_bind_address found in CWP master.cf; using main IP")
    return fallback_ip
def get_mail_ip(report):
    """Determine the IP address this server uses for outbound mail.

    Panel-specific lookups (cPanel mailips, CWP postfix master.cf) are
    consulted where applicable; any other panel — or any lookup failure —
    falls back to the server's main IP. The result is cached on
    ``report.mail_ip`` and returned (None when main_ip is unknown).
    """
    if not getattr(report, "main_ip", None):
        logger.warning("main_ip is missing on report; defaulting mail IP to None")
        return None
    panel = (getattr(report, "panel_type", "") or "").strip()
    domain = (getattr(report, "managed_domain", "") or "").strip().lower()
    resolved_ip = report.main_ip
    try:
        if panel == "cPanel":
            resolved_ip = _mail_ip_from_cpanel_mailips("/etc/mailips", domain, report.main_ip)
        elif panel == "Control Web Panel":
            resolved_ip = _mail_ip_from_cwp_master_cf("/etc/postfix/master.cf", domain, report.main_ip)
    except Exception:
        logger.exception("Unhandled error determining mail IP; falling back to main IP")
        resolved_ip = report.main_ip
    report.mail_ip = resolved_ip
    return resolved_ip
def get_ptr(report):
    """Reverse-DNS (PTR) lookup for the outbound mail IP via ``dig -x``.

    Skipped entirely when mail is routed externally (local_email is False)
    or no mail IP is known. Caches the first PTR value (trailing dot
    stripped) on ``report.mail_ptr``; returns it, or None.
    """
    if getattr(report, "local_email", None) is False:
        logger.debug("local_email is False; skipping PTR lookup for external mail routing")
        return None
    mail_ip = getattr(report, "mail_ip", None)
    if not mail_ip:
        logger.warning("mail_ip is not set on report; cannot perform PTR lookup")
        return None
    try:
        dig_output = subprocess.check_output(
            ["dig", "+short", "-x", mail_ip],
            text=True,
            timeout=5
        )
        # First non-empty line is the PTR answer.
        first_record = next(
            (candidate.strip() for candidate in dig_output.splitlines() if candidate.strip()),
            None,
        )
        if not first_record:
            logger.debug("No PTR record found for %s", mail_ip)
            return None
        report.mail_ptr = first_record.rstrip(".")
        return report.mail_ptr
    except Exception as e:
        logger.debug("PTR lookup failed for %s: %s", mail_ip, e)
        return None
def get_email_accounts(report):
    """Enumerate mailbox addresses for the managed domain.

    Sets ``report.email_accounts`` (sorted list) and, when countable,
    ``report.email_user_count``. Returns the count, 0 when the mail
    directory is absent or empty, or None when enumeration is not
    possible for this panel/configuration.
    """
    panel = (getattr(report, "panel_type", "") or "").strip()
    domain = (getattr(report, "managed_domain", "") or "").strip().lower()
    if not domain:
        report.email_accounts = []
        return None
    try:
        if panel == "UltraStack ONE":
            # Single-site stack: no local mailboxes to enumerate.
            report.email_accounts = []
            report.email_user_count = 0
            return 0
        if panel == "Baremetal":
            logger.info("Baremetal detected; email account enumeration not implemented")
            report.email_accounts = []
            return None
        if panel == "cPanel":
            owner = (getattr(report, "domain_owner", "") or "").strip()
            if not owner:
                logger.warning("domain_owner is not set; cannot derive cPanel mail path")
                report.email_accounts = []
                return None
            mail_root = f"/home/{owner}/mail/{domain}"
        elif panel == "Control Web Panel":
            mail_root = f"/var/vmail/{domain}"
        else:
            logger.warning("Unknown panel_type '%s'; cannot enumerate email accounts", panel)
            report.email_accounts = []
            return None
        if not os.path.isdir(mail_root):
            logger.info("Mail directory not found for %s: %s", domain, mail_root)
            report.email_accounts = []
            report.email_user_count = 0
            return 0
        # Each non-hidden subdirectory is one mailbox local-part.
        with os.scandir(mail_root) as entries:
            mailbox_names = [
                entry.name
                for entry in entries
                if entry.is_dir(follow_symlinks=False) and not entry.name.startswith(".")
            ]
        report.email_accounts = sorted(f"{name}@{domain}" for name in mailbox_names)
        report.email_user_count = len(report.email_accounts)
        logger.info("Found %s email accounts for %s on panel %s", report.email_user_count, domain, panel)
        return report.email_user_count
    except Exception:
        logger.exception("Failed to enumerate email accounts")
        report.email_accounts = []
        return None
def _mail_dir_size_bytes(directory_path: str) -> int:
total_bytes = 0
for root, _, files in os.walk(directory_path):
for file_name in files:
try:
total_bytes += os.path.getsize(os.path.join(root, file_name))
except FileNotFoundError:
continue
return total_bytes
def _mail_collect_usage_map(base_path: str, account_list):
    """Map email address -> mailbox size in bytes for every account whose
    maildir exists under *base_path*; missing mailboxes are skipped."""
    usage_by_account = {}
    for account in account_list or []:
        local_part = (str(account).split("@", 1)[0] or "").strip()
        if not local_part:
            continue
        maildir = os.path.join(base_path, local_part)
        if os.path.isdir(maildir):
            usage_by_account[str(account)] = _mail_dir_size_bytes(maildir)
        else:
            logger.debug("Mailbox directory missing for: %s", account)
    return usage_by_account
def _mail_get_mail_base_path(report):
panel_type = (getattr(report, "panel_type", "") or "").strip()
if panel_type in ("UltraStack ONE", "Baremetal"):
return None
domain_lower = (getattr(report, "managed_domain", "") or "").strip().lower()
if not domain_lower:
logger.debug("managed_domain not set; skipping mail disk usage")
return None
if panel_type == "cPanel":
owner = (getattr(report, "domain_owner", "") or "").strip()
if not owner:
logger.debug("Domain owner not set; skipping mail disk usage for cPanel")
return None
return f"/home/{owner}/mail/{domain_lower}"
if panel_type == "Control Web Panel":
return f"/var/vmail/{domain_lower}"
logger.warning("Unknown panel type '%s' for mail disk usage", panel_type)
return None
def get_email_disk_usage(report):
    """Summarize mailbox disk usage for the managed domain.

    Builds ``report.email_disk_summary`` containing: total usage, the top
    three accounts by size, and a per-folder breakdown for the heaviest
    account. Also stores the raw numbers on the report
    (total_email_disk_usage, highest_disk_mail_accounts,
    highest_disk_email_account, highest_usage_email_folder). Returns the
    summary string, or None when there is nothing to measure.
    """
    report.email_disk_summary = "No disk usage data collected."
    email_accounts = getattr(report, "email_accounts", None)
    if not email_accounts:
        logger.info("No email accounts detected; skipping disk usage")
        return None
    try:
        base_path = _mail_get_mail_base_path(report)
        if not base_path:
            return None
        usage_map = _mail_collect_usage_map(base_path, email_accounts)
        if not usage_map:
            return None
        # Rank accounts by size; keep the three largest.
        top_three = sorted(usage_map.items(), key=lambda item: item[1], reverse=True)[:3]
        total_usage = sum(usage_map.values())
        highest_email = top_three[0][0]
        highest_mailbox_path = os.path.join(base_path, highest_email.split("@", 1)[0])
        # Per-folder sizes for the heaviest mailbox, only for folders present.
        folder_breakdown = {
            folder_name: _mail_dir_size_bytes(os.path.join(highest_mailbox_path, folder_name))
            for folder_name in ("cur", "new", "tmp", "sent", "archive", "drafts", "trash")
            if os.path.isdir(os.path.join(highest_mailbox_path, folder_name))
        }
        report.total_email_disk_usage = total_usage
        report.highest_disk_mail_accounts = top_three
        report.highest_disk_email_account = highest_email
        report.highest_usage_email_folder = folder_breakdown
        lines = [
            report.format_label_value("Total email disk usage", report.mail_format_bytes(total_usage)),
            "",
            report.mail_format_label_only("Top email accounts by disk usage"),
        ]
        lines.extend(
            f" {email_address}: {report.mail_format_bytes(usage_bytes)}"
            for email_address, usage_bytes in top_three
        )
        if folder_breakdown:
            lines.append("")
            lines.append(f"{report.mail_format_label_only('Folder usage for highest account')} ({highest_email}):")
            lines.extend(
                f" {folder_name}: {report.mail_format_bytes(folder_bytes)}"
                for folder_name, folder_bytes in folder_breakdown.items()
            )
        report.email_disk_summary = "\n".join(lines)
        return report.email_disk_summary
    except Exception:
        logger.exception("Failed calculating email disk usage")
        return None
def _email_bounce_parse_timestamp(log_line: str, mta_type: str, current_year: int):
try:
parts = (log_line or "").split()
if mta_type == "exim":
if len(parts) < 2:
return None
return dt.datetime.strptime(f"{parts[0]} {parts[1]}", "%Y-%m-%d %H:%M:%S")
if len(parts) < 3:
return None
return dt.datetime.strptime(
f"{current_year} {parts[0]} {parts[1]} {parts[2]}",
"%Y %b %d %H:%M:%S",
)
except Exception:
return None
def _email_bounce_detect_status(log_line: str, lower_line: str, mta_type: str):
if mta_type == "exim":
is_bounce = " ** " in (log_line or "")
is_defer = (
" temporarily deferred" in (lower_line or "")
or " max defers and failures" in (lower_line or "")
)
return is_bounce, is_defer
is_bounce = "status=bounced" in (lower_line or "")
is_defer = "status=deferred" in (lower_line or "")
return is_bounce, is_defer
def _email_bounce_extract_address(log_line: str, lower_line: str, mta_type: str):
    """Pull the recipient email address out of a mail-log line.

    Postfix lines are tried first via their explicit ``to=<addr>`` field;
    anything else (including exim lines) falls back to a loose regex scan
    for the first address-shaped token. Returns None when nothing matches.
    """
    if mta_type == "postfix":
        # Locate " to=<" (preferred) or bare " to=" in the lowercased line.
        to_index = (lower_line or "").find(" to=<")
        if to_index == -1:
            to_index = (lower_line or "").find(" to=")
        if to_index != -1:
            # +4 skips " to="; an optional "<" is then stepped over.
            start_index = to_index + 4
            if start_index < len(log_line) and log_line[start_index] == "<":
                start_index += 1
            # Address ends at ">", else the next space, else end of line.
            end_index = log_line.find(">", start_index)
            if end_index == -1:
                end_index = log_line.find(" ", start_index)
            candidate = (log_line[start_index:end_index] if end_index != -1 else log_line[start_index:]).strip()
            if "@" in candidate:
                return candidate
    # Fallback: first thing that looks like an email address anywhere in the line.
    match_obj = _EMAIL_BOUNCE_ADDR_REGEX.search(log_line or "")
    return match_obj.group(0) if match_obj else None
def _email_bounce_scan_log_file(
    log_path: str,
    *,
    mta_type: str,
    managed_domain: str,
    cutoff_time,
    current_year: int,
    bounce_stats: dict,
):
    """Stream *log_path* and tally bounce/defer events per recipient.

    Only lines newer than *cutoff_time* whose recipient belongs to
    *managed_domain* are counted; results accumulate into *bounce_stats*
    as ``{address: {"bounced": n, "deferred": n}}``. Read failures are
    logged and swallowed (best-effort scan).
    """
    domain_suffix = "@" + managed_domain
    try:
        with open(log_path, "r", encoding="utf-8", errors="ignore") as log_file:
            for raw_line in log_file:
                line = raw_line.rstrip("\n")
                timestamp = _email_bounce_parse_timestamp(line, mta_type, current_year)
                if not timestamp or timestamp < cutoff_time:
                    continue
                lowered = line.lower()
                bounced, deferred = _email_bounce_detect_status(line, lowered, mta_type)
                if not bounced and not deferred:
                    continue
                address = _email_bounce_extract_address(line, lowered, mta_type)
                if not address:
                    continue
                address = address.lower()
                if not address.endswith(domain_suffix):
                    continue
                counters = bounce_stats.setdefault(address, {"bounced": 0, "deferred": 0})
                if bounced:
                    counters["bounced"] += 1
                if deferred:
                    counters["deferred"] += 1
    except Exception:
        logger.exception("Failed while reading mail log: %s", log_path)
def _email_bounce_filter_stats(bounce_stats: dict):
return {
address: counts
for address, counts in (bounce_stats or {}).items()
if counts.get("bounced", 0) or counts.get("deferred", 0)
}
def get_email_bounce_stats(report):
    """Tally last-24h bounce/defer counts per mailbox for the managed domain.

    Scans the exim main log on cPanel and the postfix maillog elsewhere.
    Stores results on ``report.email_bounce_stats`` and a readable
    ``report.email_bounce_summary``; returns the stats dict, or None when
    nothing could be collected.
    """
    panel = (getattr(report, "panel_type", "") or "").strip()
    domain = (getattr(report, "managed_domain", "") or "").strip().lower()
    report.email_bounce_summary = "No bounce or defer data collected."
    if not domain:
        logger.warning("managed_domain is not set; cannot analyze mail logs")
        return None
    if panel == "cPanel":
        log_path, mta_type = "/var/log/exim_mainlog", "exim"
    else:
        log_path, mta_type = "/var/log/maillog", "postfix"
    if not os.path.isfile(log_path):
        logger.info("Mail log not found at %s", log_path)
        return None
    now = dt.datetime.now()
    # Seed every known account so the scan only has to increment counters.
    tallies = {
        str(address).lower(): {"bounced": 0, "deferred": 0}
        for address in (getattr(report, "email_accounts", []) or [])
    }
    _email_bounce_scan_log_file(
        log_path=log_path,
        mta_type=mta_type,
        managed_domain=domain,
        cutoff_time=now - dt.timedelta(days=1),
        current_year=now.year,
        bounce_stats=tallies,
    )
    report.email_bounce_stats = _email_bounce_filter_stats(tallies) or None
    if not report.email_bounce_stats:
        report.email_bounce_summary = "No bounce or defer data collected."
        logger.info("No bounce/defer stats found for domain %s in last 24 hours", domain)
        return None
    report.email_bounce_summary = report.email_bounce_format_summary(report.email_bounce_stats)
    logger.debug(
        "Collected bounce/defer stats for %d email accounts on domain %s",
        len(report.email_bounce_stats),
        domain,
    )
    return report.email_bounce_stats
def get_spam_protection(report):
    """Detect the spam-filtering stack (SpamAssassin/Amavisd) for the panel.

    Sets ``report.spam_protection`` and a matching human-readable
    ``report.spam_protection_summary``; returns the detection result
    ("SpamAssassin", "Amavisd", or "None").
    """
    panel = (getattr(report, "panel_type", "") or "").strip()
    owner = (getattr(report, "domain_owner", "") or "").strip()
    report.spam_protection_summary = "No spam protection detected."

    def record(name):
        # Centralizes the assign-and-return pattern repeated per branch.
        report.spam_protection = name
        if name == "None":
            report.spam_protection_summary = "No spam protection detected."
        else:
            report.spam_protection_summary = f"Detected {name}"
        return report.spam_protection

    try:
        if panel == "cPanel":
            if not owner:
                logger.warning("domain_owner missing for SpamAssassin detection")
                return record("None")
            # Per-account opt-in flag file enables SpamAssassin on cPanel.
            if os.path.isfile(f"/home/{owner}/.spamassassinenable"):
                return record("SpamAssassin")
            return record("None")
        if panel == "Control Web Panel":
            master_cf = "/etc/postfix/master.cf"
            if not os.path.isfile(master_cf):
                logger.warning("Postfix master.cf not found; cannot detect spam protection")
                return record("None")
            try:
                with open(master_cf, "r", encoding="utf-8", errors="ignore") as f:
                    content = f.read().lower()
                if "content_filter=smtp-amavis" in content:
                    return record("Amavisd")
                if "spamassassin" in content:
                    return record("SpamAssassin")
                return record("None")
            except Exception:
                logger.exception("Failed to parse Postfix master.cf for spam protection detection")
                return record("None")
        if panel in ("UltraStack ONE", "Baremetal"):
            return record("None")
        logger.warning("Unknown panel type '%s', defaulting to None", panel)
        return record("None")
    except Exception:
        logger.exception("Error determining spam protection setup")
        return record("None")
def mail_build_section(report):
    """Build the Markdown 'Email Summary' section of the report.

    Mutates report attributes via the mail helpers (mail_ip, account counts,
    disk usage, bounce stats, spam protection fields) and ticks the shared
    progress bar between steps. Returns the rendered section text, or a
    formatted error string if anything fails.
    """
    logger.debug("Building 'Email' section")
    # Decide whether mail for the managed domain is actually handled locally.
    local_email_flag = getattr(report, "local_email", None)
    if local_email_flag is None:
        try:
            dns_lower = (get_dns_info(report) or "").lower()
            if "mx" in dns_lower:
                # MX data present: mail is local only when the MX output
                # references the managed domain itself.
                report.local_email = bool(report.managed_domain) and (report.managed_domain.lower() in dns_lower)
            else:
                # No MX records visible: assume local delivery (safe default).
                report.local_email = True
        except Exception:
            # DNS lookup failed: err on the side of running the local checks.
            report.local_email = True
        local_email_flag = report.local_email
    # One progress step when mail is external, six for the full check run.
    section_progress = SectionProgress(
        report.progress,
        (report.section_index, report.sections_total),
        (1 if local_email_flag is False else 6),
        report.section_name,
    )
    try:
        if local_email_flag is False:
            # External provider handles mail: short-circuit with a notice.
            section_progress.tick("External email configured")
            return (
                f"{report.format_heading('## Email Summary')}\n"
                f"{report.format_amber(f'{report.managed_domain} does not utilize this server for email.')}\n"
                "MX records route mail delivery to an external provider, so local mail checks are skipped."
            )
        section_progress.tick("Determining outbound mail IP")
        get_mail_ip(report)
        mail_ip = getattr(report, "mail_ip", None) or "Unknown"
        section_progress.tick("PTR lookup")
        ptr_display = get_ptr(report) or "No PTR found"
        section_progress.tick("Counting accounts")
        get_email_accounts(report)
        email_account_count = getattr(report, "email_user_count", None)
        section_progress.tick("Measuring disk usage")
        get_email_disk_usage(report)
        section_progress.tick("Checking bounces/defers")
        get_email_bounce_stats(report)
        bounces_summary = getattr(report, "email_bounce_summary", None) or "No bounce or defer data collected."
        # Green for the all-clear text, dimmed block when real bounce data exists.
        if str(bounces_summary).strip().lower() == "no bounce or defer data collected.":
            bounces_summary = report.apply_color("green", "No bounce or defer data collected.")
        else:
            bounces_summary = report.format_block_dim(bounces_summary)
        section_progress.tick("Detecting spam protection")
        get_spam_protection(report)
        spam_summary = getattr(report, "spam_protection_summary", None) or "No spam protection detected."
        # Green when any protection was detected, red otherwise.
        if (getattr(report, "spam_protection", None) or "").strip().lower() not in ("", "none"):
            spam_summary = report.apply_color("green", spam_summary)
        else:
            spam_summary = report.apply_color("red", spam_summary)
        return (
            f"{report.format_heading('## Email Summary')}\n"
            f"{report.format_label_value('Outbound Mail IP', mail_ip)}\n"
            f"{report.format_label_value('Reverse DNS (PTR)', ptr_display)}\n"
            f"{report.format_label_value('Number of Email Accounts', (str(email_account_count) if email_account_count is not None else 'Unknown'))}\n\n"
            f"{report.format_subheading('### Highest Email Account Disk Usage:')}\n"
            f"{getattr(report, 'email_disk_summary', None) or 'No disk usage data collected.'}\n\n"
            f"{report.format_subheading('### Bounce Back and Deferral Count:')}\n"
            f"{bounces_summary}\n\n"
            f"{report.format_subheading('### Spam Protection:')}\n"
            f"{spam_summary}"
        )
    except Exception:
        logger.exception("Failed to build 'Email' section")
        return report.format_error(
            "[Error] Failed to build email section. "
            "Check email accounts, disk usage, bounces and defers, and spam protection manually. "
            "Please report this as a bug."
        )
# --- Security
def get_monarx(report):
    """Record on the report whether the Monarx agent systemd unit is active."""
    try:
        is_active = subprocess.run(
            ["systemctl", "is-active", "--quiet", "monarx-agent.service"],
            check=False,
        ).returncode == 0
    except Exception as exc:
        report.has_monarx = False
        logger.error("Failed to check Monarx agent service: %s", exc)
        return
    report.has_monarx = is_active
    if is_active:
        logger.info("Monarx agent service is running.")
    else:
        logger.info("Monarx agent service is NOT running.")
def get_imunify(report):
    """Record on the report whether any Imunify360 systemd unit is active."""
    monitored_units = (
        "imunify360-webshield.service",
        "imunify360-pam.service",
        "imunify-auditd-log-reader.service",
    )
    try:
        any_running = False
        for unit in monitored_units:
            is_active = subprocess.run(
                ["systemctl", "is-active", "--quiet", unit],
                check=False,
            ).returncode == 0
            if is_active:
                logger.info("Imunify service %s is running.", unit)
                any_running = True
            else:
                logger.debug("Imunify service %s is not running.", unit)
        report.has_imunify = any_running
    except Exception as exc:
        report.has_imunify = False
        logger.error("Failed to check Imunify360 services: %s", exc)
def get_firewall(report):
    """
    Detect active firewall/security layers via systemd unit state.

    - Controllers: csf, apf, ufw, firewalld
    - Backends: nftables, iptables
    - Related: imunify360, fail2ban
    """
    def unit_is_active(unit_name: str) -> bool:
        # `systemctl is-active --quiet` exits 0 only for active units.
        try:
            completed = subprocess.run(
                ["systemctl", "is-active", "--quiet", unit_name],
                check=False,
            )
            return completed.returncode == 0
        except Exception:
            logger.debug("systemctl check failed for unit %s", unit_name, exc_info=True)
            return False

    layer_units = {
        "csf": ["csf"],
        "apf": ["apf"],
        "ufw": ["ufw"],
        "firewalld": ["firewalld"],
        "nftables": ["nftables"],
        "iptables": ["iptables", "ip6tables"],
        "imunify360": ["imunify360-webshield", "imunify360-pam", "imunify-auditd-log-reader",
                       "imunify360", "imunify-agent", "imunify360-agent"],
        "fail2ban": ["fail2ban"],
    }
    detected = [
        layer
        for layer, units in layer_units.items()
        if any(unit_is_active(unit) for unit in units)
    ]
    report.firewall_stack = detected
    logger.info("Security layers detected (systemd only): %s", ", ".join(detected) if detected else "none")
    return detected
def _ssh_parse_bool(value, default=False):
if isinstance(value, list):
value = value[-1] if value else ""
string_value = (value or "").strip().lower()
if string_value in ("yes", "true", "1", "on"):
return True
if string_value in ("no", "false", "0", "off"):
return False
return default
def _ssh_get_effective_config():
    """Run `sshd -T` and return the effective config as a dict.

    Keys that sshd may emit multiple times (ports, listen addresses,
    auth-method and allow/deny lists) are collected into lists; every other
    key keeps its last printed value. Raises on subprocess failure.
    """
    sshd_binary = shutil.which("sshd") or "/usr/sbin/sshd"
    dump = subprocess.check_output(
        [sshd_binary, "-T"],
        text=True,
        stderr=subprocess.STDOUT,
        timeout=10,
    )
    repeatable_keys = {
        "port",
        "listenaddress",
        "authenticationmethods",
        "allowusers",
        "allowgroups",
        "denyusers",
        "denygroups",
    }
    effective = {}
    for raw_line in (dump or "").splitlines():
        if not raw_line.strip() or " " not in raw_line:
            continue
        name, value = raw_line.split(None, 1)
        name = name.strip().lower()
        value = value.strip()
        if name in repeatable_keys:
            effective.setdefault(name, []).append(value)
        else:
            effective[name] = value
    return effective
def _ssh_auth_method_alternatives(config):
auth_value = config.get("authenticationmethods")
auth_string = " ".join(auth_value) if isinstance(auth_value, list) else (auth_value or "")
if not auth_string.strip():
return []
return [set(part.strip() for part in alt.split(",") if part.strip()) for alt in auth_string.split()]
def _ssh_passwords_allowed_by_auth_methods(alternatives):
if not alternatives:
return True
for alternative in alternatives:
has_password = (
"password" in alternative
or any(method.startswith("keyboard-interactive") for method in alternative)
)
if has_password:
return True
return False
def _ssh_passwords_require_key(alternatives):
if not alternatives:
return False
saw_password = False
requires_key_everywhere = True
for alternative in alternatives:
has_password = (
"password" in alternative
or any(method.startswith("keyboard-interactive") for method in alternative)
)
if has_password:
saw_password = True
if "publickey" not in alternative:
requires_key_everywhere = False
return saw_password and requires_key_everywhere
def _ssh_root_access_mode(config):
prl_raw = (config.get("permitrootlogin") or "prohibit-password").lower()
prl_map = {
"no": "Disabled",
"yes": "Enabled",
"without-password": "Key-only",
"prohibit-password": "Key-only",
"forced-commands-only": "Key-only (forced commands)",
}
root_mode = prl_map.get(prl_raw, prl_raw.capitalize())
return root_mode, (root_mode != "Disabled")
def _ssh_password_login_state(config):
    """Return (passwords_effective, passwords_require_key) for an sshd config.

    Passwords are effectively usable when either PasswordAuthentication is on
    or keyboard-interactive auth is on with PAM — and AuthenticationMethods
    does not exclude them.
    """
    allows_password_auth = _ssh_parse_bool(config.get("passwordauthentication"), True)
    kbd_interactive = _ssh_parse_bool(
        config.get("kbdinteractiveauthentication") or config.get("challengeresponseauthentication"),
        False,
    )
    pam_enabled = _ssh_parse_bool(config.get("usepam"), True)
    alternatives = _ssh_auth_method_alternatives(config)
    allowed_by_methods = _ssh_passwords_allowed_by_auth_methods(alternatives)
    needs_key = _ssh_passwords_require_key(alternatives)
    effective = (allows_password_auth or (kbd_interactive and pam_enabled)) and allowed_by_methods
    return effective, needs_key
def _ssh_limits(config):
max_auth_tries = str(config.get("maxauthtries") or "6")
max_sessions = str(config.get("maxsessions") or "10")
try:
tries_int = int(max_auth_tries)
except Exception:
tries_int = None
return max_auth_tries, tries_int, max_sessions
def _ssh_analyze_config(config):
    """Condense an effective sshd config into the fields the report renders."""
    raw_ports = config.get("port") or ["22"]
    ports = [raw_ports] if isinstance(raw_ports, str) else list(raw_ports)
    root_mode, root_allowed = _ssh_root_access_mode(config)
    passwords_effective, passwords_require_key = _ssh_password_login_state(config)
    max_auth_tries, tries_int, max_sessions = _ssh_limits(config)
    return {
        "ports": ports,
        "root_mode": root_mode,
        "root_allowed": root_allowed,
        "public_key_enabled": _ssh_parse_bool(config.get("pubkeyauthentication"), True),
        "passwords_effective": passwords_effective,
        "passwords_require_key": passwords_require_key,
        "max_auth_tries": max_auth_tries,
        "tries_int": tries_int,
        "max_sessions": max_sessions,
        "empty_passwords_allowed": _ssh_parse_bool(config.get("permitemptypasswords"), False),
    }
def get_ssh_settings(report):
    """Render the SSH summary block, or a formatted error string on failure."""
    try:
        try:
            effective = _ssh_get_effective_config()
        except Exception:
            logger.exception("Failed to read effective SSH config via `sshd -T`")
            return report.format_error("Unable to read effective SSH configuration")
        return report.ssh_render_summary(_ssh_analyze_config(effective))
    except Exception:
        logger.exception("Failed to summarize SSH settings")
        return report.format_error("Unable to summarize SSH settings")
def _ssl_parse_host_port(domain: str):
cleaned = re.sub(r"^\s*https?://", "", (domain or "").strip(), flags=re.I).split("/")[0].strip()
host = cleaned
port = 443
if cleaned.startswith("["):
bracket_end = cleaned.find("]")
if bracket_end != -1:
host = cleaned[1:bracket_end]
remainder = cleaned[bracket_end + 1 :]
if remainder.startswith(":"):
try:
port = int(remainder[1:])
except ValueError:
port = 443
else:
if ":" in cleaned and cleaned.count(":") == 1:
host_part, port_part = cleaned.rsplit(":", 1)
if port_part.isdigit():
host = host_part
port = int(port_part)
try:
host_idna = host.encode("idna").decode("ascii")
except Exception:
host_idna = host
return host_idna, port
def _ssl_fetch_cert(host_idna: str, port: int, verify: bool):
    """Connect to host:port over TLS and return the peer certificate as a dict.

    With verify=True the handshake performs full chain and hostname
    verification; with verify=False the certificate is fetched without any
    validation (used as a fallback so untrusted/self-signed certs can still
    be inspected). Returns {} when no certificate could be obtained or
    decoded. Raises on connection/handshake failure.
    """
    def is_ip_address(host_text: str) -> bool:
        # SNI must be omitted for IP literals (see server_hostname below).
        try:
            ipaddress.ip_address(host_text)
            return True
        except ValueError:
            return False
    def candidate_ca_files():
        # Honor SSL_CERT_FILE first, then common RHEL/Debian bundle paths.
        env_cafile = os.environ.get("SSL_CERT_FILE")
        if env_cafile:
            yield env_cafile
        candidates = (
            "/etc/pki/tls/certs/ca-bundle.crt",
            "/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem",
            "/etc/ssl/certs/ca-bundle.crt",
        )
        for path in candidates:
            yield path
    def load_system_cas(context: ssl.SSLContext) -> None:
        # Load the first usable CA bundle and stop; failures are logged only.
        for cafile in candidate_ca_files():
            try:
                if cafile and Path(cafile).is_file():
                    context.load_verify_locations(cafile=cafile)
                    logger.debug("Loaded CA bundle for TLS verify: %s", cafile)
                    return
            except Exception as exc:
                logger.debug("Failed loading CA bundle %s: %s", cafile, exc)
    if verify:
        context = ssl.create_default_context()
        context.check_hostname = True
        context.verify_mode = ssl.CERT_REQUIRED
        if not context.get_ca_certs():
            # Default trust store came up empty; try the known bundle paths.
            load_system_cas(context)
    else:
        # Unverified mode: TLS client protocol with all validation disabled.
        context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
        context.check_hostname = False
        context.verify_mode = ssl.CERT_NONE
    server_hostname = None if is_ip_address(host_idna) else host_idna
    with socket.create_connection((host_idna, port), timeout=8) as sock:
        with context.wrap_socket(sock, server_hostname=server_hostname) as secure_socket:
            # getpeercert() returns a populated dict only for verified handshakes.
            cert_dict = secure_socket.getpeercert()
            if cert_dict:
                return cert_dict
            cert_der = secure_socket.getpeercert(binary_form=True)
            if not cert_der:
                return {}
            try:
                # Unverified handshake: decode the DER cert by writing a temp
                # PEM file and calling ssl._ssl._test_decode_cert.
                # NOTE(review): that is a private CPython API — confirm it is
                # available on every interpreter version this tool targets.
                pem_text = ssl.DER_cert_to_PEM_cert(cert_der)
                temp_dir = os.environ.get("SPARTA_TMP_DIR", "/root/SPARTA_tmp")
                os.makedirs(temp_dir, exist_ok=True)
                temp_path = os.path.join(temp_dir, f"sparta_peer_cert_{host_idna}_{port}.pem")
                with open(temp_path, "w", encoding="utf-8") as handle:
                    handle.write(pem_text)
                decoded = ssl._ssl._test_decode_cert(temp_path)
                return decoded or {}
            except Exception as exc:
                logger.debug("Failed decoding peer cert for %s:%s: %s", host_idna, port, exc)
                return {}
def _ssl_get_peer_cert(host_idna: str, port: int):
    """Fetch the peer certificate, preferring a verified handshake.

    Returns (cert_dict, trust_state, self_signed_hint) where trust_state is
    True (verified), False (verification failed), or None (other TLS error).
    Propagates the exception if the unverified fallback also fails.
    """
    try:
        return _ssl_fetch_cert(host_idna, port, verify=True), True, False
    except ssl.SSLCertVerificationError as exc:
        combined = " ".join(
            [
                getattr(exc, "verify_message", "") or "",
                getattr(exc, "reason", "") or "",
                str(exc) or "",
            ]
        )
        logger.debug("Verified TLS failed for %s:%s: %s", host_idna, port, exc)
        return _ssl_fetch_cert(host_idna, port, verify=False), False, "self signed" in combined.lower()
    except Exception as exc:
        logger.debug("Verified TLS failed for %s:%s: %s", host_idna, port, exc)
        return _ssl_fetch_cert(host_idna, port, verify=False), None, False
def _ssl_get_issuer_org(cert_data):
issuer_list = cert_data.get("issuer") or []
for rdn in issuer_list:
for key, value in rdn:
if (key or "").lower() in ("organizationname", "o"):
return value
for rdn in issuer_list:
for key, value in rdn:
if (key or "").lower() in ("commonname", "cn"):
return value
return None
def _ssl_is_self_signed(cert_data):
issuer_list = cert_data.get("issuer") or []
subject_list = cert_data.get("subject") or []
try:
return bool(issuer_list and subject_list and issuer_list == subject_list)
except Exception as exc:
logger.debug("Failed to compare subject and issuer for self-signed detection: %s", exc)
return False
def _ssl_parse_expiration(cert_data):
raw_expires = cert_data.get("notAfter")
if not raw_expires:
return None, None, None
try:
expires_text = raw_expires.replace(" GMT", " UTC")
expiration_dt = dt.datetime.strptime(expires_text, "%b %d %H:%M:%S %Y UTC").replace(
tzinfo=dt.timezone.utc
)
now = dt.datetime.now(dt.timezone.utc)
days_remaining = max((expiration_dt - now).days, 0)
return expiration_dt.strftime("%Y-%m-%d %H:%M:%S UTC"), days_remaining, expiration_dt.isoformat()
except Exception as exc:
logger.debug("Failed to parse expiration: %s", exc)
return None, None, None
def get_ssl(report):
    """Summarize the managed domain's TLS certificate.

    Sets report.has_ssl (True/False/None) plus trust fields via the report's
    ssl_* helpers, and returns the rendered status/issuer/expiry lines or a
    formatted error string.
    """
    try:
        target = (getattr(report, "managed_domain", "") or "").strip()
        if not target:
            report.has_ssl = None
            return report.format_error("status: no domain provided")
        host_idna, port = _ssl_parse_host_port(target)
        try:
            cert_data, trust_state, self_signed_hint = _ssl_get_peer_cert(host_idna, port)
        except Exception as handshake_exc:
            # Even the unverified fallback handshake failed: no usable TLS.
            logger.error("TLS handshake failed (unverified) for %s:%s: %s", host_idna, port, handshake_exc)
            report.has_ssl = False
            report.ssl_trusted = False
            return report.ssl_disabled_status_line()
        report.has_ssl = True
        self_signed = bool(self_signed_hint or _ssl_is_self_signed(cert_data))
        issuer_org = _ssl_get_issuer_org(cert_data)
        expiration_info = _ssl_parse_expiration(cert_data)
        report.ssl_update_report_fields(trust_state, self_signed, issuer_org, expiration_info)
        return "\n".join(
            [
                f"{report.ssl_label('status')} {report.ssl_status_text(trust_state, self_signed)}",
                f"{report.ssl_label('issuer')} {report.ssl_issuer_text(self_signed, issuer_org)}",
                f"{report.ssl_label('expires')} {report.ssl_expires_text(expiration_info)}",
            ]
        )
    except Exception:
        logger.exception("Failed to summarize SSL settings")
        report.has_ssl = None
        return report.format_error("Unable to summarize SSL settings")
def security_build_section(report):
    """Build the Markdown 'Security Summary' section.

    Runs the anti-malware (Monarx/Imunify), firewall, SSH, and SSL checks,
    ticking the shared progress bar between steps, and returns the rendered
    section text or a formatted error string on failure.
    """
    logger.debug("Building 'Security' section")
    steps = 5
    section_progress = SectionProgress(
        report.progress,
        (report.section_index, report.sections_total),
        steps,
        report.section_name,
    )
    def _colored_status_line(label_text, detected, detected_text="Detected", missing_text="Not Detected"):
        # Bold white label followed by a green/red detection state.
        label = report.apply_color("white", report.apply_color("bold", f"{label_text}:"))
        value = report.apply_color("green", detected_text) if detected else report.apply_color("red", missing_text)
        return f"{label} {value}"
    def _colored_firewall_line(firewall_items):
        # Comma-joined firewall layers in green, or a red "none" notice.
        label = report.apply_color("white", report.apply_color("bold", "Firewall:"))
        if firewall_items:
            return f"{label} {report.apply_color('green', ', '.join(firewall_items))}"
        return f"{label} {report.apply_color('red', 'No firewall detected')}"
    try:
        section_progress.tick("Checking Monarx agent")
        get_monarx(report)
        section_progress.tick("Checking Imunify agent")
        get_imunify(report)
        section_progress.tick("Detecting firewall stack")
        get_firewall(report)
        section_progress.tick("Reading SSH config")
        ssh_settings_summary = get_ssh_settings(report)
        section_progress.tick("Summarizing SSL")
        ssl_summary = get_ssl(report)
        monarx_line = _colored_status_line("Monarx", bool(getattr(report, "has_monarx", False)))
        imunify_line = _colored_status_line("Imunify", bool(getattr(report, "has_imunify", False)))
        firewall_line = _colored_firewall_line(getattr(report, "firewall_stack", None) or [])
        return (
            f"{report.format_heading('## Security Summary')}\n\n"
            f"{report.format_subheading('### Anti-Malware')}\n"
            f"{monarx_line}\n"
            f"{imunify_line}\n"
            f"{firewall_line}\n\n"
            f"{report.format_subheading('### SSH')}\n"
            f"{report.format_block_dim(ssh_settings_summary)}\n\n"
            f"{report.format_subheading('### SSL')}\n"
            f"{report.format_block_dim(ssl_summary)}"
        )
    except Exception:
        logger.exception("Failed to build 'Security Insights' section")
        error_text = (
            "[Error] Failed to build security section. Check for Monarx, Imunify, Firewall, SSH, "
            "and SSL settings manually. Please report this as a bug."
        )
        return report.format_error(error_text)
# --- Services
# Friendly service names grouped by display category, each mapped to the
# systemd unit-name patterns used to detect it (consumed by the _systemd_*
# and _svc_* helpers below).
SERVICE_CATEGORIES = {
    "Web Servers": {
        "Apache": ["httpd", "apache2"],
        "NGINX": ["nginx"],
        "LiteSpeed": ["lshttpd", "litespeed", "lsws"],
        "HAProxy": ["haproxy"],
        "Varnish": ["varnish"],
        "Hitch": ["hitch"],
        "nghttpx": ["nghttpx"],
    },
    "Application Runtimes": {
        "Tomcat": ["tomcat"],
        "NodeJS": ["node", "nodejs", "pm2"],
        "Gunicorn": ["gunicorn"],
        "Supervisord": ["supervisord"],
        "Monit": ["monit"],
        "Docker": ["docker"],
        "Podman": ["podman"],
    },
    "Databases and Search": {
        "MySQL / MariaDB": ["mysql", "mysqld", "mariadb"],
        "PostgreSQL": ["postgresql", "postgresql-"],
        "MongoDB": ["mongod", "mongodb"],
        "OpenSearch": ["opensearch"],
        "Elasticsearch": ["elasticsearch"],
        "Redis": ["redis"],
        "Memcached": ["memcached"],
        "RabbitMQ": ["rabbitmq"],
    },
    "Mail and DNS": {
        "BIND (named)": ["named"],
        "PowerDNS Authoritative": ["pdns"],
        "PowerDNS Recursor": ["pdns-recursor"],
    },
    "Security and Firewall": {
        "Fail2Ban": ["fail2ban"],
        "CSF": ["csf", "lfd"],
        "firewalld": ["firewalld"],
        "iptables": ["iptables"],
        "nftables": ["nftables"],
        "ClamAV Daemon": ["clamd", "clamav-daemon"],
        "ClamAV Freshclam": ["freshclam"],
        "Imunify360": ["imunify360", "imunify360-agent"],
        "ImunifyAV": ["imunifyav", "imunify-antivirus"],
        "auditd": ["auditd"],
    },
    "Monitoring": {
        "Node Exporter": ["node_exporter"],
        "Prometheus": ["prometheus"],
        "Grafana": ["grafana"],
        "Netdata": ["netdata"],
    },
}
def get_service_on_port(port):
    """Return the process name listening on *port* (via `ss -ltnp`), or None."""
    try:
        listing = subprocess.check_output(["ss", "-ltnp"], text=True)
        needle = f":{port} "
        for entry in listing.splitlines():
            if needle not in entry:
                continue
            found = re.search(r'users:\(\("([^"]+)",pid=(\d+),', entry)
            if found:
                return found.group(1)
    except Exception:
        logger.exception("Failed to get service on port %s", port)
    return None
def _systemd_list_service_units(timeout_seconds=10):
    """Collect every .service unit name systemd knows about.

    Merges `list-unit-files` (installed) with `list-units --all` (loaded);
    each query is best-effort, so a failure of one still yields the other.
    """
    units = set()

    def harvest(command, failure_message):
        try:
            output = subprocess.check_output(command, text=True, timeout=timeout_seconds)
        except Exception:
            logger.debug(failure_message, exc_info=True)
            return
        for row in output.splitlines():
            fields = row.split()
            if fields and fields[0].endswith(".service"):
                units.add(fields[0])

    harvest(
        ["systemctl", "list-unit-files", "--type=service", "--no-legend", "--no-pager"],
        "systemctl list-unit-files failed",
    )
    harvest(
        ["systemctl", "list-units", "--type=service", "--all", "--no-legend", "--no-pager"],
        "systemctl list-units failed",
    )
    return units
def _systemd_state_for(unit_name, timeout_seconds=3):
    """Classify a unit via `systemctl show`.

    Returns one of: "active", "failed", "activating", "inactive",
    "inactive (disabled)", "not-installed", "unknown", or a raw ActiveState.
    """
    try:
        raw_output = subprocess.check_output(
            [
                "systemctl",
                "show",
                unit_name,
                "--property=LoadState,ActiveState,UnitFileState",
                "--no-pager",
            ],
            text=True,
            timeout=timeout_seconds,
            stderr=subprocess.DEVNULL,
        )
    except Exception:
        return "unknown"
    properties = {}
    for row in raw_output.splitlines():
        name, separator, value = row.partition("=")
        if separator:
            properties[name.strip()] = value.strip()
    if properties.get("LoadState", "") == "not-found":
        return "not-installed"
    active_state = properties.get("ActiveState", "")
    if active_state in ("active", "failed", "activating"):
        return active_state
    if active_state in ("deactivating", "inactive"):
        # Distinguish units that are merely stopped from those that cannot
        # start on boot (disabled/masked/static/etc.).
        disabled_like = {
            "disabled",
            "masked",
            "static",
            "generated",
            "indirect",
            "bad",
            "invalid",
            "transient",
        }
        if properties.get("UnitFileState", "") in disabled_like:
            return "inactive (disabled)"
        return "inactive"
    return active_state or "unknown"
def _systemd_find_units_by_patterns(all_units, patterns):
matched_units = set()
for pattern in patterns:
if pattern.endswith(".service") and pattern in all_units:
matched_units.add(pattern)
continue
sanitized = pattern.replace("*", ".*")
try:
regex_pattern = re.compile(rf"^{sanitized}$")
except Exception:
continue
for unit_name in all_units:
if regex_pattern.match(unit_name):
matched_units.add(unit_name)
return sorted(matched_units)
def _systemd_probe_units(units_to_probe):
    """Return a (state, unit_name) pair for each probed unit, in input order."""
    return [(_systemd_state_for(unit_name), unit_name) for unit_name in units_to_probe]
def get_installed_services(report):
    """Populate report.installed_services.

    Shape: {category: {friendly_name: {"state": str, "units": [names]}}}.
    Each service's state is the highest-priority state observed across its
    matched (or fallback-pattern) units.
    """
    known_units = _systemd_list_service_units(timeout_seconds=10)
    # Most significant state first; "unknown" when none of these appear.
    state_priority = (
        "active",
        "failed",
        "activating",
        "inactive",
        "inactive (disabled)",
        "not-installed",
    )
    results = {}
    for category, services in SERVICE_CATEGORIES.items():
        logger.debug("Checking category: %s", category)
        per_service = {}
        for friendly_name, patterns in services.items():
            matched_units = _systemd_find_units_by_patterns(known_units, patterns)
            # When nothing matched, probe the raw patterns directly.
            probed = _systemd_probe_units(matched_units or patterns)
            observed = {state for state, _ in probed}
            state = next((candidate for candidate in state_priority if candidate in observed), "unknown")
            per_service[friendly_name] = {
                "state": state,
                "units": matched_units or patterns,
            }
        results[category] = per_service
    report.installed_services = results
    return report.installed_services
def get_service_stats(report):
    """Collect currently-failed systemd services that match known patterns.

    Sets report.failed_services to a dict {friendly_name: "unit, unit"}
    (empty when nothing failed) and returns a bullet-list summary string,
    or None when no matching failures were found.
    """
    failed_services_map = {}
    try:
        systemctl_output = subprocess.check_output(
            ["systemctl", "--failed", "--no-legend", "--no-pager"],
            text=True,
            timeout=5,
            stderr=subprocess.DEVNULL,
        )
        failed_service_units = []
        for line in systemctl_output.splitlines():
            columns = line.split()
            if columns and columns[0].endswith(".service"):
                failed_service_units.append(columns[0])
        # Union of detection patterns per friendly service name.
        friendly_to_patterns = {}
        for services in SERVICE_CATEGORIES.values():
            for friendly_name, patterns in services.items():
                friendly_to_patterns.setdefault(friendly_name, set()).update(patterns)
        for friendly_name, patterns in friendly_to_patterns.items():
            matched_units = []
            for unit_name in failed_service_units:
                base_unit_name = unit_name[:-8] if unit_name.endswith(".service") else unit_name
                # Substring match keeps versioned units (e.g. postgresql-15).
                if any(pattern in base_unit_name for pattern in patterns):
                    matched_units.append(unit_name)
            if matched_units:
                failed_services_map[friendly_name] = ", ".join(sorted(set(matched_units)))
    except Exception as exc:
        logger.debug("Failed to query failed services: %s", exc)
        failed_services_map = {}
    # Bug fix: always expose a dict. Previously an empty result was stored as
    # a list ([]), so the attribute's type depended on the outcome.
    report.failed_services = failed_services_map
    if not failed_services_map:
        return None
    return "\n".join(
        f"- {friendly} ({units})"
        for friendly, units in failed_services_map.items()
    )
def _svc_run_text(command_args, timeout_seconds=10):
    """Run *command_args* and return its stdout as text (stderr discarded).

    Propagates CalledProcessError / TimeoutExpired / OSError from
    subprocess.check_output; callers wrap this in try/except.
    """
    return subprocess.check_output(
        command_args,
        text=True,
        timeout=timeout_seconds,
        stderr=subprocess.DEVNULL,
    )
def _svc_get_journald_storage_mode():
    """Report journald's Storage= setting.

    Reads the merged config via `systemd-analyze cat-config`; when that is
    unavailable, infers persistence from the presence of /var/log/journal.
    """
    merged_config = None
    try:
        merged_config = _svc_run_text(
            ["systemd-analyze", "cat-config", "systemd/journald.conf"],
            timeout_seconds=6,
        )
    except Exception:
        pass
    if merged_config is not None:
        for raw_line in merged_config.splitlines():
            candidate = raw_line.strip()
            if not candidate or candidate.startswith(("#", ";")):
                continue
            if candidate.startswith("Storage="):
                return candidate.split("=", 1)[1].strip()
    if os.path.isdir("/var/log/journal"):
        return "persistent (inferred)"
    return "volatile (inferred)"
def _svc_get_boot_count(journalctl_path):
    """Count boots recorded in the journal; 0 when the query fails."""
    try:
        listing = _svc_run_text([journalctl_path, "--list-boots", "--no-pager"], timeout_seconds=6)
    except Exception:
        return 0
    return sum(1 for line in listing.strip().splitlines() if line.strip())
def _svc_get_any_failed_unit_name():
    """Return one currently-failed .service unit name, or None."""
    try:
        failed_output = _svc_run_text(
            ["systemctl", "--failed", "--no-legend", "--no-pager"],
            timeout_seconds=5,
        )
    except Exception:
        return None
    for line in failed_output.splitlines():
        columns = line.split()
        if columns and columns[0].endswith(".service"):
            return columns[0]
    return None
def _svc_unit_status_has_rotation_warning(unit_name):
    """True when `systemctl status` hints the journal rotated under the unit."""
    if not unit_name:
        return False
    try:
        status_text = _svc_run_text(
            ["systemctl", "status", unit_name, "-l", "--no-pager"],
            timeout_seconds=6,
        )
    except Exception:
        return False
    haystack = status_text.lower()
    return "journal has been rotated" in haystack or "log output is incomplete" in haystack
def _svc_is_systemd_failure_line(lower_line):
if "systemd[" not in lower_line and "systemd:" not in lower_line:
return False
failure_markers = (
"failed to start",
"main process exited",
"entered failed state",
"failed with result",
"start request repeated too quickly",
"service hold-off time over",
"unit entered failed state",
"start operation timed out",
)
return any(marker in lower_line for marker in failure_markers)
def _svc_build_no_history_message(journald_storage, boots_available, rotation_warning, reason_line):
reason_lower = (reason_line or "").strip().lower()
if "no journal entries returned" in reason_lower:
base = "No journal entries returned for the last 24 hours."
else:
base = "No historical service failures found in the last 24 hours."
notes = []
storage_text = (journald_storage or "").strip().lower()
if storage_text.startswith("volatile"):
notes.append("journald storage is volatile")
try:
boots_int = int(boots_available)
except Exception:
boots_int = None
if boots_int is not None and boots_int <= 1:
notes.append("limited boot history")
if rotation_warning:
notes.append("journal rotation detected")
if notes:
return f"{base} (Note: {', '.join(notes)})"
return base
def _svc_try_journal_query(journalctl_path):
    """Fetch recent high-priority journal lines; empty string on failure."""
    query = [journalctl_path, "--since", "24 hours ago", "-p", "0..4", "-n", "800", "--no-pager"]
    try:
        return _svc_run_text(query, timeout_seconds=12)
    except Exception as exc:
        logger.debug("journalctl query failed: %s", exc)
        return ""
def _svc_collect_friendly_patterns():
    """Flatten SERVICE_CATEGORIES into {friendly_name: set(patterns)}."""
    merged = {}
    for category_services in SERVICE_CATEGORIES.values():
        for friendly_name, patterns in category_services.items():
            merged.setdefault(friendly_name, set()).update(patterns)
    return merged
def _svc_parse_service_failures(logs_text):
    """Group systemd failure log lines by friendly service name.

    Returns {friendly_name: {"count": n, "lines": [up to 3 sample lines]}},
    including only services with at least one matching failure line.
    """
    patterns_lower = {
        friendly_name: [pattern.lower() for pattern in patterns]
        for friendly_name, patterns in _svc_collect_friendly_patterns().items()
    }
    history = {friendly_name: {"count": 0, "lines": []} for friendly_name in patterns_lower}
    for raw_line in (logs_text or "").splitlines():
        lowered = raw_line.lower()
        if not _svc_is_systemd_failure_line(lowered):
            continue
        for friendly_name, lowered_patterns in patterns_lower.items():
            if any(pattern in lowered for pattern in lowered_patterns):
                entry = history[friendly_name]
                entry["count"] += 1
                if len(entry["lines"]) < 3:
                    entry["lines"].append(raw_line.strip())
                # Attribute each line to at most one service.
                break
    return {name: data for name, data in history.items() if data["count"] > 0}
def get_service_failure_history(report):
    """Summarize service failures seen in the last 24 hours of journal logs.

    Stores the parsed history on report.service_failure_history and returns a
    human-readable summary string, or None on unexpected errors.
    """
    try:
        journalctl_path = shutil.which("journalctl")
        if not journalctl_path:
            report.service_failure_history = {}
            return "Historical failure check unavailable (journalctl not found)."
        # Context used to caveat a "nothing found" answer.
        storage_mode = _svc_get_journald_storage_mode()
        boot_count = _svc_get_boot_count(journalctl_path)
        rotated = _svc_unit_status_has_rotation_warning(_svc_get_any_failed_unit_name())
        logs_text = _svc_try_journal_query(journalctl_path)
        if not logs_text:
            report.service_failure_history = {}
            return _svc_build_no_history_message(
                storage_mode,
                boot_count,
                rotated,
                "no journal entries returned for that query",
            )
        history = _svc_parse_service_failures(logs_text)
        report.service_failure_history = history
        if not history:
            return _svc_build_no_history_message(
                storage_mode,
                boot_count,
                rotated,
                "no systemd failure patterns matched",
            )
        return report.svc_format_service_failure_output(history)
    except Exception as exc:
        logger.debug("Failed to collect historical service failures: %s", exc)
        report.service_failure_history = {}
        return None
def services_build_section(report):
    """Build the Markdown 'Software Services Summary' section.

    Enumerates known services and their states, lists currently-failed units,
    and summarizes historical failures from the journal, ticking the shared
    progress bar between steps. Returns the rendered section text or a
    formatted error string on failure.
    """
    logger.debug("Building 'Services' section")
    steps = 4
    section_progress = SectionProgress(
        report.progress,
        (report.section_index, report.sections_total),
        steps,
        report.section_name,
    )
    try:
        section_progress.tick("Enumerating installed services")
        installed_services = get_installed_services(report)
        installed_summary_text = report.format_installed_services(installed_services)
        section_progress.tick("Checking current failed services")
        current_failed_summary = get_service_stats(report)
        if not current_failed_summary:
            current_failed_summary = "None detected"
        # Red when any service is failed, dim for the all-clear text.
        current_failed_style = "red" if current_failed_summary != "None detected" else "dim"
        current_failed_block = report.apply_color(current_failed_style, current_failed_summary)
        section_progress.tick("Checking historical service failures")
        history_summary = get_service_failure_history(report)
        if not history_summary:
            history_summary = "None detected"
        section_progress.tick("Finalizing services section")
        return (
            f"{report.format_heading('## Software Services Summary')}\n"
            "Below is a categorized summary of key services detected on the system and their current status.\n"
            "If a service is missing from the list, it means it was not detected on the server.\n\n"
            f"{installed_summary_text}\n"
            f"{report.format_subheading('Current Failed Services (systemctl --failed):')}\n"
            f"{current_failed_block}\n\n"
            f"{report.format_subheading('Historical Service Failures (recent logs):')}\n"
            f"{report.format_block_dim(history_summary)}\n"
        )
    except Exception:
        logger.exception("Failed to build 'Services and Versions' section")
        error_text = (
            "[Error] Failed to build services section. Manually check for installed services "
            "and whether they are active, failed, or enabled but not running. Please report this as a bug."
        )
        return report.format_error(error_text)
# --- Main
def main():
def collect_section_specs():
return (
("system", "System Section", system_build_section),
("site", "Account and Website Section", account_build_section),
("traffic", "Traffic Section", traffic_build_section),
("php", "PHP Section", php_build_section),
("db", "Database Section", database_build_section),
("mail", "Mail Section", mail_build_section),
("security", "Security Section", security_build_section),
("services", "Software Services Section", services_build_section),
)
def resolve_chosen_sections(parsed_args, section_specs):
    """Decide which sections to build from the parsed CLI flags.

    Returns (run_full_mode, ordered chosen keys, chosen spec tuples).
    --full, or no positive section flag at all, selects every section;
    no_<key> flags always exclude their section.
    """
    all_keys = [spec[0] for spec in section_specs]
    wanted = {k for k in all_keys if getattr(parsed_args, k, False)}
    excluded = {k for k in all_keys if getattr(parsed_args, f"no_{k}", False)}
    run_full_mode = parsed_args.full or not wanted
    candidates = set(all_keys) if run_full_mode else wanted
    ordered = []
    for key in all_keys:  # preserve the canonical section order
        if key in candidates and key not in excluded:
            ordered.append(key)
    selected = tuple(spec for spec in section_specs if spec[0] in ordered)
    return run_full_mode, ordered, selected
def needs_domain_context(chosen_keys):
    """Return True when any selected section requires domain/account lookups."""
    return not {"site", "traffic", "php", "mail", "security"}.isdisjoint(chosen_keys)
def prepare_domain_context(report, chosen_keys):
    """Populate domain, owner, and IP context on the report when a chosen section needs it.

    Owner/IP lookups only run when not already populated, and the
    attribute checks are deliberately nested so they are skipped entirely
    when no relevant section was selected.
    """
    if not needs_domain_context(chosen_keys):
        return
    get_domains(report)
    get_managed_domain(report)
    if any(k in chosen_keys for k in ("php", "mail", "security")):
        # Owner lookup only if it has not already been resolved.
        if not (getattr(report, "domain_owner", None) or "").strip():
            get_user_stats(report)
    if any(k in chosen_keys for k in ("mail", "traffic", "security")):
        # IP lookup only if it has not already been resolved.
        if not (getattr(report, "main_ip", None) or "").strip():
            get_ip_info(report)
def queue_deadweight_scan_if_needed(report, chosen_keys):
    """Queue the slow deadweight disk scan in the background, once, for the system section."""
    if "system" not in chosen_keys:
        return
    if getattr(report, "deadweight_future", None) is not None:
        return  # a scan is already queued
    try:
        report.deadweight_future = EXECUTOR.submit(get_deadweight)
        logger.info("[deadweight] background scan queued")
    except Exception:
        logger.exception("Failed to queue deadweight background scan")
        report.deadweight_future = None
def run_sections(report, chosen_sections):
    """Build every chosen section in order, driving the progress bar.

    Each section contributes 100 progress units. A section that raises
    RuntimeError/OSError is logged and skipped so the remaining sections
    still build; other exception types propagate to main's boundary.
    """
    report.sections_total = len(chosen_sections)
    report.progress = ProgressBar(
        report.sections_total * 100,
        prefix="Building:",
        suffix="",
        decimals=0,
    )
    for index, (section_key, section_label, section_func) in enumerate(chosen_sections, start=1):
        report.section_index = index
        report.section_name = section_label
        logger.info("Building section: %s", section_label)
        try:
            content = report.run_section(
                f"{section_key}_build_section",
                # Bind section_func as a default arg to avoid late-binding closure bugs.
                lambda section_func=section_func: section_func(report),
            )
            report.add_section(content)
        except (RuntimeError, OSError) as err:
            # Fixed misleading copy-pasted message: this is a per-section failure,
            # not a fatal main-execution error -- the loop continues.
            logger.exception("Section '%s' failed to build; continuing: %s", section_label, err)
    report.progress.finish()
def strip_ansi(text: str) -> str:
    """Strip ANSI CSI escape sequences so the on-disk report is plain Markdown."""
    if text:
        # ESC [ <params> <intermediates> <final byte>
        return re.sub(r"\x1B\[[0-?]*[ -/]*[@-~]", "", text)
    return ""
def write_report_to_file(report_text: str) -> str:
    """Write the ANSI-stripped report atomically under /root and return its path."""
    stamp = dt.datetime.now().strftime("%Y_%m_%d_%H%M%S")
    final_path = f"/root/server_audit_report.{stamp}.md"
    staging_path = f"{final_path}.tmp"
    plain_text = strip_ansi(report_text)
    os.makedirs("/root", exist_ok=True)
    with open(staging_path, "w", encoding="utf-8") as out:
        out.write(plain_text.rstrip() + "\n")
    # os.replace makes the finished report appear atomically.
    os.replace(staging_path, final_path)
    return final_path
# Logging first so every subsequent step is captured in /var/log/SPARTA.log.
setup_logging()
logger.info("=== SPARTA Execution Started ===")
args = build_arg_parser().parse_args()
section_specs = collect_section_specs()
run_full_mode, chosen_keys, chosen_sections = resolve_chosen_sections(args, section_specs)
logger.debug("Run mode: full=%s sections=%s", run_full_mode, ",".join(chosen_keys))
try:
    report = SpartaReport()
    # Server-wide identity gathered before any section builds.
    get_server_id(report)
    get_panel_type(report)
    prepare_domain_context(report, chosen_keys)
    if "db" in chosen_keys:
        get_mysqltuner_opt_in(report)
    if "system" in chosen_keys:
        # BUG FIX: the two implicitly concatenated literals had no separator,
        # producing "...be patient.Use --no-system..."; added the missing space.
        logger.warning(
            "Servers with larger than normal disk usage may delay report generation. Please be patient. "
            "Use --no-system to skip disk-space usage checks."
        )
    # Self-guarding: no-ops unless the system section was chosen.
    queue_deadweight_scan_if_needed(report, chosen_keys)
    run_sections(report, chosen_sections)
    report_text = report.generate_report()
    report_file_path = write_report_to_file(report_text)
    sys.stdout.write(report_text + "\n")
    sys.stdout.write(f"\nReport saved to: {report_file_path}\n")
    sys.stdout.flush()
    # Don't block process exit waiting on the background deadweight scan.
    EXECUTOR.shutdown(wait=False)
    logger.info("=== SPARTA Report Generation Complete ===")
except Exception:
    # Top-level boundary: log the full traceback.
    # NOTE(review): exit code remains 0 even on fatal errors -- confirm intended.
    logger.exception("Fatal error occurred in SPARTA main execution.")
# Standard script entry point: run only when executed directly, not on import.
if __name__ == "__main__":
    main()
# Extraction residue from the HTML file-manager wrapper (not part of the Python source);
# commented out so the module remains importable:
# Sindbad File Manager Version 1.0, Coded By Sindbad EG ~ The Terrorists