Initial commit
This commit is contained in:
@@ -0,0 +1,3 @@
|
||||
"""Package principal de PVE Backup Report."""
|
||||
|
||||
__version__ = "1.0.0"
|
||||
@@ -0,0 +1,5 @@
|
||||
"""Module entry point so the package can run via `python -m pve_backup_report`."""

from pve_backup_report.cli import main

if __name__ == "__main__":
    # Propagate main()'s integer return value as the process exit code.
    raise SystemExit(main())
|
||||
@@ -0,0 +1,612 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import logging
|
||||
import tempfile
|
||||
from collections.abc import Sequence
|
||||
from pathlib import Path
|
||||
|
||||
from pve_backup_report import __version__
|
||||
from pve_backup_report.collectors import collect_report_data
|
||||
from pve_backup_report.collectors import collect_backup_task_candidates, recent_unique_vzdump_tasks, task_log_line
|
||||
from pve_backup_report.collectors import (
|
||||
collect_pbs_access_users,
|
||||
collect_pbs_storages,
|
||||
normalize_pbs_datastore_usage,
|
||||
)
|
||||
from pve_backup_report.config import ConfigError, load_config
|
||||
from pve_backup_report.coverage import (
|
||||
STATUS_DISABLED_PBS,
|
||||
STATUS_INDETERMINATE,
|
||||
STATUS_MISSING,
|
||||
STATUS_NON_PBS_PLANNED,
|
||||
STATUS_PBS_PLANNED,
|
||||
analyze_backup_coverage,
|
||||
)
|
||||
from pve_backup_report.logging_config import configure_logging
|
||||
from pve_backup_report.pbs_client import PbsApiError, PbsClient
|
||||
from pve_backup_report.pve_client import PveApiError, PveClient
|
||||
from pve_backup_report.report_data import prepare_report_data, report_data_to_dict
|
||||
from pve_backup_report.report_pdf import format_size
|
||||
from pve_backup_report.report_weasy_pdf import render_pdf
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def build_parser() -> argparse.ArgumentParser:
    """Build the command-line parser for the pve-backup-report entry point.

    All action flags are mutually independent booleans; dispatch order is
    decided by the caller, not by argparse.
    """
    parser = argparse.ArgumentParser(
        prog="pve-backup-report",
        description="Prepare le rapport de sauvegarde Proxmox VE.",
    )

    # Boolean switches, declared in the order they should appear in --help.
    flag_specs = (
        ("--check-config", "valide la configuration puis quitte"),
        (
            "--check-api",
            "teste les endpoints PVE /nodes, /storage et /cluster/backup puis quitte",
        ),
        (
            "--dump-inventory",
            "affiche les stockages PBS et jobs de sauvegarde normalises puis quitte",
        ),
        (
            "--dump-coverage",
            "affiche la couverture VM/CT basee sur vmid, all=1, pools "
            "et le type de storage puis quitte",
        ),
        (
            "--dump-report-data",
            "affiche en JSON les donnees structurees destinees au futur PDF puis quitte",
        ),
        (
            "--dump-pbs-storage-usages",
            "interroge directement les PBS et affiche l'espace total, consomme et libre des datastores",
        ),
        (
            "--dump-pbs-users",
            "affiche les utilisateurs PBS configures sur les stockages PVE et leurs permissions effectives",
        ),
        ("--generate-pdf", "genere un rapport PDF horodate puis quitte"),
    )
    for flag, help_text in flag_specs:
        parser.add_argument(flag, action="store_true", help=help_text)

    parser.add_argument(
        "--debug-last-backup-vmid",
        type=int,
        metavar="VMID",
        help="affiche les taches/logs vzdump recentes mentionnant un VMID puis quitte",
    )
    parser.add_argument(
        "--version",
        action="version",
        version=f"%(prog)s {__version__}",
    )
    return parser
|
||||
|
||||
|
||||
def run(argv: Sequence[str] | None = None) -> int:
    """Parse CLI arguments, load configuration, and dispatch the requested action.

    Exit codes used here: 0 success, 1 invalid configuration, 3 API/collection
    failure, 4 PDF generation failure, 5 collection finished but recorded
    error-level issues.
    """
    parser = build_parser()
    args = parser.parse_args(argv)

    try:
        config = load_config()
    except ConfigError as exc:
        # Logging is not configured yet; fall back to default settings to report the error.
        configure_logging()
        logger.error("Configuration invalide: %s", exc)
        return 1

    configure_logging(config.log_level)
    logger.info("Demarrage de PVE Backup Report %s", __version__)
    logger.info("Cible PVE configuree: %s", config.pve_api_url)
    for pbs_server in config.configured_pbs_servers:
        logger.info("Cible %s configuree: %s", pbs_server.name, pbs_server.api_url)
    if config.pve_verify_tls is False:
        logger.warning("Verification TLS desactivee par PVE_VERIFY_TLS=false")
        # A CA bundle makes no sense with TLS verification off; warn so the
        # operator notices the conflicting settings.
        if config.pve_ca_bundle is not None:
            logger.warning("PVE_CA_BUNDLE est renseigne mais ignore car TLS est desactive")

    if args.check_config:
        logger.info("Configuration valide")
        return 0

    if args.check_api:
        client = PveClient(config)
        try:
            results = client.check_required_endpoints()
        except PveApiError as exc:
            logger.error("Verification API PVE interrompue: %s", exc)
            return 3
        finally:
            client.close()

        has_error = False
        for result in results:
            if result.ok and result.count is None:
                logger.info("%s: OK", result.endpoint)
            elif result.ok:
                if result.detail:
                    logger.info(
                        "%s: OK, %s element(s), %s",
                        result.endpoint,
                        result.count,
                        result.detail,
                    )
                else:
                    logger.info("%s: OK, %s element(s)", result.endpoint, result.count)
            else:
                has_error = True
                logger.error("%s: ECHEC - %s", result.endpoint, result.error)
                # A Sys.Audit mention in the error usually means the token
                # lacks privileges; give the operator an actionable hint.
                if result.error and "Sys.Audit" in result.error:
                    logger.error(
                        "Action requise: verifier les droits effectifs du token "
                        "PVE, notamment la separation de privileges et Sys.Audit sur /"
                    )
        return 3 if has_error else 0

    if args.dump_inventory:
        report_data = collect_data_or_log_error(config, "inventaire")
        if report_data is None:
            return 3

        log_inventory(report_data)
        return 0 if not any(issue.severity == "error" for issue in report_data.issues) else 5

    if args.dump_coverage:
        report_data = collect_data_or_log_error(config, "couverture")
        if report_data is None:
            return 3

        report_data = analyze_backup_coverage(report_data)
        log_coverage(report_data)
        return 0 if not any(issue.severity == "error" for issue in report_data.issues) else 5

    if args.dump_report_data:
        report_data = collect_data_or_log_error(config, "donnees rapport")
        if report_data is None:
            return 3

        report_data = prepare_report_data(report_data, config)
        # Raw JSON goes to stdout (not the logger) so it can be piped to tools.
        print(json.dumps(report_data_to_dict(report_data), indent=2, ensure_ascii=False))
        return 0 if not any(issue.severity == "error" for issue in report_data.issues) else 5

    if args.dump_pbs_storage_usages:
        return dump_pbs_storage_usages(config)

    if args.dump_pbs_users:
        return dump_pbs_users(config)

    if args.generate_pdf:
        # Validate the output directory before the (expensive) collection step.
        try:
            report_output_dir = ensure_report_output_dir_writable(config.report_output_dir)
        except OSError as exc:
            logger.error("Generation PDF echouee: %s", exc)
            return 4

        report_data = collect_data_or_log_error(config, "generation PDF")
        if report_data is None:
            return 3

        report_data = prepare_report_data(report_data, config)
        try:
            pdf_path = render_pdf(
                report_data,
                report_output_dir,
                config.report_filename_prefix,
                config.pbs_hostnames,
            )
        except (OSError, RuntimeError) as exc:
            logger.error("Generation PDF echouee: %s", exc)
            return 4

        logger.info("Rapport PDF genere: %s", pdf_path)
        return 0 if not any(issue.severity == "error" for issue in report_data.issues) else 5

    if args.debug_last_backup_vmid is not None:
        return debug_last_backup_vmid(config, args.debug_last_backup_vmid)

    # No action flag supplied: report the default (placeholder) behaviour.
    logger.info("Squelette initialise: collecte PVE et generation PDF non implementees")
    return 0
|
||||
|
||||
|
||||
def collect_data_or_log_error(config, label: str):
    """Collect report data from PVE and the configured PBS servers.

    On PVE API failure, logs an error mentioning *label* and returns None.
    All clients are closed in every case.
    """
    pve = PveClient(config)
    pbs_list = configured_pbs_clients(config)
    try:
        data = collect_report_data(pve, pbs_list)
    except PveApiError as exc:
        logger.error("Collecte %s echouee: %s", label, exc)
        data = None
    finally:
        pve.close()
        for pbs in pbs_list:
            pbs.close()
    return data
|
||||
|
||||
|
||||
def configured_pbs_clients(config) -> list[PbsClient]:
    """Instantiate one PbsClient per fully configured PBS server in *config*."""
    clients: list[PbsClient] = []
    for server in config.configured_pbs_servers:
        clients.append(PbsClient(config, server=server))
    return clients
|
||||
|
||||
|
||||
def ensure_report_output_dir_writable(path: Path) -> Path:
    """Validate *path* as the report output directory and return the usable path.

    When the Docker-oriented default /reports is not writable (typical outside
    a container), fall back to a local ./reports directory; any other path is
    required to work as-is.
    """
    try:
        ensure_writable_directory(path)
        return path
    except OSError as exc:
        if path != Path("/reports"):
            raise
        primary_error = exc

        fallback = Path("reports")
        try:
            ensure_writable_directory(fallback)
        except OSError:
            raise OSError(report_output_dir_error_message(path, primary_error)) from primary_error

        logger.warning(
            "REPORT_OUTPUT_DIR=/reports inaccessible hors Docker; "
            "utilisation du repertoire local %s",
            fallback,
        )
        return fallback
|
||||
|
||||
|
||||
def ensure_writable_directory(path: Path) -> None:
    """Create *path* if needed and prove it is writable with a throwaway temp file.

    Raises OSError (with a detailed operator-facing message) when the path is
    not a directory, cannot be created, or cannot be written to.
    """
    if path.exists() and not path.is_dir():
        raise OSError(f"{path}: REPORT_OUTPUT_DIR doit pointer vers un repertoire")

    try:
        path.mkdir(parents=True, exist_ok=True)
        # Actually writing a file is the only reliable writability check
        # (permission bits can lie on network mounts / containers).
        with tempfile.NamedTemporaryFile(
            dir=path,
            prefix=".pve-backup-report-write-test-",
            delete=True,
        ):
            pass
    except OSError as exc:
        raise OSError(report_output_dir_error_message(path, exc)) from exc
|
||||
|
||||
|
||||
def report_output_dir_error_message(path: Path, exc: OSError) -> str:
    """Build the error text for an unusable REPORT_OUTPUT_DIR.

    The Docker default /reports gets extra guidance about the expected volume
    mount and the local fallback.
    """
    base = f"{path}: impossible d'ecrire dans REPORT_OUTPUT_DIR: {exc}"
    if path != Path("/reports"):
        return base
    return (
        f"{base}. La valeur /reports est prevue pour le conteneur Docker "
        "avec le volume ./reports:/reports. En execution locale, le fallback "
        "automatique vers reports/ a aussi echoue; verifier les droits du "
        "repertoire courant ou configurer REPORT_OUTPUT_DIR avec un chemin "
        "accessible."
    )
|
||||
|
||||
|
||||
def dump_pbs_storage_usages(config) -> int:
    """Query each configured PBS directly and log datastore capacity figures.

    Returns 1 when no PBS is configured, 5 when any query failed or returned
    an unexpected payload, 0 otherwise. Per-datastore format problems are only
    warnings and do not affect the exit code.
    """
    pbs_clients = configured_pbs_clients(config)
    if not pbs_clients:
        logger.error("Aucun PBS configure")
        return 1

    has_error = False
    try:
        for pbs_client in pbs_clients:
            logger.info("Interrogation PBS: %s (%s)", pbs_client.server_name, pbs_client.api_url)
            try:
                raw_datastores = pbs_client.get_datastores()
            except PbsApiError as exc:
                has_error = True
                logger.error("%s /config/datastore: ECHEC - %s", pbs_client.server_name, exc)
                continue

            # Defensive: the API payload is untyped JSON, so validate shape before use.
            if not isinstance(raw_datastores, list):
                has_error = True
                logger.error("%s /config/datastore: reponse inattendue", pbs_client.server_name)
                continue

            logger.info("%s /config/datastore: OK, %s datastore(s)", pbs_client.server_name, len(raw_datastores))
            if not raw_datastores:
                logger.warning("%s: aucun datastore retourne par /config/datastore", pbs_client.server_name)
                continue

            for raw_datastore in raw_datastores:
                if not isinstance(raw_datastore, dict):
                    logger.warning("%s: datastore ignore, format invalide", pbs_client.server_name)
                    continue
                datastore = datastore_name_from_raw(raw_datastore)
                if datastore is None:
                    logger.warning("%s: datastore ignore, nom absent", pbs_client.server_name)
                    continue
                try:
                    raw_status = pbs_client.get_datastore_status(datastore)
                except PbsApiError as exc:
                    has_error = True
                    logger.error(
                        "%s /admin/datastore/%s/status: ECHEC - %s",
                        pbs_client.server_name,
                        datastore,
                        exc,
                    )
                    continue
                if not isinstance(raw_status, dict):
                    has_error = True
                    logger.error(
                        "%s /admin/datastore/%s/status: reponse inattendue",
                        pbs_client.server_name,
                        datastore,
                    )
                    continue
                usage = normalize_pbs_datastore_usage(pbs_client.server_name, datastore, raw_status)
                # format_size may yield None for missing figures; display_value
                # substitutes the "non renseigne" placeholder in that case.
                logger.info(
                    "%s datastore=%s total=%s consomme=%s libre=%s",
                    usage.server_name,
                    usage.datastore,
                    display_value(format_size(usage.total_bytes)),
                    display_value(format_size(usage.used_bytes)),
                    display_value(format_size(usage.available_bytes)),
                )
    finally:
        for pbs_client in pbs_clients:
            pbs_client.close()

    return 5 if has_error else 0
|
||||
|
||||
|
||||
def dump_pbs_users(config) -> int:
    """Log the PBS users referenced by PVE storages and their effective permissions.

    Returns 1 when no PBS is configured, 3 when the PVE storage collection
    fails, 5 when error-level issues were recorded, 0 otherwise.
    """
    client = PveClient(config)
    pbs_clients = configured_pbs_clients(config)
    if not pbs_clients:
        logger.error("Aucun PBS configure")
        client.close()
        return 1

    issues = []
    try:
        try:
            pbs_storages = collect_pbs_storages(client, issues)
        except PveApiError as exc:
            logger.error("Collecte des stockages PBS PVE echouee: %s", exc)
            return 3

        users = collect_pbs_access_users(pbs_clients, pbs_storages, issues)
        logger.info("Utilisateurs PBS utilises par PVE: %s", len(users))
        for user in users:
            logger.info(
                "- %s: auth-id=%s, storage=%s, datastore=%s, namespace=%s, enabled=%s, expire=%s, email=%s, permissions=%s, commentaire=%s",
                user.server_name,
                user.auth_id,
                user.storage_id,
                display_value(user.datastore),
                display_value(user.namespace),
                display_bool(user.enabled),
                display_value(user.expire),
                display_value(user.email),
                display_permissions(user.permissions),
                display_value(user.comment),
            )

        # Issues accumulated by both collectors are reported as warnings,
        # whatever their recorded severity; severity only drives the exit code.
        for issue in issues:
            logger.warning(
                "- %s/%s: %s%s",
                issue.severity,
                issue.component,
                issue.message,
                f" ({issue.details})" if issue.details else "",
            )
        return 0 if not any(issue.severity == "error" for issue in issues) else 5
    finally:
        client.close()
        for pbs_client in pbs_clients:
            pbs_client.close()
|
||||
|
||||
|
||||
def datastore_name_from_raw(raw_datastore: dict[str, object]) -> str | None:
    """Extract the datastore name from a raw PBS config entry.

    Tries the known key aliases in order and returns the first non-blank
    value, stripped; None when no usable name is present.
    """
    for alias in ("name", "store", "datastore"):
        candidate = raw_datastore.get(alias)
        if candidate is None:
            continue
        text = str(candidate).strip()
        if text:
            return text
    return None
|
||||
|
||||
|
||||
def debug_last_backup_vmid(config, vmid: int) -> int:
    """Inspect recent vzdump task logs and print the lines mentioning *vmid*.

    Returns 3 when the initial guest collection fails, 0 otherwise — even when
    nothing matched (a warning is logged in that case).
    """
    client = PveClient(config)
    try:
        try:
            # NOTE(review): called without PBS clients here, unlike
            # collect_data_or_log_error — presumably they are optional since
            # only the guest list is needed; confirm against the collector.
            guests = collect_report_data(client).guests
        except PveApiError as exc:
            logger.error("Collecte VM/CT echouee: %s", exc)
            return 3

        issues = []
        tasks = recent_unique_vzdump_tasks(
            collect_backup_task_candidates(client, guests, issues)
        )
        logger.info("Taches vzdump inspectees: %s", len(tasks))
        found = False
        # Cap the number of task logs fetched to the configured history limit.
        for task in tasks[: config.pve_task_history_limit]:
            node = task.get("node")
            upid = task.get("upid")
            if not node or not upid:
                continue
            try:
                raw_log = client.get_task_log(str(node), str(upid))
            except PveApiError as exc:
                logger.warning("Log indisponible pour %s: %s", upid, exc)
                continue
            lines = []
            if isinstance(raw_log, list):
                lines = [line for line in (task_log_line(entry) for entry in raw_log) if line]
            # Plain substring match: "100" also matches "1100"; acceptable for
            # a debug helper, but may over-report.
            matching_lines = [line for line in lines if str(vmid) in line]
            if not matching_lines:
                continue
            found = True
            logger.info(
                "Tache trouvee node=%s upid=%s status=%s start=%s end=%s",
                node,
                upid,
                task.get("status"),
                task.get("starttime"),
                task.get("endtime"),
            )
            # Only the first 20 matching lines per task, to keep output readable.
            for line in matching_lines[:20]:
                logger.info("LOG %s", line)
        if not found:
            logger.warning("Aucune ligne de log recente ne mentionne le VMID %s", vmid)
        if issues:
            for issue in issues:
                logger.warning("%s/%s: %s", issue.severity, issue.component, issue.message)
        return 0
    finally:
        client.close()
|
||||
|
||||
|
||||
def log_inventory(report_data) -> None:
    """Log the collected inventory: PBS storages, backup jobs, pools and issues."""
    logger.info("Storages PBS: %s", len(report_data.pbs_storages))
    for storage in report_data.pbs_storages:
        logger.info(
            "- %s: user=%s, server=%s, datastore=%s, namespace=%s, enabled=%s",
            storage.storage_id,
            display_value(storage.username),
            display_value(storage.server),
            display_value(storage.datastore),
            display_value(storage.namespace),
            display_bool(storage.enabled),
        )

    logger.info("Jobs backup: %s", len(report_data.backup_jobs))
    for job in report_data.backup_jobs:
        logger.info(
            "- %s: storage=%s, schedule=%s, enabled=%s, mode=%s, selection=%s, exclude=%s",
            job.job_id,
            display_value(job.storage),
            display_value(job.schedule),
            display_bool(job.enabled),
            display_value(job.mode),
            display_value(job.selection),
            display_value(job.excluded),
        )

    logger.info("Pools: %s", len(report_data.pools))
    for pool in report_data.pools:
        vmids = ", ".join(str(vmid) for vmid in sorted(pool.vmids))
        logger.info(
            "- %s: vmids=%s",
            pool.pool_id,
            vmids or "aucun membre VM/CT",
        )

    # Collection anomalies are always logged as warnings regardless of their
    # recorded severity level.
    if report_data.issues:
        logger.info("Anomalies collecte: %s", len(report_data.issues))
        for issue in report_data.issues:
            logger.warning(
                "- %s/%s: %s%s",
                issue.severity,
                issue.component,
                issue.message,
                f" ({issue.details})" if issue.details else "",
            )
|
||||
|
||||
|
||||
def log_coverage(report_data) -> None:
    """Log a coverage summary, then per-status detail lists of guests.

    Detail sections are ordered problems-first (disabled PBS, non-PBS,
    indeterminate, missing) with the healthy PBS-planned list last.
    """
    pbs_planned = [item for item in report_data.coverage if item.status == STATUS_PBS_PLANNED]
    non_pbs_planned = [
        item for item in report_data.coverage if item.status == STATUS_NON_PBS_PLANNED
    ]
    disabled_pbs = [item for item in report_data.coverage if item.status == STATUS_DISABLED_PBS]
    missing = [item for item in report_data.coverage if item.status == STATUS_MISSING]
    indeterminate = [
        item for item in report_data.coverage if item.status == STATUS_INDETERMINATE
    ]

    logger.info("VM/CT inventories: %s", len(report_data.guests))
    logger.info("Sauvegardes PBS planifiees: %s", len(pbs_planned))
    logger.info("Sauvegardes non PBS planifiees: %s", len(non_pbs_planned))
    logger.info("Sauvegardes vers PBS desactive: %s", len(disabled_pbs))
    logger.info("Non sauvegardees: %s", len(missing))
    logger.info("Indeterminees: %s", len(indeterminate))

    if disabled_pbs:
        logger.info("VM/CT avec sauvegarde vers PBS desactive:")
        for item in disabled_pbs:
            log_coverage_item(item)

    if non_pbs_planned:
        logger.info("VM/CT sauvegardees vers storage non PBS:")
        for item in non_pbs_planned:
            log_coverage_item(item)

    if indeterminate:
        logger.info("VM/CT indeterminees:")
        for item in indeterminate:
            log_coverage_item(item)

    if missing:
        logger.info("VM/CT non sauvegardees:")
        for item in missing:
            log_coverage_item(item)

    if pbs_planned:
        logger.info("VM/CT sauvegardees vers PBS actif:")
        for item in pbs_planned:
            log_coverage_item(item)
|
||||
|
||||
|
||||
def log_coverage_item(item) -> None:
    """Log one VM/CT coverage entry as a single info line of key=value pairs."""
    guest = item.guest
    job_ids = ", ".join(job.job_id for job in item.jobs) if item.jobs else "non renseigne"
    storage_list = ", ".join(item.storages) if item.storages else "non renseigne"

    parts = [
        f"type={guest.guest_type}",
        f"noeud={display_value(guest.node)}",
        f"etat={display_value(guest.status)}",
        f"couverture={item.status}",
        f"stockage={storage_list}",
        f"jobs={job_ids}",
    ]
    if item.reason:
        parts.append(f"detail={item.reason}")

    logger.info("- %s %s %s", guest.vmid, guest.name, " ".join(parts))
|
||||
|
||||
|
||||
def display_value(value: object | None) -> str:
    """Render an optional value for logs, with a French placeholder for missing data."""
    return "non renseigne" if value is None or value == "" else str(value)
|
||||
|
||||
|
||||
def display_bool(value: bool | None) -> str:
    """Render an optional boolean as oui/non, with a French placeholder when unknown."""
    if value is None:
        return "non renseigne"
    return {True: "oui", False: "non"}[bool(value)]
|
||||
|
||||
|
||||
def display_permissions(permissions: dict[str, bool]) -> str:
    """List the granted permissions alphabetically, or a placeholder when none."""
    granted = [name for name, is_granted in sorted(permissions.items()) if is_granted]
    if not granted:
        return "non renseigne"
    return ", ".join(granted)
|
||||
|
||||
|
||||
def main(argv: Sequence[str] | None = None) -> int:
    """Console-script entry point: delegate to run() and return its exit code."""
    return run(argv)
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,265 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import re
|
||||
from collections.abc import Mapping
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
class ConfigError(ValueError):
    """Runtime configuration error (missing or invalid environment variables)."""
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class PbsServerConfig:
    """Connection settings for one Proxmox Backup Server (one PBS<N>_* env group)."""

    prefix: str  # environment-variable prefix, e.g. "PBS1"
    name: str  # display name (PBS<N>_NAME, falling back to the prefix)
    api_url: str | None
    api_token_id: str | None
    api_token_secret: str | None
    verify_tls: bool  # defaults to the PVE-level setting when unset
    ca_bundle: Path | None
    timeout_seconds: int  # defaults to the PVE-level timeout when unset

    @property
    def configured(self) -> bool:
        """True when URL and both token parts are present, i.e. the server is usable."""
        return (
            self.api_url is not None
            and self.api_token_id is not None
            and self.api_token_secret is not None
        )
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class AppConfig:
    """Immutable application configuration resolved from environment variables."""

    pve_api_url: str
    pve_api_token_id: str
    pve_api_token_secret: str
    report_output_dir: Path  # REPORT_OUTPUT_DIR, default "reports"
    report_timezone: str  # REPORT_TIMEZONE, default "Europe/Paris"
    pve_verify_tls: bool
    pve_ca_bundle: Path | None
    pve_timeout_seconds: int
    pve_backup_jobs_endpoint: str  # default "/cluster/backup"
    pve_task_history_limit: int
    pve_task_log_limit: int
    pbs_hostnames: dict[str, str]  # PBS_HOSTNAMES "cle=valeur" mapping
    pbs_servers: tuple[PbsServerConfig, ...]  # every PBS<N> group found, usable or not
    log_level: str
    report_filename_prefix: str

    @property
    def configured_pbs_servers(self) -> tuple[PbsServerConfig, ...]:
        """Only the PBS servers whose URL and token are fully set."""
        return tuple(server for server in self.pbs_servers if server.configured)
|
||||
|
||||
|
||||
def load_config(env_file: str | Path | None = ".env") -> AppConfig:
    """Build the application configuration from the environment.

    Loads *env_file* first (when it exists), then reads PVE_* / PBS<N>_*
    variables with their documented defaults. Raises ConfigError for missing
    mandatory values or unparsable settings.
    """
    load_env_file(env_file)
    # Parsed up front because these two also serve as per-PBS defaults below.
    pve_verify_tls = parse_bool(os.getenv("PVE_VERIFY_TLS", "true"), "PVE_VERIFY_TLS")
    pve_timeout_seconds = parse_int(
        os.getenv("PVE_TIMEOUT_SECONDS", "30"),
        "PVE_TIMEOUT_SECONDS",
    )

    config = AppConfig(
        pve_api_url=require_env("PVE_API_URL"),
        pve_api_token_id=require_env("PVE_API_TOKEN_ID"),
        pve_api_token_secret=require_env("PVE_API_TOKEN_SECRET"),
        report_output_dir=Path(os.getenv("REPORT_OUTPUT_DIR", "reports")),
        report_timezone=os.getenv("REPORT_TIMEZONE", "Europe/Paris"),
        pve_verify_tls=pve_verify_tls,
        pve_ca_bundle=parse_optional_path(os.getenv("PVE_CA_BUNDLE")),
        pve_timeout_seconds=pve_timeout_seconds,
        pve_backup_jobs_endpoint=parse_endpoint(
            os.getenv("PVE_BACKUP_JOBS_ENDPOINT", "/cluster/backup"),
            "PVE_BACKUP_JOBS_ENDPOINT",
        ),
        pve_task_history_limit=parse_int(
            os.getenv("PVE_TASK_HISTORY_LIMIT", "500"),
            "PVE_TASK_HISTORY_LIMIT",
        ),
        pve_task_log_limit=parse_int(
            os.getenv("PVE_TASK_LOG_LIMIT", "5000"),
            "PVE_TASK_LOG_LIMIT",
        ),
        pbs_hostnames=parse_mapping(os.getenv("PBS_HOSTNAMES", ""), "PBS_HOSTNAMES"),
        pbs_servers=parse_pbs_servers(
            os.environ,
            pve_verify_tls=pve_verify_tls,
            pve_timeout_seconds=pve_timeout_seconds,
        ),
        log_level=os.getenv("LOG_LEVEL", "INFO").upper(),
        report_filename_prefix=os.getenv(
            "REPORT_FILENAME_PREFIX",
            "rapport-sauvegardes-pve",
        ),
    )
    return config
|
||||
|
||||
|
||||
# Environment variables named PBS<N>_<KEY> define one PBS server per numeric suffix.
PBS_SERVER_ENV_PATTERN = re.compile(r"^PBS(\d+)_([A-Z0-9_]+)$")
# Keys checked as an all-or-nothing group by validate_optional_group
# (an API_URL-only group is tolerated there).
PBS_SERVER_REQUIRED_KEYS = ("API_URL", "API_TOKEN_ID", "API_TOKEN_SECRET")
# Full set of recognized per-server keys; other PBS<N>_* suffixes do not make
# parse_pbs_servers register a server.
PBS_SERVER_KNOWN_KEYS = {
    "NAME",
    "API_URL",
    "API_TOKEN_ID",
    "API_TOKEN_SECRET",
    "VERIFY_TLS",
    "CA_BUNDLE",
    "TIMEOUT_SECONDS",
}
|
||||
|
||||
|
||||
def parse_pbs_servers(
    environ: Mapping[str, str],
    *,
    pve_verify_tls: bool,
    pve_timeout_seconds: int,
) -> tuple[PbsServerConfig, ...]:
    """Discover PBS<N>_* environment groups and build one PbsServerConfig each.

    The PVE-level TLS and timeout settings act as per-server defaults.
    Servers are returned in ascending numeric order of their suffix.
    Raises ConfigError (via the parse_* helpers) on partial or invalid groups.
    """
    # Collect numeric suffixes that appear with at least one recognized key.
    prefixes = sorted(
        {
            match.group(1)
            for key in environ
            if (match := PBS_SERVER_ENV_PATTERN.match(key))
            and match.group(2) in PBS_SERVER_KNOWN_KEYS
        },
        # Numeric sort with the raw string as tiebreaker ("01" vs "1").
        key=lambda value: (int(value), value),
    )

    servers = []
    for number in prefixes:
        prefix = f"PBS{number}"
        values = {
            key: parse_optional_string(environ.get(f"{prefix}_{key}"))
            for key in PBS_SERVER_REQUIRED_KEYS
        }
        # Reject partially configured groups (all-or-nothing; URL-only tolerated).
        validate_optional_group(
            prefix,
            {f"{prefix}_{key}": value for key, value in values.items()},
        )
        servers.append(
            PbsServerConfig(
                prefix=prefix,
                # Blank PBS<N>_NAME falls back to the prefix itself.
                name=(environ.get(f"{prefix}_NAME", prefix).strip() or prefix),
                api_url=values["API_URL"],
                api_token_id=values["API_TOKEN_ID"],
                api_token_secret=values["API_TOKEN_SECRET"],
                verify_tls=parse_bool(
                    environ.get(f"{prefix}_VERIFY_TLS", str(pve_verify_tls)),
                    f"{prefix}_VERIFY_TLS",
                ),
                ca_bundle=parse_optional_path(environ.get(f"{prefix}_CA_BUNDLE")),
                timeout_seconds=parse_int(
                    environ.get(f"{prefix}_TIMEOUT_SECONDS", str(pve_timeout_seconds)),
                    f"{prefix}_TIMEOUT_SECONDS",
                ),
            )
        )
    return tuple(servers)
|
||||
|
||||
|
||||
def load_env_file(env_file: str | Path | None) -> None:
    """Load environment variables from *env_file* when it exists.

    Prefers python-dotenv when installed; otherwise degrades to the minimal
    built-in parser. Passing None disables .env loading entirely.
    """
    if env_file is None:
        return

    path = Path(env_file)
    if not path.exists():
        return

    try:
        # python-dotenv is an optional dependency, hence the local import.
        from dotenv import load_dotenv
    except ImportError:
        load_env_file_fallback(path)
        return

    load_dotenv(path)
|
||||
|
||||
|
||||
def load_env_file_fallback(path: Path) -> None:
    """Minimal .env parser used when python-dotenv is not installed.

    Reads KEY=VALUE lines, skipping blanks, comments and lines without '='.
    Existing environment variables are never overwritten (setdefault).
    """
    for line in path.read_text(encoding="utf-8").splitlines():
        stripped = line.strip()
        if not stripped or stripped.startswith("#") or "=" not in stripped:
            continue
        key, value = stripped.split("=", 1)
        value = value.strip()
        # Strip at most one pair of matching surrounding quotes. The previous
        # .strip('"').strip("'") removed entire runs of quote characters and
        # mismatched pairs, corrupting values such as '"a"' or "'it'".
        if len(value) >= 2 and value[0] == value[-1] and value[0] in ('"', "'"):
            value = value[1:-1]
        os.environ.setdefault(key.strip(), value)
|
||||
|
||||
|
||||
def require_env(name: str) -> str:
    """Return the trimmed value of the mandatory variable *name*.

    Raises ConfigError when the variable is unset or blank.
    """
    raw = os.getenv(name)
    if raw is None or not raw.strip():
        raise ConfigError(f"variable obligatoire absente: {name}")
    return raw.strip()
|
||||
|
||||
|
||||
def parse_bool(value: str, name: str) -> bool:
    """Parse a boolean environment value; raise ConfigError when unrecognized."""
    lookup = {
        "1": True, "true": True, "yes": True, "y": True, "on": True,
        "0": False, "false": False, "no": False, "n": False, "off": False,
    }
    token = value.strip().lower()
    if token in lookup:
        return lookup[token]
    raise ConfigError(f"{name} doit etre un booleen")
|
||||
|
||||
|
||||
def parse_int(value: str, name: str) -> int:
    """Parse a strictly positive integer environment value.

    Raises ConfigError when the value is not an integer or is <= 0.
    """
    try:
        number = int(value)
    except ValueError as exc:
        raise ConfigError(f"{name} doit etre un entier") from exc
    if number < 1:
        raise ConfigError(f"{name} doit etre strictement positif")
    return number
|
||||
|
||||
|
||||
def parse_optional_path(value: str | None) -> Path | None:
    """Convert an optional env string into a Path; None when unset or blank."""
    text = (value or "").strip()
    return Path(text) if text else None
|
||||
|
||||
|
||||
def parse_optional_string(value: str | None) -> str | None:
    """Return the trimmed string, or None when the value is unset or blank."""
    text = (value or "").strip()
    return text or None
|
||||
|
||||
|
||||
def validate_optional_group(name: str, values: dict[str, object | None]) -> None:
    """Enforce all-or-nothing configuration of an optional env-var group.

    Accepts a fully unset group, a fully set group, or a group where only the
    <name>_API_URL variable is present; raises ConfigError otherwise, listing
    the missing variables.
    """
    provided = {key for key, value in values.items() if value is not None}
    fully_unset = not provided
    fully_set = provided == set(values)
    url_only = provided == {f"{name}_API_URL"}
    if fully_unset or fully_set or url_only:
        return

    missing = sorted(set(values) - provided)
    raise ConfigError(
        f"configuration {name} incomplete: variables manquantes {', '.join(missing)}"
    )
|
||||
|
||||
|
||||
def parse_endpoint(value: str, name: str) -> str:
    """Validate an API endpoint path: non-empty and starting with '/'."""
    endpoint = value.strip()
    if endpoint.startswith("/"):
        return endpoint
    complaint = "ne doit pas etre vide" if not endpoint else "doit commencer par /"
    raise ConfigError(f"{name} {complaint}")
|
||||
|
||||
|
||||
def parse_mapping(value: str, name: str) -> dict[str, str]:
    """Parse a comma-separated list of cle=valeur pairs into a dict.

    Empty items are skipped; malformed or half-empty entries raise ConfigError.
    """
    result: dict[str, str] = {}
    text = value.strip()
    if not text:
        return result

    for entry in (item.strip() for item in text.split(",")):
        if not entry:
            continue
        if "=" not in entry:
            raise ConfigError(f"{name} doit utiliser le format cle=valeur")
        left, _, right = entry.partition("=")
        left, right = left.strip(), right.strip()
        if not left or not right:
            raise ConfigError(f"{name} contient une entree vide")
        result[left] = right
    return result
|
||||
@@ -0,0 +1,207 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from pve_backup_report.models import BackupCoverage, BackupJob, Guest, PbsStorage, Pool, ReportData
|
||||
|
||||
# Coverage status labels. French identifiers are kept as-is: they are stable
# machine-readable values surfaced in logs and report data.
STATUS_PBS_PLANNED = "sauvegarde_pbs_planifiee"  # covered by an enabled job targeting PBS
STATUS_NON_PBS_PLANNED = "sauvegarde_non_pbs_planifiee"  # covered, but by a non-PBS storage
STATUS_DISABLED_PBS = "sauvegarde_pbs_desactivee"  # covered, but the PBS target is disabled
STATUS_MISSING = "non_sauvegardee"  # no enabled job applies to the guest
STATUS_INDETERMINATE = "indetermine"  # job selection could not be interpreted
|
||||
|
||||
|
||||
def analyze_backup_coverage(report_data: ReportData) -> ReportData:
    """Return a new ReportData with the coverage list (re)computed.

    ReportData is treated as immutable here: a fresh instance is built with
    every field carried over unchanged except `coverage`.
    """
    coverage = calculate_backup_coverage(
        guests=report_data.guests,
        backup_jobs=report_data.backup_jobs,
        pbs_storages=report_data.pbs_storages,
        pools=report_data.pools,
    )
    return ReportData(
        pbs_server_names=report_data.pbs_server_names,
        pbs_storages=report_data.pbs_storages,
        pbs_access_users=report_data.pbs_access_users,
        pbs_retention_policies=report_data.pbs_retention_policies,
        pbs_snapshot_summaries=report_data.pbs_snapshot_summaries,
        pbs_datastore_usages=report_data.pbs_datastore_usages,
        pbs_gc_statuses=report_data.pbs_gc_statuses,
        guests=report_data.guests,
        pools=report_data.pools,
        backup_jobs=report_data.backup_jobs,
        coverage=coverage,
        last_backup_results=report_data.last_backup_results,
        summary=report_data.summary,
        issues=report_data.issues,
    )
|
||||
|
||||
|
||||
def calculate_backup_coverage(
    guests: list[Guest],
    backup_jobs: list[BackupJob],
    pbs_storages: list[PbsStorage],
    pools: list[Pool] | None = None,
) -> list[BackupCoverage]:
    """Compute one BackupCoverage entry per guest, in *guests* order.

    Only enabled jobs are considered. A job whose selection cannot be
    interpreted (indeterminate scope) is tentatively attached to every guest;
    that attachment only surfaces for guests with no directly-matching job.
    """
    guests_by_vmid = {guest.vmid: guest for guest in guests}
    pools_by_id = {pool.pool_id: pool for pool in pools or []}
    # vmid -> jobs whose resolved scope explicitly includes that vmid.
    jobs_by_vmid: dict[int, list[BackupJob]] = {}
    # vmid -> jobs whose scope could not be resolved at all.
    indeterminate_by_vmid: dict[int, list[BackupJob]] = {}

    for job in backup_jobs:
        if not job.enabled:
            # Disabled jobs never contribute to coverage.
            continue

        scope = resolve_job_scope(job, guests_by_vmid, pools_by_id)
        if scope.indeterminate:
            # Unresolvable selection: record the job against every guest so
            # guests without any concrete job fall into STATUS_INDETERMINATE
            # rather than STATUS_MISSING.
            for guest in guests:
                indeterminate_by_vmid.setdefault(guest.vmid, []).append(job)
            continue

        for vmid in scope.vmids:
            jobs_by_vmid.setdefault(vmid, []).append(job)

    pbs_by_id = {storage.storage_id: storage for storage in pbs_storages}
    coverage: list[BackupCoverage] = []
    for guest in guests:
        jobs = jobs_by_vmid.get(guest.vmid, [])
        if jobs:
            # Concrete jobs win over indeterminate ones; classify by target
            # storage (PBS active / PBS disabled / non-PBS / unknown).
            status, reason = classify_jobs(jobs, pbs_by_id)
            coverage.append(
                BackupCoverage(
                    guest=guest,
                    status=status,
                    jobs=jobs,
                    reason=reason,
                    storages=sorted({job.storage for job in jobs if job.storage}),
                )
            )
        elif guest.vmid in indeterminate_by_vmid:
            jobs = indeterminate_by_vmid[guest.vmid]
            coverage.append(
                BackupCoverage(
                    guest=guest,
                    status=STATUS_INDETERMINATE,
                    jobs=jobs,
                    reason="selection de job non interpretee",
                    storages=sorted({job.storage for job in jobs if job.storage}),
                )
            )
        else:
            # No enabled job targets this guest at all.
            coverage.append(
                BackupCoverage(
                    guest=guest,
                    status=STATUS_MISSING,
                    reason="aucun job actif applicable",
                )
            )
    return coverage
|
||||
|
||||
|
||||
def calculate_explicit_vmid_coverage(
    guests: list[Guest],
    backup_jobs: list[BackupJob],
) -> list[BackupCoverage]:
    """Coverage computed without PBS storage or pool knowledge.

    NOTE(review): with ``pbs_storages=[]`` every job that names a storage
    classifies as non-PBS, and ``pools=[]`` makes pool-based selections
    indeterminate — confirm this restriction is intentional for callers.
    """
    return calculate_backup_coverage(guests, backup_jobs, pbs_storages=[], pools=[])
|
||||
|
||||
|
||||
class JobScope:
    """Set of VMIDs a backup job resolves to, or a marker that resolution failed."""

    def __init__(self, vmids: set[int] | None = None, indeterminate: bool = False) -> None:
        # Normalize falsy input (None or an empty set) to a fresh empty set.
        self.vmids: set[int] = vmids if vmids else set()
        self.indeterminate = indeterminate
|
||||
|
||||
|
||||
def resolve_job_scope(
    job: BackupJob,
    guests_by_vmid: dict[int, Guest],
    pools_by_id: dict[str, Pool],
) -> JobScope:
    """Resolve which VMIDs *job* targets.

    Resolution order: explicit ``vmid=`` list first, then an ``all``
    selection (every known guest minus exclusions), then ``pool=``
    membership. A pool referenced by the job but absent from *pools_by_id*
    yields an indeterminate scope; no match yields an empty scope.
    """
    explicit_vmids = parse_job_vmids(job)
    if explicit_vmids:
        return JobScope(explicit_vmids)

    source = job.selection or ""
    if selection_truthy(source, "all"):
        excluded = parse_job_exclusions(job)
        return JobScope(set(guests_by_vmid) - excluded)

    pool_id = extract_selection_value(source, "pool")
    if pool_id:
        pool = pools_by_id.get(pool_id)
        if pool is None:
            # The pool was not collected: we cannot tell which guests the
            # job covers, so flag the whole scope as indeterminate.
            return JobScope(indeterminate=True)
        excluded = parse_job_exclusions(job)
        return JobScope(pool.vmids - excluded)

    return JobScope()
|
||||
|
||||
|
||||
def parse_job_vmids(job: BackupJob) -> set[int]:
    """Explicit VMIDs targeted by *job* (its ``vmid=`` entry), minus exclusions."""
    selection = job.selection or ""
    included = parse_vmid_list(extract_selection_value(selection, "vmid"))
    return included - parse_job_exclusions(job)
|
||||
|
||||
|
||||
def parse_job_exclusions(job: BackupJob) -> set[int]:
    """VMIDs excluded by *job*, from its ``excluded`` field or, failing that,
    the ``exclude=`` entry of its selection string."""
    excluded_part = job.excluded
    if not excluded_part:
        excluded_part = extract_selection_value(job.selection or "", "exclude")
    return parse_vmid_list(excluded_part)
|
||||
|
||||
|
||||
def classify_jobs(
    jobs: list[BackupJob],
    pbs_by_id: dict[str, PbsStorage],
) -> tuple[str, str | None]:
    """Classify a guest's active jobs into a coverage ``(status, reason)``.

    Priority: any job targeting an active PBS storage wins, then a disabled
    PBS storage, then a non-PBS storage; otherwise the result is
    indeterminate. *reason* is None only for the nominal PBS case.
    """
    has_active_pbs = False
    has_disabled_pbs = False
    has_non_pbs = False
    # NOTE(review): unknown_storages is accumulated but never read — dead
    # state, or an intended future detail for the indeterminate reason?
    unknown_storages: set[str] = set()

    for job in jobs:
        if not job.storage:
            # Job with no target storage: cannot be classified either way.
            unknown_storages.add("non renseigne")
            continue

        pbs_storage = pbs_by_id.get(job.storage)
        if pbs_storage is None:
            # Storage exists but is not a known PBS storage.
            has_non_pbs = True
            continue
        if pbs_storage.enabled is False:
            # Explicit False only; enabled=None (unknown) counts as active.
            has_disabled_pbs = True
            continue
        has_active_pbs = True

    if has_active_pbs:
        return STATUS_PBS_PLANNED, None
    if has_disabled_pbs:
        return STATUS_DISABLED_PBS, "storage PBS desactive"
    if has_non_pbs:
        return STATUS_NON_PBS_PLANNED, "job actif vers storage non PBS"
    return STATUS_INDETERMINATE, "storage cible non renseigne ou inconnu"
|
||||
|
||||
|
||||
def extract_selection_value(selection: str, key: str) -> str | None:
    """Return the value of the ``key=...`` entry in *selection*, or None.

    Top-level entries are separated by ", " (comma + space); a value may
    itself contain bare commas (e.g. ``vmid=100,101``), so we must not split
    on "," alone.
    """
    wanted = f"{key}="
    for candidate in selection.split(", "):
        if candidate.startswith(wanted):
            return candidate[len(wanted):]
    return None
|
||||
|
||||
|
||||
def selection_truthy(selection: str, key: str) -> bool:
    """True when *key* appears in *selection* with a truthy flag value."""
    raw = extract_selection_value(selection, key)
    if raw is None:
        return False
    return raw.strip().lower() in {"1", "true", "yes", "y", "on"}
|
||||
|
||||
|
||||
def parse_vmid_list(value: str | None) -> set[int]:
    """Parse a comma- or semicolon-separated VMID list into a set of ints.

    Blank and non-numeric entries are silently ignored; None or an empty
    string yields an empty set.
    """
    if not value:
        return set()
    tokens = value.replace(";", ",").split(",")
    return {int(token) for raw in tokens if (token := raw.strip()).isdigit()}
|
||||
@@ -0,0 +1,14 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import sys
|
||||
|
||||
|
||||
def configure_logging(level: str = "INFO") -> None:
    """Configure root logging to stdout at *level* (unknown names fall back to INFO)."""
    resolved = getattr(logging, level.upper(), logging.INFO)
    # force=True replaces any handlers installed by earlier basicConfig calls.
    logging.basicConfig(
        stream=sys.stdout,
        level=resolved,
        format="%(asctime)s %(levelname)s %(name)s - %(message)s",
        force=True,
    )
|
||||
@@ -0,0 +1,178 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class PbsStorage:
    """A PBS-backed storage entry as configured on the PVE side."""

    storage_id: str
    username: str | None = None
    server: str | None = None
    datastore: str | None = None
    namespace: str | None = None
    enabled: bool | None = None  # None = unknown (treated as active by classify_jobs)
    raw: dict[str, object] = field(default_factory=dict)  # original API record
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class PbsRetentionPolicy:
    """A PBS prune/retention job with its keep-* settings."""

    policy_id: str
    server_name: str
    datastore: str | None = None
    namespace: str | None = None
    schedule: str | None = None
    enabled: bool = True
    keep_last: int | None = None
    keep_hourly: int | None = None
    keep_daily: int | None = None
    keep_weekly: int | None = None
    keep_monthly: int | None = None
    keep_yearly: int | None = None
    max_depth: int | None = None
    comment: str | None = None
    raw: dict[str, object] = field(default_factory=dict)  # original API record
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class PbsBackupSnapshotSummary:
    """Aggregated snapshot info for one guest in one datastore/namespace."""

    server_name: str
    vmid: int
    guest_type: str
    datastore: str
    namespace: str
    snapshot_count: int = 0
    oldest_backup_at: datetime | None = None
    newest_backup_at: datetime | None = None
    newest_backup_size_bytes: int | None = None
    raw: dict[str, object] = field(default_factory=dict)  # original API record
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class PbsDatastoreUsage:
    """Disk usage figures for one PBS datastore."""

    server_name: str
    datastore: str
    total_bytes: int | None = None
    used_bytes: int | None = None
    available_bytes: int | None = None
    raw: dict[str, object] = field(default_factory=dict)  # original API record
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class PbsGarbageCollectionStatus:
    """Garbage-collection state for one PBS datastore."""

    server_name: str
    datastore: str
    status: str
    schedule: str | None = None
    last_run_endtime: datetime | None = None
    next_run: datetime | None = None
    raw: dict[str, object] = field(default_factory=dict)  # original API record
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class PbsAccessUser:
    """A PBS user/token identity and its effective permissions for a storage."""

    server_name: str
    auth_id: str
    user_id: str
    storage_id: str
    datastore: str | None = None
    namespace: str | None = None
    enabled: bool | None = None
    expire: int | None = None  # expiry timestamp as reported by the API
    email: str | None = None
    comment: str | None = None
    permissions: dict[str, bool] = field(default_factory=dict)  # permission name -> granted
    raw: dict[str, object] = field(default_factory=dict)  # original API record
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class Guest:
    """A PVE guest (VM or container)."""

    vmid: int
    name: str
    guest_type: str  # "qemu" or "lxc" as counted by build_report_summary
    node: str | None = None
    status: str | None = None
    notes: str | None = None
    raw: dict[str, object] = field(default_factory=dict)  # original API record
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class BackupJob:
    """A PVE backup (vzdump) job definition."""

    job_id: str
    storage: str | None = None  # target storage id
    schedule: str | None = None
    enabled: bool = True
    mode: str | None = None
    selection: str | None = None  # "key=value, key=value" string parsed by coverage helpers
    excluded: str | None = None  # VMID list excluded from the job
    raw: dict[str, object] = field(default_factory=dict)  # original API record
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class Pool:
    """A PVE resource pool and the VMIDs of its member guests."""

    pool_id: str
    vmids: set[int] = field(default_factory=set)
    raw: dict[str, object] = field(default_factory=dict)  # original API record
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class CollectionIssue:
    """A problem encountered while collecting report data."""

    severity: str
    component: str  # which collector/subsystem raised the issue
    message: str
    details: str | None = None
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class BackupCoverage:
    """Backup coverage verdict for a single guest."""

    guest: Guest
    status: str  # one of the STATUS_* constants from the coverage module
    jobs: list[BackupJob] = field(default_factory=list)  # jobs that apply to the guest
    reason: str | None = None  # human-readable explanation for non-nominal statuses
    storages: list[str] = field(default_factory=list)  # sorted target storage ids
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class LastBackupResult:
    """Outcome of the most recent backup task observed for a guest."""

    vmid: int
    status: str
    finished_at: datetime | None = None
    duration_seconds: int | None = None
    node: str | None = None
    raw: dict[str, object] = field(default_factory=dict)  # original task record
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class ReportSummary:
    """Headline counters shown at the top of the report."""

    generated_at: datetime | None = None
    total_vm: int = 0  # guests with guest_type == "qemu"
    total_ct: int = 0  # guests with guest_type == "lxc"
    total_guests: int = 0
    pbs_storage_count: int = 0
    backup_job_count: int = 0
    active_backup_job_count: int = 0
    inactive_backup_job_count: int = 0
    # Per-status coverage counters (see STATUS_* in the coverage module).
    pbs_planned_count: int = 0
    non_pbs_planned_count: int = 0
    disabled_pbs_count: int = 0
    missing_count: int = 0
    indeterminate_count: int = 0
    issue_count: int = 0
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class ReportData:
    """Aggregate of everything collected for one report run."""

    pbs_server_names: list[str] = field(default_factory=list)
    pbs_storages: list[PbsStorage] = field(default_factory=list)
    pbs_access_users: list[PbsAccessUser] = field(default_factory=list)
    pbs_retention_policies: list[PbsRetentionPolicy] = field(default_factory=list)
    # Keyed by (server, guest_type, datastore, namespace, vmid) — presumably;
    # TODO confirm key order against the collector that fills it.
    pbs_snapshot_summaries: dict[tuple[str, str, str, str, int], PbsBackupSnapshotSummary] = field(default_factory=dict)
    pbs_datastore_usages: list[PbsDatastoreUsage] = field(default_factory=list)
    pbs_gc_statuses: list[PbsGarbageCollectionStatus] = field(default_factory=list)
    guests: list[Guest] = field(default_factory=list)
    pools: list[Pool] = field(default_factory=list)
    backup_jobs: list[BackupJob] = field(default_factory=list)
    coverage: list[BackupCoverage] = field(default_factory=list)
    last_backup_results: dict[int, LastBackupResult] = field(default_factory=dict)  # keyed by vmid
    summary: ReportSummary = field(default_factory=ReportSummary)
    issues: list[CollectionIssue] = field(default_factory=list)
|
||||
@@ -0,0 +1,207 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Any
|
||||
from urllib.parse import quote, urljoin
|
||||
|
||||
import requests
|
||||
import urllib3
|
||||
|
||||
from pve_backup_report import __version__
|
||||
from pve_backup_report.config import AppConfig, PbsServerConfig
|
||||
from pve_backup_report.sanitization import sanitize_message
|
||||
|
||||
|
||||
class PbsApiError(RuntimeError):
    """Generic error raised for any PBS API call failure."""
|
||||
|
||||
|
||||
class PbsHttpError(PbsApiError):
    """PBS API returned an HTTP error status (>= 400)."""

    def __init__(self, endpoint: str, status_code: int, message: str) -> None:
        super().__init__(f"{endpoint}: HTTP {status_code} - {message}")
        # Kept as attributes so callers can branch on the failing endpoint/code.
        self.endpoint = endpoint
        self.status_code = status_code
        self.message = message
|
||||
|
||||
|
||||
class PbsConnectionError(PbsApiError):
    """PBS API unreachable: network, TLS, timeout, or missing configuration."""

    pass
|
||||
|
||||
|
||||
class PbsResponseError(PbsApiError):
    """PBS API responded, but the payload was not the expected JSON shape."""

    pass
|
||||
|
||||
|
||||
@dataclass
class PbsClient:
    """Reusable PBS API client built on requests.

    One instance talks to one PBS server (the first configured server by
    default). All reads go through :meth:`get`, which normalizes transport
    errors into PbsConnectionError / PbsHttpError / PbsResponseError and
    unwraps the API's ``{"data": ...}`` envelope.
    """

    config: AppConfig
    server: PbsServerConfig | None = None
    session: requests.Session = field(default_factory=requests.Session)

    def __post_init__(self) -> None:
        # Default to the first configured PBS server when none is given.
        if self.server is None:
            if not self.config.configured_pbs_servers:
                raise PbsConnectionError("aucun PBS configure")
            self.server = self.config.configured_pbs_servers[0]
        self.session.headers.update(
            {
                "Authorization": self._authorization_header(),
                "Accept": "application/json",
                "User-Agent": f"pve-backup-report/{__version__}",
            }
        )
        # Only silence urllib3's insecure-request warnings when TLS
        # verification is fully disabled (verify_tls may also be a CA path).
        if self.verify_tls is False:
            urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

    @property
    def server_id(self) -> str:
        """Lower-cased server prefix used as a stable identifier."""
        return self._server.prefix.lower()

    @property
    def server_name(self) -> str:
        return self._server.name

    @property
    def api_url(self) -> str | None:
        return self._server.api_url

    @property
    def timeout_seconds(self) -> int:
        return self._server.timeout_seconds

    @property
    def base_url(self) -> str:
        """API base URL, guaranteed to end with ``/api2/json``."""
        if self.api_url is None:
            raise PbsConnectionError(f"{self.server_name}_API_URL non configure")
        base = self.api_url.rstrip("/")
        if not base.endswith("/api2/json"):
            base = f"{base}/api2/json"
        return base

    @property
    def verify_tls(self) -> bool | str:
        """Value for requests' ``verify``: False, a CA bundle path, or True."""
        if not self._server.verify_tls:
            return False
        if self._server.ca_bundle is not None:
            return str(self._server.ca_bundle)
        return True

    def get(self, endpoint: str, params: dict[str, object] | None = None) -> Any:
        """GET *endpoint* and return the unwrapped ``data`` field.

        Raises PbsConnectionError on transport/TLS/timeout failures,
        PbsHttpError on HTTP >= 400, PbsResponseError on malformed payloads.
        """
        normalized_endpoint = self._normalize_endpoint(endpoint)
        url = urljoin(f"{self.base_url}/", normalized_endpoint)

        try:
            response = self.session.get(
                url,
                params=params,
                timeout=self.timeout_seconds,
                verify=self.verify_tls,
            )
        # Order matters: SSLError is a ConnectionError subclass, so it must
        # be caught first to produce the TLS-specific message.
        except requests.exceptions.SSLError as exc:
            raise PbsConnectionError(
                f"{endpoint}: verification TLS echouee: {self._sanitize_exception(exc)}"
            ) from exc
        except requests.exceptions.ConnectionError as exc:
            raise PbsConnectionError(
                f"{endpoint}: erreur reseau API PBS: {self._sanitize_exception(exc)}"
            ) from exc
        except requests.Timeout as exc:
            raise PbsConnectionError(
                f"{endpoint}: timeout apres {self.timeout_seconds}s"
            ) from exc
        except requests.RequestException as exc:
            raise PbsConnectionError(
                f"{endpoint}: erreur de connexion API PBS: {self._sanitize_exception(exc)}"
            ) from exc

        if response.status_code >= 400:
            raise PbsHttpError(
                endpoint=endpoint,
                status_code=response.status_code,
                message=self._extract_error_message(response),
            )

        try:
            payload = response.json()
        except ValueError as exc:
            raise PbsResponseError(f"{endpoint}: reponse JSON invalide") from exc

        # PBS wraps every result in {"data": ...}; anything else is malformed.
        if not isinstance(payload, dict) or "data" not in payload:
            raise PbsResponseError(f"{endpoint}: champ 'data' absent de la reponse")

        return payload["data"]

    def get_prune_jobs(self) -> Any:
        """List configured prune (retention) jobs."""
        return self.get("/config/prune")

    def get_datastores(self) -> Any:
        """List configured datastores."""
        return self.get("/config/datastore")

    def get_datastore_status(self, datastore: str) -> Any:
        """Usage/status of one datastore."""
        encoded_datastore = quote(datastore, safe="")
        return self.get(f"/admin/datastore/{encoded_datastore}/status")

    def get_datastore_gc_status(self, datastore: str) -> Any:
        """Garbage-collection status of one datastore."""
        encoded_datastore = quote(datastore, safe="")
        return self.get(f"/admin/datastore/{encoded_datastore}/gc")

    def get_datastore_namespaces(self, datastore: str) -> Any:
        """List namespaces of one datastore."""
        encoded_datastore = quote(datastore, safe="")
        return self.get(f"/admin/datastore/{encoded_datastore}/namespace")

    def get_datastore_snapshots(self, datastore: str, namespace: str | None) -> Any:
        """List snapshots of one datastore, optionally scoped to a namespace.

        The root namespace ("/" or empty) is queried without an ``ns`` param.
        """
        encoded_datastore = quote(datastore, safe="")
        params: dict[str, object] | None = None
        if namespace and namespace != "/":
            params = {"ns": namespace}
        return self.get(f"/admin/datastore/{encoded_datastore}/snapshots", params=params)

    def get_access_users(self) -> Any:
        """List PBS user accounts."""
        return self.get("/access/users")

    def get_access_permissions(self, auth_id: str, path: str) -> Any:
        """Effective permissions of *auth_id* on *path*."""
        return self.get("/access/permissions", params={"auth-id": auth_id, "path": path})

    def close(self) -> None:
        """Release the underlying HTTP session."""
        self.session.close()

    def _authorization_header(self) -> str:
        # PBS API token header format: PBSAPIToken=<token-id>:<secret>.
        token_id = self._server.api_token_id
        token_secret = self._server.api_token_secret
        if token_id is None or token_secret is None:
            raise PbsConnectionError(f"token API {self.server_name} non configure")
        return f"PBSAPIToken={token_id}:{token_secret}"

    @property
    def _server(self) -> PbsServerConfig:
        # Narrow Optional[PbsServerConfig]: __post_init__ guarantees it is
        # set, but the property keeps every access safe for type checkers.
        if self.server is None:
            raise PbsConnectionError("aucun PBS configure")
        return self.server

    @staticmethod
    def _normalize_endpoint(endpoint: str) -> str:
        # Leading slashes would make urljoin discard the /api2/json prefix.
        return endpoint.lstrip("/")

    @staticmethod
    def _extract_error_message(response: requests.Response) -> str:
        """Best-effort human-readable message from an error response."""
        try:
            payload = response.json()
        except ValueError:
            return sanitize_message(response.reason)

        if isinstance(payload, dict):
            for key in ("message", "error"):
                value = payload.get(key)
                if isinstance(value, str) and value:
                    return sanitize_message(value)
            data = payload.get("data")
            if isinstance(data, str) and data:
                return sanitize_message(data)
        return sanitize_message(response.reason)

    @staticmethod
    def _sanitize_exception(exc: BaseException) -> str:
        # Fall back to the exception class name when sanitization strips
        # the whole message.
        message = sanitize_message(exc)
        return message or exc.__class__.__name__
|
||||
@@ -0,0 +1,259 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Any
|
||||
from urllib.parse import quote, urljoin
|
||||
|
||||
import requests
|
||||
import urllib3
|
||||
|
||||
from pve_backup_report import __version__
|
||||
from pve_backup_report.config import AppConfig
|
||||
from pve_backup_report.sanitization import sanitize_message
|
||||
|
||||
|
||||
class PveApiError(RuntimeError):
    """Generic error raised for any PVE API call failure."""
|
||||
|
||||
|
||||
class PveHttpError(PveApiError):
    """PVE API returned an HTTP error status (>= 400)."""

    def __init__(self, endpoint: str, status_code: int, message: str) -> None:
        super().__init__(f"{endpoint}: HTTP {status_code} - {message}")
        # Kept as attributes so callers can branch on the failing endpoint/code.
        self.endpoint = endpoint
        self.status_code = status_code
        self.message = message
|
||||
|
||||
|
||||
class PveConnectionError(PveApiError):
    """PVE API unreachable: network, TLS, timeout, or missing configuration."""

    pass
|
||||
|
||||
|
||||
class PveResponseError(PveApiError):
    """PVE API responded, but the payload was not the expected JSON shape."""

    pass
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class EndpointCheckResult:
    """Outcome of probing one PVE endpoint (see PveClient.check_required_endpoints)."""

    endpoint: str
    ok: bool
    count: int | None = None  # number of returned items, when the payload has a length
    detail: str | None = None  # extra summary (e.g. /cluster sub-endpoints)
    error: str | None = None  # error text when ok is False
|
||||
|
||||
|
||||
@dataclass
class PveClient:
    """Reusable PVE API client built on requests.

    All reads go through :meth:`get`, which validates the CA bundle,
    normalizes transport errors into PveConnectionError / PveHttpError /
    PveResponseError, and unwraps the API's ``{"data": ...}`` envelope.
    """

    config: AppConfig
    session: requests.Session = field(default_factory=requests.Session)

    def __post_init__(self) -> None:
        self.session.headers.update(
            {
                "Authorization": self._authorization_header(),
                "Accept": "application/json",
                "User-Agent": f"pve-backup-report/{__version__}",
            }
        )
        # Only silence urllib3's insecure-request warnings when TLS
        # verification is fully disabled (verify_tls may also be a CA path).
        if self.verify_tls is False:
            urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

    @property
    def base_url(self) -> str:
        """API base URL, guaranteed to end with ``/api2/json``."""
        base = self.config.pve_api_url.rstrip("/")
        if not base.endswith("/api2/json"):
            base = f"{base}/api2/json"
        return base

    @property
    def verify_tls(self) -> bool | str:
        """Value for requests' ``verify``: False, a CA bundle path, or True."""
        if not self.config.pve_verify_tls:
            return False
        if self.config.pve_ca_bundle is not None:
            return str(self.config.pve_ca_bundle)
        return True

    def get(self, endpoint: str, params: dict[str, object] | None = None) -> Any:
        """GET *endpoint* and return the unwrapped ``data`` field.

        Raises PveConnectionError on transport/TLS/timeout failures (or a
        missing CA bundle), PveHttpError on HTTP >= 400, PveResponseError on
        malformed payloads.
        """
        normalized_endpoint = self._normalize_endpoint(endpoint)
        url = urljoin(f"{self.base_url}/", normalized_endpoint)
        # Fail fast with a clear message when the configured CA file is bad,
        # instead of an opaque SSLError from requests.
        self._validate_ca_bundle(endpoint)

        try:
            response = self.session.get(
                url,
                params=params,
                timeout=self.config.pve_timeout_seconds,
                verify=self.verify_tls,
            )
        # Order matters: SSLError is a ConnectionError subclass, so it must
        # be caught first to produce the TLS-specific message.
        except requests.exceptions.SSLError as exc:
            raise PveConnectionError(
                f"{endpoint}: verification TLS echouee: {self._sanitize_exception(exc)}"
            ) from exc
        except requests.exceptions.ConnectionError as exc:
            raise PveConnectionError(
                f"{endpoint}: erreur reseau API PVE: {self._sanitize_exception(exc)}"
            ) from exc
        except requests.Timeout as exc:
            raise PveConnectionError(
                f"{endpoint}: timeout apres {self.config.pve_timeout_seconds}s"
            ) from exc
        except requests.RequestException as exc:
            raise PveConnectionError(
                f"{endpoint}: erreur de connexion API PVE: {self._sanitize_exception(exc)}"
            ) from exc

        if response.status_code >= 400:
            raise PveHttpError(
                endpoint=endpoint,
                status_code=response.status_code,
                message=self._extract_error_message(response),
            )

        try:
            payload = response.json()
        except ValueError as exc:
            raise PveResponseError(f"{endpoint}: reponse JSON invalide") from exc

        # PVE wraps every result in {"data": ...}; anything else is malformed.
        if not isinstance(payload, dict) or "data" not in payload:
            raise PveResponseError(f"{endpoint}: champ 'data' absent de la reponse")

        return payload["data"]

    def get_nodes(self) -> Any:
        """List cluster nodes."""
        return self.get("/nodes")

    def get_storages(self) -> Any:
        """List configured storages."""
        return self.get("/storage")

    def get_backup_jobs(self) -> Any:
        """List backup jobs from the configured jobs endpoint."""
        return self.get(self.config.pve_backup_jobs_endpoint)

    def get_cluster_index(self) -> Any:
        """Cluster index (lists available /cluster sub-endpoints)."""
        return self.get("/cluster")

    def get_cluster_resources(self) -> Any:
        """List all cluster resources (guests, storages, nodes, ...)."""
        return self.get("/cluster/resources")

    def get_qemu_config(self, node: str, vmid: int) -> Any:
        """Configuration of one QEMU VM."""
        encoded_node = quote(node, safe="")
        return self.get(f"/nodes/{encoded_node}/qemu/{vmid}/config")

    def get_lxc_config(self, node: str, vmid: int) -> Any:
        """Configuration of one LXC container."""
        encoded_node = quote(node, safe="")
        return self.get(f"/nodes/{encoded_node}/lxc/{vmid}/config")

    def get_cluster_tasks(self) -> Any:
        """List recent cluster-wide tasks."""
        return self.get("/cluster/tasks")

    def get_node_tasks(self, node: str) -> Any:
        """List recent tasks of one node.

        NOTE(review): *node* is not URL-quoted here, unlike get_qemu_config —
        confirm node names never need escaping.
        """
        return self.get(f"/nodes/{node}/tasks")

    def get_task_log(self, node: str, upid: str) -> Any:
        """Log lines of one task, bounded by the configured log limit."""
        encoded_upid = quote(upid, safe="")
        # NOTE(review): node is not URL-quoted here either — see get_node_tasks.
        return self.get(
            f"/nodes/{node}/tasks/{encoded_upid}/log",
            params={"start": 0, "limit": self.config.pve_task_log_limit},
        )

    def get_pools(self) -> Any:
        """List resource pools."""
        return self.get("/pools")

    def get_pool(self, pool_id: str) -> Any:
        """Details (members) of one resource pool.

        NOTE(review): *pool_id* is not URL-quoted — confirm pool ids are safe.
        """
        return self.get(f"/pools/{pool_id}")

    def check_required_endpoints(self) -> list[EndpointCheckResult]:
        """Probe every endpoint the report needs and summarize each outcome.

        Failures are captured per endpoint rather than raised, so a single
        broken endpoint does not abort the whole check.
        """
        results: list[EndpointCheckResult] = []
        endpoints = (
            "/nodes",
            "/storage",
            "/cluster",
            self.config.pve_backup_jobs_endpoint,
        )
        for endpoint in endpoints:
            try:
                data = self.get(endpoint)
            except PveApiError as exc:
                results.append(
                    EndpointCheckResult(
                        endpoint=endpoint,
                        ok=False,
                        error=str(exc),
                    )
                )
                continue

            count = len(data) if hasattr(data, "__len__") else None
            detail = self._summarize_endpoint_data(endpoint, data)
            results.append(
                EndpointCheckResult(
                    endpoint=endpoint,
                    ok=True,
                    count=count,
                    detail=detail,
                )
            )
        return results

    def close(self) -> None:
        """Release the underlying HTTP session."""
        self.session.close()

    def _authorization_header(self) -> str:
        # PVE API token header format: PVEAPIToken=<token-id>=<secret>.
        return (
            "PVEAPIToken="
            f"{self.config.pve_api_token_id}={self.config.pve_api_token_secret}"
        )

    @staticmethod
    def _normalize_endpoint(endpoint: str) -> str:
        # Leading slashes would make urljoin discard the /api2/json prefix.
        return endpoint.lstrip("/")

    @staticmethod
    def _extract_error_message(response: requests.Response) -> str:
        """Best-effort human-readable message from an error response."""
        try:
            payload = response.json()
        except ValueError:
            # NOTE(review): unlike the PBS client, this early return does not
            # pass the reason through sanitize_message — confirm intended.
            return response.reason or "erreur HTTP"

        if isinstance(payload, dict):
            for key in ("message", "error"):
                value = payload.get(key)
                if isinstance(value, str) and value:
                    return sanitize_message(value)
            data = payload.get("data")
            if isinstance(data, str) and data:
                return sanitize_message(data)

        return sanitize_message(response.reason or "erreur HTTP")

    @staticmethod
    def _summarize_endpoint_data(endpoint: str, data: Any) -> str | None:
        """For /cluster only: list the advertised sub-endpoints."""
        if endpoint != "/cluster" or not isinstance(data, list):
            return None

        subdirs = [
            item["subdir"]
            for item in data
            if isinstance(item, dict) and isinstance(item.get("subdir"), str)
        ]
        if not subdirs:
            return None
        return "sous-endpoints: " + ", ".join(sorted(subdirs))

    def _validate_ca_bundle(self, endpoint: str) -> None:
        """Raise early when TLS verification is on but the CA file is unusable."""
        ca_bundle = self.config.pve_ca_bundle
        if not self.config.pve_verify_tls or ca_bundle is None:
            return
        if not ca_bundle.exists():
            raise PveConnectionError(
                f"{endpoint}: fichier CA introuvable: {ca_bundle}"
            )
        if not ca_bundle.is_file():
            raise PveConnectionError(
                f"{endpoint}: chemin CA invalide, fichier attendu: {ca_bundle}"
            )

    @staticmethod
    def _sanitize_exception(exc: BaseException) -> str:
        # Fall back to the exception class name when sanitization strips
        # the whole message.
        message = sanitize_message(exc)
        return message or exc.__class__.__name__
|
||||
@@ -0,0 +1,229 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import asdict, replace
|
||||
from datetime import datetime
|
||||
from typing import Any
|
||||
from zoneinfo import ZoneInfo, ZoneInfoNotFoundError
|
||||
|
||||
from pve_backup_report.config import AppConfig
|
||||
from pve_backup_report.coverage import (
|
||||
STATUS_DISABLED_PBS,
|
||||
STATUS_INDETERMINATE,
|
||||
STATUS_MISSING,
|
||||
STATUS_NON_PBS_PLANNED,
|
||||
STATUS_PBS_PLANNED,
|
||||
analyze_backup_coverage,
|
||||
)
|
||||
from pve_backup_report.models import ReportData, ReportSummary
|
||||
|
||||
# Substring blacklist applied by is_sensitive_field_name(): any dict key
# whose casefolded name contains one of these fragments is dropped from
# serialized output by redact_sensitive_data(). "raw" also removes the
# verbatim API payloads carried by the model dataclasses.
SENSITIVE_FIELD_NAMES = {
    "authorization",
    "fingerprint",
    "files",
    "password",
    "raw",
    "secret",
    "ticket",
    "token",
}
|
||||
|
||||
|
||||
def prepare_report_data(report_data: ReportData, config: AppConfig) -> ReportData:
    """Finalize collected data: compute coverage, summary and PBS server names."""
    enriched = analyze_backup_coverage(report_data)
    return replace(
        enriched,
        summary=build_report_summary(enriched, config),
        pbs_server_names=configured_pbs_server_names(config),
    )
|
||||
|
||||
|
||||
def configured_pbs_server_names(config: AppConfig) -> list[str]:
    """Names of all configured PBS servers, in declaration order."""
    return [pbs_server.name for pbs_server in config.configured_pbs_servers]
|
||||
|
||||
|
||||
def build_report_summary(report_data: ReportData, config: AppConfig) -> ReportSummary:
    """Build the headline counters shown at the top of the report.

    Coverage counters assume report_data.coverage is already populated
    (i.e. analyze_backup_coverage ran first).
    """
    return ReportSummary(
        generated_at=current_time(config.report_timezone),
        total_vm=count_guests_by_type(report_data, "qemu"),
        total_ct=count_guests_by_type(report_data, "lxc"),
        total_guests=len(report_data.guests),
        pbs_storage_count=len(report_data.pbs_storages),
        backup_job_count=len(report_data.backup_jobs),
        active_backup_job_count=sum(1 for job in report_data.backup_jobs if job.enabled),
        inactive_backup_job_count=sum(
            1 for job in report_data.backup_jobs if not job.enabled
        ),
        pbs_planned_count=count_coverage(report_data, STATUS_PBS_PLANNED),
        non_pbs_planned_count=count_coverage(report_data, STATUS_NON_PBS_PLANNED),
        disabled_pbs_count=count_coverage(report_data, STATUS_DISABLED_PBS),
        missing_count=count_coverage(report_data, STATUS_MISSING),
        indeterminate_count=count_coverage(report_data, STATUS_INDETERMINATE),
        issue_count=len(report_data.issues),
    )
|
||||
|
||||
|
||||
def report_data_to_dict(report_data: ReportData) -> dict[str, Any]:
    """Serialize *report_data* to a JSON-ready dict.

    Datetimes become ISO strings; ``raw`` payloads and other sensitive
    fields are dropped wherever serialize_dataclass_safe is used. The key
    names below are the external JSON schema — do not rename them.
    """
    return {
        "summary": serialize_dataclass(report_data.summary),
        "pbs_server_names": report_data.pbs_server_names,
        # Hand-picked fields only: the PBS storage record's raw payload and
        # credentials never reach the output.
        "pbs_storages": [
            {
                "id": storage.storage_id,
                "username": storage.username,
                "server": storage.server,
                "datastore": storage.datastore,
                "namespace": storage.namespace,
                "enabled": storage.enabled,
            }
            for storage in report_data.pbs_storages
        ],
        "pbs_retention_policies": [
            {
                "id": policy.policy_id,
                "server": policy.server_name,
                "datastore": policy.datastore,
                "namespace": policy.namespace,
                "schedule": policy.schedule,
                "enabled": policy.enabled,
                "keep_last": policy.keep_last,
                "keep_hourly": policy.keep_hourly,
                "keep_daily": policy.keep_daily,
                "keep_weekly": policy.keep_weekly,
                "keep_monthly": policy.keep_monthly,
                "keep_yearly": policy.keep_yearly,
                "max_depth": policy.max_depth,
                "comment": policy.comment,
            }
            for policy in report_data.pbs_retention_policies
        ],
        "pbs_access_users": [
            {
                "server": user.server_name,
                "auth_id": user.auth_id,
                "user_id": user.user_id,
                "storage_id": user.storage_id,
                "datastore": user.datastore,
                "namespace": user.namespace,
                "enabled": user.enabled,
                "expire": user.expire,
                "email": user.email,
                "comment": user.comment,
                # Sorted for a stable, diff-friendly output.
                "permissions": {
                    permission: enabled
                    for permission, enabled in sorted(user.permissions.items())
                },
            }
            for user in report_data.pbs_access_users
        ],
        "pbs_snapshot_summaries": [
            {
                "server": summary.server_name,
                "vmid": summary.vmid,
                "type": summary.guest_type,
                "datastore": summary.datastore,
                "namespace": summary.namespace,
                "snapshot_count": summary.snapshot_count,
                "oldest_backup_at": serialize_datetime(summary.oldest_backup_at),
                "newest_backup_at": serialize_datetime(summary.newest_backup_at),
                "newest_backup_size_bytes": summary.newest_backup_size_bytes,
            }
            for summary in report_data.pbs_snapshot_summaries.values()
        ],
        "pbs_datastore_usages": [
            serialize_dataclass_safe(usage)
            for usage in report_data.pbs_datastore_usages
        ],
        "pbs_gc_statuses": [
            serialize_dataclass_safe(status)
            for status in report_data.pbs_gc_statuses
        ],
        "backup_jobs": [
            {
                "id": job.job_id,
                "storage": job.storage,
                "schedule": job.schedule,
                "enabled": job.enabled,
                "mode": job.mode,
                "selection": job.selection,
                "exclude": job.excluded,
            }
            for job in report_data.backup_jobs
        ],
        "coverage": [
            {
                "vmid": item.guest.vmid,
                "name": item.guest.name,
                "notes": item.guest.notes,
                "type": item.guest.guest_type,
                "node": item.guest.node,
                "status": item.guest.status,
                "coverage": item.status,
                "storages": item.storages,
                "jobs": [job.job_id for job in item.jobs],
                "detail": item.reason,
                "last_backup": serialize_last_backup(
                    report_data.last_backup_results.get(item.guest.vmid)
                ),
            }
            for item in report_data.coverage
        ],
        "issues": [serialize_dataclass_safe(issue) for issue in report_data.issues],
    }
|
||||
|
||||
|
||||
def count_guests_by_type(report_data: ReportData, guest_type: str) -> int:
    """Return how many guests in *report_data* have the given guest type."""
    # Booleans sum as 0/1, so this counts matching guests.
    return sum(guest.guest_type == guest_type for guest in report_data.guests)
|
||||
|
||||
|
||||
def count_coverage(report_data: ReportData, status: str) -> int:
    """Return the number of coverage entries whose status equals *status*."""
    return sum(entry.status == status for entry in report_data.coverage)
|
||||
|
||||
|
||||
def current_time(timezone: str) -> datetime:
    """Return the current aware datetime in *timezone*.

    Unknown IANA zone names fall back to UTC instead of failing the report.
    """
    try:
        zone = ZoneInfo(timezone)
    except ZoneInfoNotFoundError:
        # Misconfigured timezone: degrade gracefully to UTC.
        zone = ZoneInfo("UTC")
    return datetime.now(tz=zone)
|
||||
|
||||
|
||||
def serialize_dataclass(value: object) -> dict[str, Any]:
    """Convert a dataclass instance into a JSON-ready dict.

    ``asdict`` recurses into nested dataclasses, lists and dicts, so datetime
    values may appear at any depth.  The original implementation only
    converted top-level values, leaving nested datetimes that break a later
    ``json.dump``; convert them recursively instead.
    """
    return {key: _serialize_json_value(item) for key, item in asdict(value).items()}


def _serialize_json_value(item: object) -> object:
    """Recursively replace datetime objects with their ISO-8601 string form."""
    if isinstance(item, datetime):
        return item.isoformat()
    if isinstance(item, dict):
        return {key: _serialize_json_value(entry) for key, entry in item.items()}
    if isinstance(item, list):
        return [_serialize_json_value(entry) for entry in item]
    return item
|
||||
|
||||
|
||||
def serialize_datetime(value: object) -> object:
    """Return *value* unchanged unless it is a datetime, in which case its
    ISO-8601 string form is returned."""
    return value.isoformat() if isinstance(value, datetime) else value
|
||||
|
||||
|
||||
def serialize_dataclass_safe(value: object) -> dict[str, Any]:
    """Serialize a dataclass to a dict, then strip sensitive fields from it."""
    serialized = serialize_dataclass(value)
    return redact_sensitive_data(serialized)
|
||||
|
||||
|
||||
def redact_sensitive_data(value: Any) -> Any:
    """Recursively drop dict entries whose key looks sensitive.

    Dicts are rebuilt without sensitive keys, lists are processed element by
    element, and any other value is returned untouched.
    """
    if isinstance(value, dict):
        return {
            key: redact_sensitive_data(item)
            for key, item in value.items()
            if not is_sensitive_field_name(str(key))
        }
    if isinstance(value, list):
        return [redact_sensitive_data(item) for item in value]
    return value
|
||||
|
||||
|
||||
def is_sensitive_field_name(name: str) -> bool:
    """Tell whether *name* contains any known sensitive substring.

    Comparison is case-insensitive (casefold).
    """
    lowered = name.casefold()
    for sensitive in SENSITIVE_FIELD_NAMES:
        if sensitive in lowered:
            return True
    return False
|
||||
|
||||
|
||||
def serialize_last_backup(value: object | None) -> dict[str, Any] | None:
    """Serialize an optional last-backup record; ``None`` stays ``None``."""
    return None if value is None else serialize_dataclass_safe(value)
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,536 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from importlib import resources
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
from pve_backup_report import __version__
|
||||
from pve_backup_report.models import ReportData
|
||||
from pve_backup_report.report_pdf import (
|
||||
backup_retention_server_names,
|
||||
build_backup_retention_rows,
|
||||
coverage_row,
|
||||
display,
|
||||
display_bool,
|
||||
display_pbs_user_expire,
|
||||
display_permissions,
|
||||
format_datetime,
|
||||
format_pbs_server,
|
||||
namespaces_for_storages,
|
||||
pbs_datastore_usage_row,
|
||||
retention_policy_row,
|
||||
sort_text,
|
||||
unique_report_path,
|
||||
)
|
||||
|
||||
# Package and directory that hold the Jinja2 / WeasyPrint report assets.
TEMPLATE_PACKAGE = "pve_backup_report"
TEMPLATE_DIR = "templates"
HTML_TEMPLATE = "report.html.j2"
CSS_TEMPLATE = "report.css"
# Warning banner shown on retention sections when a PBS garbage collection is
# running while the report is generated (snapshot counts may look inflated).
RETENTION_GC_RUNNING_WARNING = (
    "Le nombre de versions des sauvegardes des VM/CT peut apparaitre superieur "
    "au nombre de versions declarees car le garbage collector du PBS concerne "
    "est en cours d'execution au moment de la generation du rapport."
)
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class ReportSection:
    """One rendered section of the PDF report (a titled table)."""

    section_id: str  # HTML anchor targeted by the table of contents
    title: str
    headers: list[str]  # column headers; empty for heading-only group sections
    rows: list[list[Any]]
    empty_message: str | None = None  # shown instead of the table when rows is empty
    page_break_after: bool = False  # force a page break after this section
    level: int = 2  # heading level: 1 = group heading, 2 = regular section
    warning: str | None = None  # optional warning banner rendered with the section
|
||||
|
||||
|
||||
def render_pdf(
    report_data: ReportData,
    output_dir: Path,
    filename_prefix: str = "rapport-sauvegardes-pve",
    pbs_hostnames: dict[str, str] | None = None,
) -> Path:
    """Render the report as a PDF under *output_dir* and return its path.

    Raises RuntimeError when WeasyPrint is not installed or when an
    incompatible pydyf version prevents PDF generation.
    """
    output_dir.mkdir(parents=True, exist_ok=True)
    pdf_path = unique_report_path(output_dir, filename_prefix, report_data.summary.generated_at)
    html = render_html(report_data, pbs_hostnames or {})

    # Import lazily so the rest of the package works without WeasyPrint.
    try:
        from weasyprint import HTML
    except ImportError as exc:
        raise RuntimeError(
            "Generation PDF WeasyPrint impossible: dependance weasyprint absente"
        ) from exc

    try:
        HTML(string=html, base_url=str(Path.cwd())).write_pdf(pdf_path)
    except AttributeError as exc:
        # WeasyPrint 62.x paired with an incompatible pydyf release fails with
        # an AttributeError mentioning "transform"; surface an actionable
        # message instead of the raw traceback.
        if "transform" in str(exc):
            raise RuntimeError(
                "Generation PDF WeasyPrint impossible: version pydyf incompatible. "
                "Installer les dependances du projet avec `pip install -r requirements.txt` "
                "afin d'utiliser pydyf>=0.10,<0.11 avec WeasyPrint 62.x."
            ) from exc
        raise
    return pdf_path
|
||||
|
||||
|
||||
def render_html(report_data: ReportData, pbs_hostnames: dict[str, str] | None = None) -> str:
    """Render the report HTML from the packaged Jinja2 template.

    Raises RuntimeError when jinja2 is not installed.
    """
    # Import lazily so the package can be used without jinja2 installed.
    try:
        from jinja2 import Environment, PackageLoader, select_autoescape
    except ImportError as exc:
        raise RuntimeError(
            "Generation HTML WeasyPrint impossible: dependance jinja2 absente"
        ) from exc

    environment = Environment(
        loader=PackageLoader(TEMPLATE_PACKAGE, TEMPLATE_DIR),
        autoescape=select_autoescape(["html", "j2"]),
    )
    template = environment.get_template(HTML_TEMPLATE)
    context = build_template_context(report_data, pbs_hostnames or {})
    # The stylesheet is inlined into the template rather than linked.
    context["css"] = load_template_css()
    return template.render(**context)
|
||||
|
||||
|
||||
def build_template_context(
    report_data: ReportData,
    pbs_hostnames: dict[str, str] | None = None,
) -> dict[str, Any]:
    """Assemble the variables handed to the HTML report template."""
    generated_label = format_datetime(report_data.summary.generated_at) or "non renseigne"
    return {
        "title": "Rapport des sauvegardes Proxmox VE",
        "subtitle": "Synthese operationnelle et element de preuve pour audit.",
        "version": __version__,
        "generated_at": generated_label,
        "sections": build_sections(report_data, pbs_hostnames or {}),
    }
|
||||
|
||||
|
||||
def build_sections(report_data: ReportData, pbs_hostnames: dict[str, str]) -> list[ReportSection]:
    """Build every report section in display order.

    The retention group heading is only emitted when at least one retention
    section exists; the issues section always comes last.
    """
    retention_sections = build_all_backup_retention_sections(report_data)
    sections = [
        build_summary_section(report_data),
        build_pbs_storages_section(report_data),
        build_pbs_access_users_section(report_data),
        build_pbs_datastore_usages_section(report_data),
        build_retention_policies_section(report_data),
        build_backup_jobs_section(report_data),
        build_missing_guests_section(report_data),
        build_coverage_group_section(),
        *build_coverage_sections(report_data, pbs_hostnames),
    ]
    if retention_sections:
        sections.extend([build_retention_group_section(), *retention_sections])
    sections.append(build_issues_section(report_data))
    return sections
|
||||
|
||||
|
||||
def build_all_backup_retention_sections(report_data: ReportData) -> list[ReportSection]:
    """Collect the retention sections of every PBS server, in server order."""
    return [
        section
        for server_name in backup_retention_server_names(report_data)
        for section in build_backup_retention_sections(report_data, server_name)
    ]
|
||||
|
||||
|
||||
def build_coverage_group_section() -> ReportSection:
    """Level-1 heading that groups the per-namespace coverage tables."""
    heading = {
        "section_id": "sauvegarde-vmct",
        "title": "Sauvegarde des VM/CT",
        "headers": [],
        "rows": [],
        "level": 1,
    }
    return ReportSection(**heading)
|
||||
|
||||
|
||||
def build_retention_group_section() -> ReportSection:
    """Level-1 heading that groups the per-server retention tables."""
    heading = {
        "section_id": "retention-sauvegardes-vmct",
        "title": "Retention des sauvegardes VM/CT",
        "headers": [],
        "rows": [],
        "level": 1,
    }
    return ReportSection(**heading)
|
||||
|
||||
|
||||
def build_summary_section(report_data: ReportData) -> ReportSection:
    """Key-figure table summarizing guests, jobs, coverage and issues."""
    summary = report_data.summary
    indicators = [
        ("VM", summary.total_vm),
        ("Conteneurs LXC", summary.total_ct),
        ("Total VM/CT", summary.total_guests),
        ("Stockages PBS", summary.pbs_storage_count),
        ("Jobs de sauvegarde", summary.backup_job_count),
        ("Jobs actifs", summary.active_backup_job_count),
        ("Jobs inactifs", summary.inactive_backup_job_count),
        ("Sauvegardes PBS planifiees", summary.pbs_planned_count),
        ("Sauvegardes non PBS planifiees", summary.non_pbs_planned_count),
        ("Sauvegardes vers PBS desactive", summary.disabled_pbs_count),
        ("Non sauvegardees", summary.missing_count),
        ("Indeterminees", summary.indeterminate_count),
        ("Anomalies", summary.issue_count),
    ]
    return ReportSection(
        section_id="resume",
        title="Resume",
        headers=["Indicateur", "Valeur"],
        rows=[[label, value] for label, value in indicators],
    )
|
||||
|
||||
|
||||
def build_pbs_storages_section(report_data: ReportData) -> ReportSection:
    """Table of PBS storages declared on the PVE side."""

    def storage_row(storage) -> list[Any]:
        return [
            storage.storage_id,
            display(storage.username),
            display(storage.server),
            display(storage.datastore),
            display(storage.namespace),
            display_bool(storage.enabled),
        ]

    return ReportSection(
        section_id="stockages-pbs",
        title="Stockages PBS déclarés sur PVE",
        headers=["ID", "Username", "Serveur PBS", "Datastore", "Namespace", "Actif"],
        rows=[storage_row(storage) for storage in report_data.pbs_storages],
        empty_message="Aucun stockage PBS collecte.",
    )
|
||||
|
||||
|
||||
def build_pbs_access_users_section(report_data: ReportData) -> ReportSection:
    """Audit table of the PBS users referenced by the PVE storages."""

    def user_row(user) -> list[Any]:
        return [
            user.server_name,
            display(user.auth_id),
            display(user.storage_id),
            display(user.datastore),
            display(user.namespace),
            display_bool(user.enabled),
            display_pbs_user_expire(user.expire),
            display(user.email),
            display_permissions(user.permissions),
            display(user.comment),
        ]

    column_titles = [
        "Serveur PBS",
        "Auth-id",
        "Storage PVE",
        "Datastore",
        "Namespace",
        "Actif",
        "Expiration",
        "Email",
        "Permissions",
        "Commentaire",
    ]
    return ReportSection(
        section_id="utilisateurs-pbs-audit-acces",
        title="Utilisateurs PBS - Audit des accès",
        headers=column_titles,
        rows=[user_row(user) for user in report_data.pbs_access_users],
        empty_message="Aucun utilisateur PBS collecte.",
    )
|
||||
|
||||
|
||||
def build_pbs_datastore_usages_section(report_data: ReportData) -> ReportSection:
    """Table of disk usage per PBS datastore."""
    usage_rows = [pbs_datastore_usage_row(usage) for usage in report_data.pbs_datastore_usages]
    return ReportSection(
        section_id="espaces-stockage-pbs",
        title="Espaces de stockage PBS",
        headers=["Serveur PBS", "Datastore", "Espace total", "Espace consomme", "Espace libre"],
        rows=usage_rows,
        empty_message="Aucun espace de stockage PBS collecte.",
    )
|
||||
|
||||
|
||||
def build_retention_policies_section(report_data: ReportData) -> ReportSection:
    """Table of the prune/retention policies collected from PBS."""
    column_titles = [
        "Serveur PBS",
        "Datastore",
        "Namespace",
        "Planification",
        "Actif",
        "Derniere",
        "Horaire",
        "Jour",
        "Semaine",
        "Mois",
        "Annee",
        "Profondeur",
    ]
    policy_rows = [retention_policy_row(policy) for policy in report_data.pbs_retention_policies]
    return ReportSection(
        section_id="politique-retention",
        title="Politique de retention",
        headers=column_titles,
        rows=policy_rows,
        empty_message="Aucune politique de retention PBS collectee.",
    )
|
||||
|
||||
|
||||
def build_backup_jobs_section(report_data: ReportData) -> ReportSection:
    """Table of the backup jobs configured on the PVE cluster."""

    def job_row(job) -> list[Any]:
        return [
            job.job_id,
            display(job.storage),
            display(job.schedule),
            display_bool(job.enabled),
            display(job.mode),
            display(job.selection),
            display(job.excluded),
        ]

    return ReportSection(
        section_id="jobs-sauvegarde",
        title="Jobs de sauvegarde",
        headers=["ID", "Storage", "Horaire", "Actif", "Mode", "Selection", "Exclusion"],
        rows=[job_row(job) for job in report_data.backup_jobs],
        empty_message="Aucun job de sauvegarde collecte.",
        page_break_after=True,
    )
|
||||
|
||||
|
||||
def build_missing_guests_section(report_data: ReportData) -> ReportSection:
    """Table of guests that are covered by no backup job at all."""
    # NOTE(review): this status literal presumably mirrors STATUS_MISSING from
    # the coverage module — confirm they stay in sync.
    unprotected = [item for item in report_data.coverage if item.status == "non_sauvegardee"]
    return ReportSection(
        section_id="vmct-non-sauvegardees",
        title="VM/CT non sauvegardees",
        headers=["VMID", "Nom", "Notes", "Type", "Noeud", "Etat", "Detail"],
        rows=[coverage_row(item, include_storage=False) for item in unprotected],
        empty_message="Aucune VM/CT non sauvegardee detectee.",
    )
|
||||
|
||||
|
||||
def build_coverage_sections(report_data: ReportData, pbs_hostnames: dict[str, str]) -> list[ReportSection]:
    """Build one coverage table per PBS namespace, sorted by namespace.

    Guests are grouped by the namespace(s) of the storages that back them;
    only the last section carries a page break.  When no guest was collected
    a single placeholder section is returned instead.
    """
    namespace_by_storage = {
        storage.storage_id: storage.namespace for storage in report_data.pbs_storages
    }
    server_by_storage = {
        storage.storage_id: format_pbs_server(storage.server, pbs_hostnames)
        for storage in report_data.pbs_storages
    }

    coverage_by_namespace = {}
    for item in report_data.coverage:
        # display() folds guests with no resolvable namespace into the
        # "non renseigne" placeholder bucket.
        namespace = display(namespaces_for_storages(item.storages, namespace_by_storage))
        coverage_by_namespace.setdefault(namespace, []).append(item)

    if not coverage_by_namespace:
        return [
            ReportSection(
                section_id="sauvegarde-vmct",
                title="Sauvegarde des VM/CT - non renseigne",
                headers=coverage_headers_without_namespace(),
                rows=[],
                empty_message="Aucune VM/CT collectee.",
                page_break_after=True,
            )
        ]

    sections = []
    sorted_namespaces = sorted(coverage_by_namespace, key=sort_text)
    for namespace in sorted_namespaces:
        rows = [
            coverage_row_without_namespace(
                item,
                namespace_by_storage,
                server_by_storage,
                report_data,
            )
            for item in sorted(
                coverage_by_namespace[namespace],
                key=coverage_sort_key_without_namespace,
            )
        ]
        sections.append(
            ReportSection(
                section_id=f"sauvegarde-vmct-{section_id_fragment(namespace)}",
                title=f"Sauvegarde des VM/CT - {namespace}",
                headers=coverage_headers_without_namespace(),
                rows=rows,
                empty_message="Aucune VM/CT collectee.",
                # Only the last namespace table forces a page break.
                page_break_after=namespace == sorted_namespaces[-1],
            )
        )
    return sections
|
||||
|
||||
|
||||
def coverage_headers_without_namespace() -> list[str]:
    """Column headers of the per-namespace coverage tables.

    The namespace itself is carried by the section title, hence no namespace
    column here.
    """
    identity_columns = ["VMID", "Nom", "Notes", "Type", "Noeud", "Etat de la VM"]
    backup_columns = [
        "Sauvegarde",
        "Serveur PBS",
        "Storage",
        "Mode",
        "Frequence de sauvegarde",
        "Derniere sauvegarde",
    ]
    return identity_columns + backup_columns
|
||||
|
||||
|
||||
def coverage_row_without_namespace(
    item,
    namespace_by_storage,
    server_by_storage,
    report_data: ReportData,
) -> list[Any]:
    """Full coverage row with the namespace column (index 9) removed."""
    full_row = coverage_row(
        item,
        include_storage=True,
        namespace_by_storage=namespace_by_storage,
        server_by_storage=server_by_storage,
        last_backup_by_vmid=report_data.last_backup_results,
    )
    return [cell for index, cell in enumerate(full_row) if index != 9]
|
||||
|
||||
|
||||
def coverage_sort_key_without_namespace(item) -> tuple[str, int]:
    """Sort key: normalized, deduplicated schedule list first, then VMID."""
    unique_schedules = sorted({job.schedule for job in item.jobs if job.schedule})
    return sort_text(", ".join(unique_schedules)), item.guest.vmid
|
||||
|
||||
|
||||
def section_id_fragment(value: str) -> str:
    """Turn an arbitrary label into a safe, dash-separated anchor fragment.

    Alphanumerics are kept (lowercased via casefold), common separators
    become dashes, everything else is dropped, and dash runs are collapsed.
    """
    separators = {"-", "_", " ", "/", "."}
    kept = []
    for char in value.casefold():
        if char.isalnum():
            kept.append(char)
        elif char in separators:
            kept.append("-")
    # Splitting on "-" and dropping empty parts both strips leading/trailing
    # dashes and collapses consecutive ones.
    fragment = "-".join(part for part in "".join(kept).split("-") if part)
    return fragment or "non-renseigne"
|
||||
|
||||
|
||||
def build_backup_retention_sections(report_data: ReportData, server_name: str) -> list[ReportSection]:
    """Build the per-namespace retention tables for one PBS server.

    The first row produced by build_backup_retention_rows is the header row;
    the namespace column (index 2) is lifted into the section title and
    removed from both headers and data rows.
    """
    rows = build_backup_retention_rows(report_data, server_name)
    # assumes build_backup_retention_rows always yields at least the header
    # row — TODO confirm; rows[0] would raise IndexError otherwise.
    headers = [str(value) for value in rows[0]]
    data_rows = rows[1:]
    headers_without_namespace = headers[:2] + headers[3:]

    if not data_rows:
        return [
            ReportSection(
                section_id=f"retention-{server_name.lower()}",
                title=f"Retention des sauvegardes VM/CT {server_name} - non renseigne",
                headers=headers_without_namespace,
                rows=[],
                empty_message=f"Aucune retention de sauvegarde VM/CT {server_name} collectee.",
                page_break_after=True,
            )
        ]

    rows_by_namespace = {}
    for row in data_rows:
        namespace = display(row[2] if len(row) > 2 else None)
        rows_by_namespace.setdefault(namespace, []).append(row)

    sections = []
    sorted_namespaces = sorted(rows_by_namespace, key=sort_text)
    for namespace in sorted_namespaces:
        namespace_rows = rows_by_namespace[namespace]
        section_rows = [retention_row_without_namespace(row) for row in namespace_rows]
        sections.append(
            ReportSection(
                section_id=f"retention-{server_name.lower()}-{section_id_fragment(namespace)}",
                title=f"Retention des sauvegardes VM/CT {server_name} - {namespace}",
                headers=headers_without_namespace,
                rows=section_rows,
                empty_message=f"Aucune retention de sauvegarde VM/CT {server_name} collectee.",
                # Only the last namespace table forces a page break.
                page_break_after=namespace == sorted_namespaces[-1],
                warning=retention_gc_warning(report_data, server_name, namespace_rows),
            )
        )
    return sections
|
||||
|
||||
|
||||
def retention_row_without_namespace(row: list[Any]) -> list[Any]:
    """Return *row* with the namespace column (index 2) removed."""
    return [cell for index, cell in enumerate(row) if index != 2]
|
||||
|
||||
|
||||
def retention_gc_warning(
    report_data: ReportData,
    server_name: str,
    rows: list[list[Any]],
) -> str | None:
    """Return the GC warning text when a garbage collection is running on any
    datastore referenced by *rows*, else None."""
    gc_by_datastore = {
        status.datastore: status
        for status in report_data.pbs_gc_statuses
        if status.server_name == server_name
    }
    for row in rows:
        # Rows shorter than the expected retention layout carry no snapshot
        # columns worth checking.
        if len(row) < 9:
            continue
        datastore = retention_datastore_for_row(report_data, server_name, row)
        if datastore is None:
            continue
        gc_status = gc_by_datastore.get(datastore)
        if gc_status is not None and gc_status.status == "en_cours":
            return RETENTION_GC_RUNNING_WARNING
    return None
|
||||
|
||||
|
||||
def retention_datastore_for_row(
    report_data: ReportData,
    server_name: str,
    row: list[Any],
) -> str | None:
    """Best-effort lookup of the datastore backing a retention row.

    Prefers the datastore column (index 3) when it is filled in; otherwise
    falls back to matching the row's VMID and namespace against the collected
    PBS snapshot summaries.  Returns None when neither path resolves.
    """
    if len(row) > 3:
        datastore = display(row[3])
        # display() maps missing values to the "non renseigne" placeholder.
        if datastore != "non renseigne":
            return datastore

    try:
        vmid = int(row[0])
    except (TypeError, ValueError):
        return None
    namespace = display(row[2] if len(row) > 2 else None)
    for summary in report_data.pbs_snapshot_summaries.values():
        if (
            summary.server_name == server_name
            and summary.vmid == vmid
            and display(summary.namespace) == namespace
        ):
            return summary.datastore
    return None
|
||||
|
||||
|
||||
def build_issues_section(report_data: ReportData) -> ReportSection:
    """Table listing every anomaly recorded during collection."""
    issue_rows = [
        [issue.severity, issue.component, issue.message, display(issue.details)]
        for issue in report_data.issues
    ]
    return ReportSection(
        section_id="anomalies",
        title="Anomalies",
        headers=["Severite", "Composant", "Message", "Details"],
        rows=issue_rows,
        empty_message="Aucune anomalie detectee.",
    )
|
||||
|
||||
|
||||
def load_template_css() -> str:
    """Read the packaged report stylesheet as UTF-8 text."""
    css_resource = resources.files(TEMPLATE_PACKAGE) / TEMPLATE_DIR / CSS_TEMPLATE
    return css_resource.read_text(encoding="utf-8")
|
||||
@@ -0,0 +1,19 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
|
||||
|
||||
# Credential patterns masked in log/report messages; each keeps its prefix
# (group 1) and replaces the secret part with "***".
SENSITIVE_PATTERNS = (
    # PVE / PBS API token headers, e.g. "PVEAPIToken=user@realm!name=uuid".
    re.compile(r"(PVEAPIToken=)[^\s]+", re.IGNORECASE),
    re.compile(r"(PBSAPIToken=)[^\s]+", re.IGNORECASE),
    # Environment-style secrets such as FOO_TOKEN_SECRET=...
    re.compile(r"([A-Z0-9_]*TOKEN_SECRET=)[^\s,;]+", re.IGNORECASE),
    re.compile(r"(password=)[^\s,;]+", re.IGNORECASE),
    re.compile(r"(secret=)[^\s,;]+", re.IGNORECASE),
)


def sanitize_message(value: object) -> str:
    """Flatten *value* to a single log-safe line with credentials masked.

    Line breaks are replaced by spaces so the message stays on one log line,
    and any known credential pattern is rewritten as ``<prefix>***``.
    """
    # Replace \r as well as \n: the original only handled \n, so a Windows
    # "\r\n" left a raw carriage return in the supposedly single-line output.
    message = str(value).replace("\r", " ").replace("\n", " ").strip()
    for pattern in SENSITIVE_PATTERNS:
        message = pattern.sub(r"\1***", message)
    return message
|
||||
@@ -0,0 +1,347 @@
|
||||
@page {
|
||||
size: A3 landscape;
|
||||
margin: 16mm 14mm 16mm 14mm;
|
||||
|
||||
@top-left {
|
||||
content: "Rapport des sauvegardes Proxmox VE";
|
||||
color: #1f4e79;
|
||||
font-size: 9pt;
|
||||
font-weight: 700;
|
||||
}
|
||||
|
||||
@top-right {
|
||||
content: "Page " counter(page) " / " counter(pages);
|
||||
color: #1f4e79;
|
||||
font-size: 8pt;
|
||||
}
|
||||
}
|
||||
|
||||
@page :first {
|
||||
@top-left {
|
||||
content: "";
|
||||
}
|
||||
|
||||
@top-right {
|
||||
content: "";
|
||||
}
|
||||
}
|
||||
|
||||
html {
|
||||
color: #111827;
|
||||
font-family: Arial, Helvetica, sans-serif;
|
||||
font-size: 8.5pt;
|
||||
line-height: 1.35;
|
||||
}
|
||||
|
||||
body {
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
/* ── Cover page ─────────────────────────────────────────────────── */
|
||||
|
||||
.cover {
|
||||
break-after: page;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
height: 265mm;
|
||||
}
|
||||
|
||||
.cover-logo-bar {
|
||||
border-bottom: 1px solid #dce6f1;
|
||||
padding: 5mm 10mm;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.cover-logo {
|
||||
height: 22pt;
|
||||
}
|
||||
|
||||
.cover-hero {
|
||||
padding: 22mm 12mm 18mm 12mm;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.cover-h1 {
|
||||
color: #1f4e79;
|
||||
font-size: 32pt;
|
||||
line-height: 1.15;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
.cover-spacer {
|
||||
flex: 1;
|
||||
}
|
||||
|
||||
.cover-meta-bar {
|
||||
background: #f4f7fa;
|
||||
border-top: 3px solid #1f4e79;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
.cover-meta-item {
|
||||
padding: 5mm 10mm;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.cover-meta-item:not(:last-child) {
|
||||
border-right: 1px solid #dce6f1;
|
||||
}
|
||||
|
||||
.cover-meta-label {
|
||||
color: #5b6770;
|
||||
display: block;
|
||||
font-size: 7pt;
|
||||
font-weight: 700;
|
||||
letter-spacing: 0.05em;
|
||||
text-transform: uppercase;
|
||||
}
|
||||
|
||||
.cover-meta-value {
|
||||
color: #1f4e79;
|
||||
display: block;
|
||||
font-size: 11pt;
|
||||
font-weight: 700;
|
||||
margin-top: 1mm;
|
||||
}
|
||||
|
||||
/* ── Table of contents ──────────────────────────────────────────── */
|
||||
|
||||
.toc {
|
||||
break-after: page;
|
||||
}
|
||||
|
||||
.toc h2 {
|
||||
border-bottom: 2px solid #1f4e79;
|
||||
color: #1f4e79;
|
||||
font-size: 15pt;
|
||||
line-height: 1.2;
|
||||
margin: 0 0 5mm 0;
|
||||
padding-bottom: 2.5mm;
|
||||
}
|
||||
|
||||
.toc ol {
|
||||
list-style: none;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
width: 200mm;
|
||||
}
|
||||
|
||||
.toc li {
|
||||
border-bottom: 1px dotted #c9d4e1;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
.toc a {
|
||||
color: #374151;
|
||||
display: block;
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
.toc a::after {
|
||||
content: target-counter(attr(href url), page);
|
||||
color: #1f4e79;
|
||||
float: right;
|
||||
font-weight: 700;
|
||||
min-width: 8mm;
|
||||
text-align: right;
|
||||
}
|
||||
|
||||
.toc-l1 > a {
|
||||
color: #1f4e79;
|
||||
font-size: 9.5pt;
|
||||
font-weight: 700;
|
||||
padding: 2.5mm 0;
|
||||
}
|
||||
|
||||
.toc-l2 > a {
|
||||
font-size: 8.5pt;
|
||||
padding: 1.8mm 0 1.8mm 5mm;
|
||||
}
|
||||
|
||||
/* ── Section headings ───────────────────────────────────────────── */
|
||||
|
||||
h1 {
|
||||
font-size: 24pt;
|
||||
line-height: 1.1;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
.section-group-title {
|
||||
align-items: center;
|
||||
border-bottom: 2px solid #1f4e79;
|
||||
color: #1f4e79;
|
||||
display: flex;
|
||||
font-size: 18pt;
|
||||
line-height: 1.15;
|
||||
margin: 3mm 0 5mm 0;
|
||||
padding-bottom: 2mm;
|
||||
}
|
||||
|
||||
.report-section {
|
||||
margin-bottom: 8mm;
|
||||
}
|
||||
|
||||
.report-section h2 {
|
||||
align-items: center;
|
||||
background: #f4f7fa;
|
||||
border-left: 4px solid #1f4e79;
|
||||
color: #1f4e79;
|
||||
display: flex;
|
||||
font-size: 12pt;
|
||||
line-height: 1.2;
|
||||
margin: 0 0 4mm 0;
|
||||
padding: 2.5mm 4mm;
|
||||
}
|
||||
|
||||
.page-break-after {
|
||||
break-after: page;
|
||||
}
|
||||
|
||||
/* ── Section icons ──────────────────────────────────────────────── */
|
||||
|
||||
.section-icon {
|
||||
height: 12pt;
|
||||
margin-right: 5pt;
|
||||
vertical-align: middle;
|
||||
width: 12pt;
|
||||
}
|
||||
|
||||
.section-icon-lg {
|
||||
height: 16pt;
|
||||
margin-right: 6pt;
|
||||
width: 16pt;
|
||||
}
|
||||
|
||||
/* ── KPI cards (resume section) ─────────────────────────────────── */
|
||||
|
||||
.kpi-grid {
|
||||
display: grid;
|
||||
gap: 3mm;
|
||||
grid-template-columns: repeat(7, 1fr);
|
||||
margin: 0 auto 4mm auto;
|
||||
width: 96%;
|
||||
}
|
||||
|
||||
.kpi-card {
|
||||
background: #f4f7fa;
|
||||
border: 1px solid #dce6f1;
|
||||
border-left: 3px solid #1f4e79;
|
||||
padding: 2.5mm 3mm 2mm 3mm;
|
||||
}
|
||||
|
||||
.kpi-card.kpi-alert {
|
||||
background: #fff7ed;
|
||||
border-left-color: #c2410c;
|
||||
}
|
||||
|
||||
.kpi-label {
|
||||
color: #5b6770;
|
||||
font-size: 6.5pt;
|
||||
letter-spacing: 0.03em;
|
||||
text-transform: uppercase;
|
||||
}
|
||||
|
||||
.kpi-value {
|
||||
color: #1f4e79;
|
||||
font-size: 13pt;
|
||||
font-weight: 700;
|
||||
line-height: 1.15;
|
||||
margin-top: 0.5mm;
|
||||
}
|
||||
|
||||
.kpi-card.kpi-alert .kpi-value {
|
||||
color: #c2410c;
|
||||
}
|
||||
|
||||
/* ── Tables ─────────────────────────────────────────────────────── */
|
||||
|
||||
table {
|
||||
border-collapse: collapse;
|
||||
font-size: 7.2pt;
|
||||
margin-left: auto;
|
||||
margin-right: auto;
|
||||
table-layout: fixed;
|
||||
width: 96%;
|
||||
}
|
||||
|
||||
thead {
|
||||
display: table-header-group;
|
||||
}
|
||||
|
||||
tr {
|
||||
break-inside: avoid;
|
||||
}
|
||||
|
||||
th {
|
||||
background: #1f4e79;
|
||||
border: 1px solid #a8bfd4;
|
||||
color: #ffffff;
|
||||
font-size: 7.5pt;
|
||||
font-weight: 700;
|
||||
padding: 2.5mm 2.5mm;
|
||||
text-align: left;
|
||||
vertical-align: top;
|
||||
}
|
||||
|
||||
td {
|
||||
border: 1px solid #c9d4e1;
|
||||
padding: 2.2mm 2.5mm;
|
||||
vertical-align: top;
|
||||
word-wrap: break-word;
|
||||
}
|
||||
|
||||
tbody tr:nth-child(even) td {
|
||||
background: #f4f8fc;
|
||||
}
|
||||
|
||||
/* ── Cell status classes ─────────────────────────────────────────── */
|
||||
|
||||
td.status-active {
|
||||
color: #166534;
|
||||
font-weight: 700;
|
||||
}
|
||||
|
||||
td.status-inactive {
|
||||
color: #9a3412;
|
||||
font-weight: 700;
|
||||
}
|
||||
|
||||
td.status-success {
|
||||
color: #166534;
|
||||
}
|
||||
|
||||
td.status-error {
|
||||
color: #9a3412;
|
||||
font-weight: 700;
|
||||
}
|
||||
|
||||
td.status-indeterminate {
|
||||
color: #854d0e;
|
||||
}
|
||||
|
||||
td.muted {
|
||||
color: #9ca3af;
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
/* ── Alert / empty messages ─────────────────────────────────────── */
|
||||
|
||||
.empty {
|
||||
background: #f4f7fa;
|
||||
border-left: 4px solid #1f4e79;
|
||||
color: #374151;
|
||||
font-size: 8pt;
|
||||
margin: 0;
|
||||
padding: 3mm 4mm;
|
||||
}
|
||||
|
||||
.warning {
|
||||
background: #fff7ed;
|
||||
border-left: 4px solid #c2410c;
|
||||
color: #7c2d12;
|
||||
font-size: 8pt;
|
||||
font-weight: 700;
|
||||
margin: 0 0 5mm 0;
|
||||
padding: 3mm 4mm;
|
||||
}
|
||||
File diff suppressed because one or more lines are too long
Reference in New Issue
Block a user