update
This commit is contained in:
748
AzA march 2026 - Kopie (18)/admin_routes.py
Normal file
748
AzA march 2026 - Kopie (18)/admin_routes.py
Normal file
@@ -0,0 +1,748 @@
|
||||
# admin_routes.py – AZA Admin Control Panel v2 (internal JSON endpoints)
|
||||
#
|
||||
# All endpoints require X-Admin-Token header matching AZA_ADMIN_TOKEN env var.
|
||||
# This router is mounted with prefix="/admin" in backend_main.py.
|
||||
#
|
||||
# v1 endpoints: system_status, licenses_overview, backup_status, billing_overview
|
||||
# v2 endpoints: license_customer_map, revenue_overview, alerts, dashboard_summary
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import calendar
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import sqlite3
|
||||
import time
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
from fastapi.responses import JSONResponse
|
||||
|
||||
from aza_security import require_admin_token
|
||||
|
||||
# Router for all admin endpoints; the dependency enforces the X-Admin-Token
# header (see require_admin_token) on every route registered below.
router = APIRouter(tags=["admin"], dependencies=[Depends(require_admin_token)])

# Directory containing this file; used to resolve default data-file paths.
_BASE_DIR = Path(__file__).resolve().parent
# Process start time; /admin/system_status reports uptime relative to this.
_START_TIME = time.time()

# Candidate directories scanned for backup snapshot folders.
_BACKUP_PATHS = [
    Path("/host_backups"),
    Path("/root/aza-backups"),
    Path("/root/aza-backups/daily"),
    Path("/var/backups/aza"),
]
# Candidate backup log files; backup_status reports the first one that exists.
_BACKUP_LOG_PATHS = [
    Path("/host_backups/backup.log"),
    Path("/root/aza-backups/backup.log"),
    Path("/var/log/aza-backup.log"),
]

# Price per Stripe lookup_key in CHF cents; used by revenue_overview to
# estimate MRR (yearly prices are divided by 12 there).
_LOOKUP_KEY_PRICES_CHF: Dict[str, int] = {
    "aza_basic_monthly": 59_00,
    "aza_basic_yearly": 590_00,
    "aza_team_monthly": 89_00,
    "aza_team_yearly": 890_00,
}
|
||||
|
||||
|
||||
def _stripe_db_path() -> Path:
    """Resolve the license/webhook SQLite file: env override or default."""
    default = str(_BASE_DIR / "data" / "stripe_webhook.sqlite")
    return Path(os.environ.get("STRIPE_DB_PATH", default))
|
||||
|
||||
|
||||
def _events_log_path() -> Path:
    """Resolve the Stripe events JSONL log: env override or default."""
    default = str(_BASE_DIR / "data" / "stripe_events.log.jsonl")
    return Path(os.environ.get("STRIPE_EVENTS_LOG", default))
|
||||
|
||||
|
||||
def _disk_usage() -> Dict[str, Any]:
|
||||
try:
|
||||
usage = shutil.disk_usage("/")
|
||||
total_gb = round(usage.total / (1024 ** 3), 2)
|
||||
used_gb = round(usage.used / (1024 ** 3), 2)
|
||||
free_gb = round(usage.free / (1024 ** 3), 2)
|
||||
used_pct = round((usage.used / usage.total) * 100, 1) if usage.total else 0
|
||||
return {
|
||||
"total_gb": total_gb,
|
||||
"used_gb": used_gb,
|
||||
"free_gb": free_gb,
|
||||
"used_percent": used_pct,
|
||||
}
|
||||
except Exception as e:
|
||||
return {"error": str(e)}
|
||||
|
||||
|
||||
def _safe_db_connect(db_path: Path):
|
||||
if not db_path.exists():
|
||||
return None
|
||||
return sqlite3.connect(str(db_path))
|
||||
|
||||
|
||||
def _stripe_env_ok() -> bool:
|
||||
return (
|
||||
bool(os.environ.get("STRIPE_SECRET_KEY", "").strip())
|
||||
and bool(os.environ.get("STRIPE_WEBHOOK_SECRET", "").strip())
|
||||
)
|
||||
|
||||
|
||||
def _newest_backup_info() -> Dict[str, Any]:
    """Return metadata for the most recently modified backup folder.

    Every directory in _BACKUP_PATHS is walked; subdirectories compete by
    mtime and the newest wins. Returns ``{"found": False}`` when no backup
    folder exists anywhere. Unreadable locations are skipped silently.
    """
    newest: Dict[str, Any] = {"found": False}
    newest_mtime = 0.0
    for location in _BACKUP_PATHS:
        if not (location.exists() and location.is_dir()):
            continue
        try:
            for candidate in location.iterdir():
                if not candidate.is_dir():
                    continue
                mtime = candidate.stat().st_mtime
                if mtime <= newest_mtime:
                    continue
                newest_mtime = mtime
                hours = round((time.time() - mtime) / 3600, 1)
                newest = {
                    "found": True,
                    "path": str(location),
                    "name": candidate.name,
                    "time_utc": datetime.fromtimestamp(mtime, tz=timezone.utc).isoformat(),
                    "age_hours": hours,
                    "age_days": round(hours / 24, 1),
                }
        except Exception:
            # Permission or stat failure on this location: keep scanning others.
            continue
    return newest
|
||||
|
||||
|
||||
def _license_counts() -> Dict[str, int]:
    """Return ``{status: count}`` aggregated over the licenses table.

    Best-effort helper shared by several endpoints: a missing database file
    or any SQLite error degrades to an empty dict rather than raising.
    """
    db_path = _stripe_db_path()
    if not db_path.exists():
        return {}
    con = None
    try:
        con = sqlite3.connect(str(db_path))
        rows = con.execute(
            "SELECT status, COUNT(*) FROM licenses GROUP BY status"
        ).fetchall()
        return {status: count for status, count in rows}
    except Exception:
        return {}
    finally:
        # Fix: always release the connection — the original only closed it
        # on the success path and leaked it whenever the query raised.
        if con is not None:
            con.close()
|
||||
|
||||
|
||||
# ═══════════════════════════════════════════════════════════════════════════════
|
||||
# v1 ENDPOINTS (unchanged)
|
||||
# ═══════════════════════════════════════════════════════════════════════════════
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# 1. GET /admin/system_status
|
||||
# ---------------------------------------------------------------------------
|
||||
@router.get("/system_status")
def system_status() -> Dict[str, Any]:
    """Basic service health: uptime, disk usage, Stripe env config, DB file.

    Stripe health is judged purely from environment configuration; no API
    call is made here.
    """
    now = datetime.now(timezone.utc)

    key_ok = bool(os.environ.get("STRIPE_SECRET_KEY", "").strip())
    webhook_ok = bool(os.environ.get("STRIPE_WEBHOOK_SECRET", "").strip())
    if key_ok and webhook_ok:
        stripe_health: Dict[str, Any] = {"ok": True, "detail": "env_configured"}
    elif not key_ok:
        stripe_health = {"ok": False, "detail": "STRIPE_SECRET_KEY missing"}
    else:
        stripe_health = {"ok": False, "detail": "STRIPE_WEBHOOK_SECRET missing"}

    db_path = _stripe_db_path()
    db_exists = db_path.exists()
    size_kb = round(db_path.stat().st_size / 1024, 1) if db_exists else None

    return {
        "status": "ok",
        "timestamp_utc": now.isoformat(),
        "uptime_seconds": int(time.time() - _START_TIME),
        "disk": _disk_usage(),
        "stripe": stripe_health,
        "database": {
            "path": str(db_path),
            "exists": db_exists,
            "size_kb": size_kb,
        },
        "python_pid": os.getpid(),
    }
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# 2. GET /admin/licenses_overview
|
||||
# ---------------------------------------------------------------------------
|
||||
@router.get("/licenses_overview")
def licenses_overview(email: Optional[str] = None) -> Dict[str, Any]:
    """Counts by status plus the 20 most recently updated licenses.

    Args:
        email: optional filter, matched case-insensitively against
            ``customer_email``.

    Returns a ``status: no_database`` payload when the DB file is absent,
    and ``status: error`` with the message on any SQLite failure.
    """
    db_path = _stripe_db_path()
    if not db_path.exists():
        return {
            "status": "no_database",
            "db_path": str(db_path),
            "counts_by_status": {},
            "total": 0,
            "recent": [],
        }

    con = None
    try:
        con = sqlite3.connect(str(db_path))
        con.row_factory = sqlite3.Row

        rows = con.execute(
            "SELECT status, COUNT(*) as cnt FROM licenses GROUP BY status"
        ).fetchall()
        counts = {r["status"]: r["cnt"] for r in rows}
        total = sum(counts.values())

        if email:
            email_clean = email.strip().lower()
            recent_rows = con.execute(
                """SELECT * FROM licenses
                   WHERE lower(customer_email) = ?
                   ORDER BY updated_at DESC LIMIT 20""",
                (email_clean,),
            ).fetchall()
        else:
            recent_rows = con.execute(
                "SELECT * FROM licenses ORDER BY updated_at DESC LIMIT 20"
            ).fetchall()

        recent = [dict(r) for r in recent_rows]

        return {
            "status": "ok",
            "counts_by_status": counts,
            "total": total,
            "recent": recent,
            "filter_email": email or None,
        }
    except Exception as e:
        return {"status": "error", "error": str(e)}
    finally:
        # Fix: close the connection on every path — the original only
        # reached con.close() on success and leaked it when a query raised.
        if con is not None:
            con.close()
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# 3. GET /admin/backup_status
|
||||
# ---------------------------------------------------------------------------
|
||||
@router.get("/backup_status")
def backup_status() -> Dict[str, Any]:
    """Summarize backup directories, their newest snapshot, and the backup log.

    Returns a dict with overall disk usage, one entry per known backup
    location (even non-existent ones, so callers see what was searched),
    and the tail of the first backup log file found.
    """
    result: Dict[str, Any] = {
        "disk": _disk_usage(),
        "backup_locations": [],
        "backup_log": None,
    }

    for bp in _BACKUP_PATHS:
        entry: Dict[str, Any] = {"path": str(bp), "exists": bp.exists()}
        if bp.exists() and bp.is_dir():
            try:
                # Sorted newest-first by mtime so dirs[0] is the most recent
                # backup folder.
                items = sorted(bp.iterdir(), key=lambda p: p.stat().st_mtime, reverse=True)
                dirs = [d for d in items if d.is_dir()]
                all_files = list(bp.rglob("*"))
                total_size = sum(f.stat().st_size for f in all_files if f.is_file())

                entry["folder_count"] = len(dirs)
                entry["file_count"] = len([f for f in all_files if f.is_file()])
                entry["total_size_mb"] = round(total_size / (1024 ** 2), 2)
                if dirs:
                    newest = dirs[0]
                    mtime = newest.stat().st_mtime
                    entry["newest_backup"] = newest.name
                    entry["newest_backup_time_utc"] = datetime.fromtimestamp(
                        mtime, tz=timezone.utc
                    ).isoformat()
                    entry["newest_backup_age_hours"] = round(
                        (time.time() - mtime) / 3600, 1
                    )
            except Exception as e:
                # Permission/stat failures are reported per-location, not fatal.
                entry["error"] = str(e)
        result["backup_locations"].append(entry)

    # Only the FIRST existing log file is reported; later candidates are
    # ignored (note the break below).
    for lp in _BACKUP_LOG_PATHS:
        if lp.exists() and lp.is_file():
            try:
                text = lp.read_text(encoding="utf-8", errors="replace")
                lines = text.strip().splitlines()
                # Tail of at most 20 lines.
                tail = lines[-20:] if len(lines) > 20 else lines
                result["backup_log"] = {
                    "path": str(lp),
                    "total_lines": len(lines),
                    "last_lines": tail,
                }
            except Exception as e:
                result["backup_log"] = {"path": str(lp), "error": str(e)}
            break

    if result["backup_log"] is None:
        result["backup_log"] = {"status": "not_found", "searched": [str(p) for p in _BACKUP_LOG_PATHS]}

    return result
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# 4. GET /admin/billing_overview
|
||||
# ---------------------------------------------------------------------------
|
||||
@router.get("/billing_overview")
def billing_overview() -> Dict[str, Any]:
    """Stripe env health, license DB summary, and recent webhook events.

    All data sources are optional: a missing DB file or events log yields
    ``exists: False`` sections instead of an error response, and per-source
    failures are reported inline via an ``error`` key.
    """
    stripe_key_set = bool(os.environ.get("STRIPE_SECRET_KEY", "").strip())
    stripe_webhook_set = bool(os.environ.get("STRIPE_WEBHOOK_SECRET", "").strip())
    stripe_ok = stripe_key_set and stripe_webhook_set

    db_path = _stripe_db_path()
    events_path = _events_log_path()

    db_info: Dict[str, Any] = {"exists": db_path.exists()}
    licenses_summary: List[Dict[str, Any]] = []
    events_info: Dict[str, Any] = {"exists": events_path.exists()}

    if db_path.exists():
        con = None
        try:
            con = sqlite3.connect(str(db_path))
            con.row_factory = sqlite3.Row
            db_info["size_kb"] = round(db_path.stat().st_size / 1024, 1)

            rows = con.execute(
                """SELECT subscription_id, customer_email, status,
                          lookup_key, current_period_end, updated_at
                   FROM licenses ORDER BY updated_at DESC LIMIT 20"""
            ).fetchall()
            licenses_summary = [dict(r) for r in rows]

            processed_count = con.execute(
                "SELECT COUNT(*) FROM processed_events"
            ).fetchone()[0]
            db_info["processed_events_count"] = processed_count
        except Exception as e:
            db_info["error"] = str(e)
        finally:
            # Fix: the original only closed the connection on the success
            # path, leaking it whenever a query raised.
            if con is not None:
                con.close()

    if events_path.exists():
        try:
            events_info["size_kb"] = round(events_path.stat().st_size / 1024, 1)
            with events_path.open("r", encoding="utf-8", errors="replace") as f:
                lines = f.readlines()
            events_info["total_lines"] = len(lines)
            # Parse only the last 10 lines of the JSONL log.
            tail_lines = lines[-10:] if len(lines) > 10 else lines
            recent_events: List[Dict[str, Any]] = []
            for line in tail_lines:
                line = line.strip()
                if not line:
                    continue
                try:
                    recent_events.append(json.loads(line))
                except Exception:
                    # Keep malformed lines visible (truncated) for debugging.
                    recent_events.append({"raw": line[:200]})
            events_info["recent"] = recent_events
        except Exception as e:
            events_info["error"] = str(e)

    return {
        "stripe_health": {
            "ok": stripe_ok,
            "secret_key_set": stripe_key_set,
            "webhook_secret_set": stripe_webhook_set,
        },
        "database": db_info,
        "licenses_recent": licenses_summary,
        "events_log": events_info,
    }
|
||||
|
||||
|
||||
# ═══════════════════════════════════════════════════════════════════════════════
|
||||
# v2 ENDPOINTS
|
||||
# ═══════════════════════════════════════════════════════════════════════════════
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# 5. GET /admin/license_customer_map
|
||||
# ---------------------------------------------------------------------------
|
||||
@router.get("/license_customer_map")
def license_customer_map(
    email: Optional[str] = None,
    status: Optional[str] = None,
) -> Dict[str, Any]:
    """List licenses (max 200) with human-readable timestamp decorations.

    Args:
        email: optional case-insensitive ``customer_email`` filter.
        status: optional exact license ``status`` filter.

    Returns ``status: no_database`` when the DB file is absent and
    ``status: error`` with the message on any SQLite failure.
    """
    db_path = _stripe_db_path()
    if not db_path.exists():
        return {"status": "no_database", "total": 0, "licenses": []}

    con = None
    try:
        con = sqlite3.connect(str(db_path))
        con.row_factory = sqlite3.Row

        query = """
            SELECT subscription_id, customer_id, customer_email, status,
                   lookup_key, allowed_users, devices_per_user,
                   current_period_end, client_reference_id, updated_at
            FROM licenses
        """
        conditions: List[str] = []
        params: List[Any] = []

        if email:
            conditions.append("lower(customer_email) = ?")
            params.append(email.strip().lower())
        if status:
            conditions.append("status = ?")
            params.append(status.strip())

        if conditions:
            query += " WHERE " + " AND ".join(conditions)
        query += " ORDER BY updated_at DESC LIMIT 200"

        rows = con.execute(query, params).fetchall()
        licenses = []
        now_ts = int(time.time())
        for r in rows:
            row_dict = dict(r)
            # Decorate unix timestamps with human-readable UTC strings.
            cpe = row_dict.get("current_period_end")
            if cpe and isinstance(cpe, int):
                row_dict["period_end_human"] = datetime.fromtimestamp(
                    cpe, tz=timezone.utc
                ).strftime("%Y-%m-%d %H:%M UTC")
                row_dict["period_expired"] = cpe < now_ts
            ua = row_dict.get("updated_at")
            if ua and isinstance(ua, int):
                row_dict["updated_at_human"] = datetime.fromtimestamp(
                    ua, tz=timezone.utc
                ).strftime("%Y-%m-%d %H:%M UTC")
            licenses.append(row_dict)

        counts = _license_counts()

        return {
            "status": "ok",
            "total": len(licenses),
            "counts_by_status": counts,
            "filter_email": email or None,
            "filter_status": status or None,
            "licenses": licenses,
        }
    except Exception as e:
        return {"status": "error", "error": str(e)}
    finally:
        # Fix: close the connection on every path — the original leaked it
        # whenever the query raised before reaching con.close().
        if con is not None:
            con.close()
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# 6. GET /admin/revenue_overview
|
||||
# ---------------------------------------------------------------------------
|
||||
@router.get("/revenue_overview")
def revenue_overview() -> Dict[str, Any]:
    """Current-month revenue snapshot.

    Combines three sources, each optional:
      1. Local license DB: subscription counts and an MRR estimate derived
         from _LOOKUP_KEY_PRICES_CHF.
      2. Live Stripe API (only when STRIPE_SECRET_KEY is set): this month's
         charges and refunds.
      3. The local webhook events log: per-kind event counts this month.
    """
    now = datetime.now(timezone.utc)
    # Unix timestamp of the first instant of the current UTC month.
    month_start = now.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
    month_start_ts = int(month_start.timestamp())

    result: Dict[str, Any] = {
        "month": now.strftime("%Y-%m"),
        "currency": "chf",
        # Upgraded to "stripe_api+local_db" below if the Stripe calls succeed.
        "data_source": "local_db",
    }

    counts = _license_counts()
    result["active_subscriptions"] = counts.get("active", 0)
    result["canceled_subscriptions"] = counts.get("canceled", 0)
    result["total_subscriptions"] = sum(counts.values())
    result["counts_by_status"] = counts

    # --- MRR estimate from the local DB (active licenses × price table) ---
    db_path = _stripe_db_path()
    counts_by_key: Dict[str, int] = {}
    estimated_mrr_cents = 0
    if db_path.exists():
        try:
            con = sqlite3.connect(str(db_path))
            rows = con.execute(
                "SELECT lookup_key, COUNT(*) FROM licenses WHERE status='active' GROUP BY lookup_key"
            ).fetchall()
            for lk, cnt in rows:
                lk_str = lk or "unknown"
                counts_by_key[lk_str] = cnt
                # Unknown lookup_keys contribute 0 to the estimate.
                price = _LOOKUP_KEY_PRICES_CHF.get(lk_str, 0)
                if "yearly" in lk_str:
                    # Yearly plans contribute 1/12 of their price per month.
                    estimated_mrr_cents += int(price / 12) * cnt
                else:
                    estimated_mrr_cents += price * cnt
            con.close()
        except Exception:
            # Best effort: DB trouble just leaves the estimate at 0.
            pass
    result["counts_by_lookup_key"] = counts_by_key
    result["estimated_mrr_chf"] = round(estimated_mrr_cents / 100, 2)

    # --- Live Stripe data (charges + refunds since month start) ---
    stripe_data: Dict[str, Any] = {"available": False}
    stripe_key = os.environ.get("STRIPE_SECRET_KEY", "").strip()
    if stripe_key:
        try:
            # Lazy import: the stripe SDK is only needed when a key is set.
            import stripe as _stripe
            _stripe.api_key = stripe_key

            charges = _stripe.Charge.list(
                created={"gte": month_start_ts},
                limit=100,
            )
            gross_cents = 0
            charge_count = 0
            recent_charges: List[Dict[str, Any]] = []
            # auto_paging_iter walks all pages, not just the first 100.
            for ch in charges.auto_paging_iter():
                if ch.status == "succeeded" and ch.paid:
                    gross_cents += ch.amount
                    charge_count += 1
                    recent_charges.append({
                        "amount_chf": round(ch.amount / 100, 2),
                        "email": ch.billing_details.email if ch.billing_details else ch.receipt_email,
                        "date_utc": datetime.fromtimestamp(ch.created, tz=timezone.utc).strftime("%Y-%m-%d %H:%M UTC"),
                        "description": ch.description or "",
                        "charge_id": ch.id,
                    })

            refunds = _stripe.Refund.list(
                created={"gte": month_start_ts},
                limit=100,
            )
            refund_cents = 0
            refund_count = 0
            recent_refunds: List[Dict[str, Any]] = []
            for rf in refunds.auto_paging_iter():
                if rf.status == "succeeded":
                    refund_cents += rf.amount
                    refund_count += 1
                    recent_refunds.append({
                        "amount_chf": round(rf.amount / 100, 2),
                        "date_utc": datetime.fromtimestamp(rf.created, tz=timezone.utc).strftime("%Y-%m-%d %H:%M UTC"),
                        "refund_id": rf.id,
                    })

            stripe_data = {
                "available": True,
                "current_month_gross_chf": round(gross_cents / 100, 2),
                "current_month_charges": charge_count,
                "current_month_refunds_chf": round(refund_cents / 100, 2),
                "current_month_refund_count": refund_count,
                "current_month_net_chf": round((gross_cents - refund_cents) / 100, 2),
                "recent_charges": recent_charges,
                "recent_refunds": recent_refunds,
            }
            result["data_source"] = "stripe_api+local_db"
        except Exception as e:
            # API/auth failure: keep local-DB data, flag Stripe as unavailable.
            stripe_data = {"available": False, "error": str(e)}

    result["stripe_live"] = stripe_data

    # --- Webhook event counts for the current month (JSONL log) ---
    events_path = _events_log_path()
    event_summary: Dict[str, int] = {}
    if events_path.exists():
        try:
            with events_path.open("r", encoding="utf-8", errors="replace") as f:
                for line in f:
                    line = line.strip()
                    if not line:
                        continue
                    try:
                        evt = json.loads(line)
                        ts = evt.get("ts", 0)
                        if ts >= month_start_ts:
                            kind = evt.get("kind", "unknown")
                            event_summary[kind] = event_summary.get(kind, 0) + 1
                    except Exception:
                        # Skip malformed lines.
                        continue
        except Exception:
            pass
    result["current_month_events"] = event_summary

    return result
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# 7. GET /admin/alerts
|
||||
# ---------------------------------------------------------------------------
|
||||
@router.get("/alerts")
def alerts() -> Dict[str, Any]:
    """Evaluate operational alert rules: disk, Stripe config, DB, backups.

    Returns the alert list plus totals per severity (info/warning/critical).
    """
    alert_list: List[Dict[str, str]] = []

    def _add(alert_id: str, severity: str, message: str) -> None:
        # Single place that shapes an alert record.
        alert_list.append({"id": alert_id, "severity": severity, "message": message})

    # Disk thresholds: >=95% critical, >=85% warning.
    disk = _disk_usage()
    used_pct = disk.get("used_percent", 0)
    free_gb = disk.get("free_gb", 999)
    if isinstance(used_pct, (int, float)):
        if used_pct >= 95:
            _add("disk_critical", "critical",
                 f"Disk usage {used_pct}% – less than {free_gb} GB free")
        elif used_pct >= 85:
            _add("disk_high", "warning",
                 f"Disk usage {used_pct}% – {free_gb} GB free")

    if not _stripe_env_ok():
        _add("stripe_not_configured", "critical",
             "Stripe env vars (SECRET_KEY / WEBHOOK_SECRET) not set")

    db_path = _stripe_db_path()
    if not db_path.exists():
        _add("db_missing", "warning",
             f"License database not found at {db_path}")

    counts = _license_counts()
    if not counts or counts.get("active", 0) == 0:
        _add("no_active_licenses", "info", "No active licenses in database")

    # Backup freshness: >48h critical, >26h warning.
    backup = _newest_backup_info()
    if not backup["found"]:
        _add("backup_missing", "warning",
             "No backup folders found in any known path")
    else:
        age_h = backup.get("age_hours", 0)
        if age_h > 48:
            _add("backup_stale", "critical",
                 f"Latest backup is {backup.get('age_days', '?')} days old ({backup.get('name', '?')})")
        elif age_h > 26:
            _add("backup_old", "warning",
                 f"Latest backup is {round(age_h, 0):.0f}h old ({backup.get('name', '?')})")

    severity_counts: Dict[str, int] = {"info": 0, "warning": 0, "critical": 0}
    for item in alert_list:
        sev = item.get("severity", "info")
        severity_counts[sev] = severity_counts.get(sev, 0) + 1

    return {
        "total": len(alert_list),
        "counts_by_severity": severity_counts,
        "alerts": alert_list,
    }
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# 8. GET /admin/dashboard_summary
|
||||
# ---------------------------------------------------------------------------
|
||||
@router.get("/dashboard_summary")
def dashboard_summary() -> Dict[str, Any]:
    """One-call aggregate for the admin dashboard.

    Combines disk usage, license counts, newest-backup info, alert totals
    (by calling the alerts() endpoint function directly), and — when a
    Stripe key is configured — this month's gross/refunds/net revenue.
    """
    now = datetime.now(timezone.utc)

    disk = _disk_usage()
    counts = _license_counts()
    backup = _newest_backup_info()
    # Reuse the /admin/alerts handler for its totals.
    alert_data = alerts()

    stripe_ok = _stripe_env_ok()

    total_licenses = sum(counts.values())
    active = counts.get("active", 0)
    canceled = counts.get("canceled", 0)

    # Revenue defaults to None values until live Stripe data is available.
    rev: Dict[str, Any] = {
        "gross_chf": None,
        "refunds_chf": None,
        "net_chf": None,
        "data_source": "none",
    }
    stripe_key = os.environ.get("STRIPE_SECRET_KEY", "").strip()
    if stripe_key:
        try:
            # Lazy import: the stripe SDK is only needed when a key is set.
            import stripe as _stripe
            _stripe.api_key = stripe_key

            # Start of the current UTC month as a unix timestamp.
            month_start = now.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
            month_start_ts = int(month_start.timestamp())

            gross = 0
            # Sum succeeded+paid charges created this month (all pages).
            for ch in _stripe.Charge.list(created={"gte": month_start_ts}, limit=100).auto_paging_iter():
                if ch.status == "succeeded" and ch.paid:
                    gross += ch.amount
            refund_total = 0
            for rf in _stripe.Refund.list(created={"gte": month_start_ts}, limit=100).auto_paging_iter():
                if rf.status == "succeeded":
                    refund_total += rf.amount

            rev = {
                "gross_chf": round(gross / 100, 2),
                "refunds_chf": round(refund_total / 100, 2),
                "net_chf": round((gross - refund_total) / 100, 2),
                "data_source": "stripe_api",
            }
        except Exception:
            # API/auth failure: keep the None values but flag the source.
            rev["data_source"] = "stripe_api_error"

    return {
        "timestamp_utc": now.isoformat(),
        "system_ok": True,
        "stripe_ok": stripe_ok,
        "disk_free_gb": disk.get("free_gb"),
        "disk_used_percent": disk.get("used_percent"),
        "latest_backup": backup if backup["found"] else None,
        "licenses": {
            "total": total_licenses,
            "active": active,
            "canceled": canceled,
            "other": total_licenses - active - canceled,
            "counts_by_status": counts,
        },
        "current_month_revenue": rev,
        "alerts_total": alert_data["total"],
        "alerts_by_severity": alert_data["counts_by_severity"],
    }
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# 9. GET /admin/devices?email=...
|
||||
# ---------------------------------------------------------------------------
|
||||
@router.get("/devices")
def admin_devices(email: Optional[str] = None) -> Dict[str, Any]:
    """Device overview for a customer email, or all customers with devices.

    With ``email``: delegates to aza_device_enforcement for that customer.
    Without: aggregates device counts per customer_email from the
    device_bindings table.
    """
    # Lazy project import to avoid a hard dependency at module load time.
    from aza_device_enforcement import list_devices_for_email, DB_PATH as _DEV_DB

    # Prefer the DB path published by stripe_routes; fall back to the default.
    db_path = str(_BASE_DIR / "data" / "stripe_webhook.sqlite")
    try:
        from stripe_routes import DB_PATH as _SR_DB  # type: ignore
        db_path = _SR_DB
    except Exception:
        pass

    if email and email.strip():
        return list_devices_for_email(email.strip(), db_path=db_path)

    con = None
    try:
        con = sqlite3.connect(db_path)
        rows = con.execute(
            """SELECT customer_email, COUNT(*) AS device_count,
                      MAX(last_seen_at) AS last_active
               FROM device_bindings
               GROUP BY lower(customer_email)
               ORDER BY last_active DESC"""
        ).fetchall()
        return {
            "customers": [
                {"email": r[0], "device_count": r[1], "last_active": r[2]}
                for r in rows
            ]
        }
    except Exception as exc:
        return {"error": str(exc), "customers": []}
    finally:
        # Fix: release the connection even when the query fails — the
        # original only closed it on the success path.
        if con is not None:
            con.close()
|
||||
Reference in New Issue
Block a user