update
1
AzA march 2026 - Kopie (18)/%~dp0client_run_log.txt
Normal file
@@ -0,0 +1 @@
|
||||
python: can't open file 'C:\\Users\\surov\\Documents\\AZA\\backup 24.2.26\\%~dp0basis14.py': [Errno 2] No such file or directory
|
||||
12
AzA march 2026 - Kopie (18)/.env.example
Normal file
@@ -0,0 +1,12 @@
|
||||
# AZA Desktop – Environment-Vorlage
|
||||
# Kopieren nach .env und echte Werte eintragen:
|
||||
# Copy-Item .env.example .env
|
||||
#
|
||||
# KEINE echten Secrets committen oder exportieren!
|
||||
|
||||
STRIPE_SECRET_KEY=sk_test_REPLACE_ME
|
||||
STRIPE_WEBHOOK_SECRET=whsec_REPLACE_ME
|
||||
STRIPE_SUCCESS_URL=http://127.0.0.1:8000/billing/success?session_id={CHECKOUT_SESSION_ID}
|
||||
STRIPE_CANCEL_URL=http://127.0.0.1:8000/billing/cancel
|
||||
MEDWORK_API_TOKEN=REPLACE_WITH_STRONG_TOKEN
|
||||
MEDWORK_API_TOKENS=REPLACE_WITH_STRONG_TOKEN
|
||||
1
AzA march 2026 - Kopie (18)/AZA)
Normal file
@@ -0,0 +1 @@
|
||||
AZA ONE-CLICK START (Backend -
|
||||
38
AzA march 2026 - Kopie (18)/AZA-basis14.spec
Normal file
@@ -0,0 +1,38 @@
|
||||
# -*- mode: python ; coding: utf-8 -*-
|
||||
|
||||
|
||||
a = Analysis(
|
||||
['C:\\Users\\surov\\Documents\\AZA\\backup 24.2.26\\basis14.py'],
|
||||
pathex=[],
|
||||
binaries=[],
|
||||
datas=[],
|
||||
hiddenimports=[],
|
||||
hookspath=[],
|
||||
hooksconfig={},
|
||||
runtime_hooks=[],
|
||||
excludes=[],
|
||||
noarchive=False,
|
||||
optimize=0,
|
||||
)
|
||||
pyz = PYZ(a.pure)
|
||||
|
||||
exe = EXE(
|
||||
pyz,
|
||||
a.scripts,
|
||||
a.binaries,
|
||||
a.datas,
|
||||
[],
|
||||
name='AZA-basis14',
|
||||
debug=False,
|
||||
bootloader_ignore_signals=False,
|
||||
strip=False,
|
||||
upx=True,
|
||||
upx_exclude=[],
|
||||
runtime_tmpdir=None,
|
||||
console=True,
|
||||
disable_windowed_traceback=False,
|
||||
argv_emulation=False,
|
||||
target_arch=None,
|
||||
codesign_identity=None,
|
||||
entitlements_file=None,
|
||||
)
|
||||
430
AzA march 2026 - Kopie (18)/AZA_DETAILED_HANDOVER.md
Normal file
@@ -0,0 +1,430 @@
|
||||
# AZA – Detaillierte Projektuebergabe / Handover
|
||||
|
||||
**Stand: 2026-04-06**
|
||||
**Zweck:** Diese Datei ist die verbindliche Referenz fuer jeden neuen Chat. Zuerst lesen, dann arbeiten.
|
||||
|
||||
---
|
||||
|
||||
## 1. Projektziel / Aktueller Fokus
|
||||
|
||||
**AZA** (AZA Medical AI Assistant / AZA Desktop) ist eine medizinische KI-Desktop-Anwendung fuer Windows. Sie unterstuetzt Aerzte bei Diktat, Textverarbeitung, medizinischer Recherche und Dokumentation.
|
||||
|
||||
**Architektur (Variante B – verbindlich seit 2026-03-25):**
|
||||
- Desktop-App (Python/Tkinter) kommuniziert mit eigenem AZA-Backend auf Hetzner
|
||||
- Backend leitet KI-Anfragen serverseitig an OpenAI weiter
|
||||
- Kein OpenAI-Key beim Kunden noetig
|
||||
- Kein OpenAI-Key in der Desktop-App
|
||||
|
||||
**Aktueller Schwerpunkt:**
|
||||
Produktiver Kundenfluss mit Lizenzschluessel. Die klare Zielsequenz ist:
|
||||
|
||||
```
|
||||
Kauf → Lizenzschluessel per E-Mail → Download → Installation → Aktivierung → Nutzung
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 2. Aktueller stabiler technischer Stand
|
||||
|
||||
### 2.1 Lizenzschluessel-Flow (PRODUKTIV, Stand 2026-04-06)
|
||||
|
||||
| Aspekt | Status |
|
||||
|---|---|
|
||||
| `/license/activate` | Funktioniert produktiv auf Hetzner |
|
||||
| `/license/status` | Funktioniert produktiv auf Hetzner |
|
||||
| Lizenzschluessel-Erzeugung | Automatisch beim Kauf (Format: `AZA-XXXX-XXXX-XXXX-XXXX`) |
|
||||
| Lizenzschluessel in DB | Gespeichert in `licenses`-Tabelle, Spalte `license_key` |
|
||||
| Desktop-Aktivierung | Lizenzschluessel kann in der Desktop-App eingegeben und aktiviert werden |
|
||||
| `/license/status?license_key=...` | Liefert `valid: true` fuer aktive Lizenzen |
|
||||
| Device-Enforcement | Aktiv und funktioniert korrekt |
|
||||
| Success-Seite `/billing/success` | Zeigt dem Kunden den Lizenzschluessel nach Kauf an |
|
||||
| Produktiver Test | Erfolgreich mit aktivem Lizenzschluessel |
|
||||
|
||||
**Wichtig:** Device-Bindings waren zeitweise ein Blocker. Fuer den Testdatensatz (`admin@aza-medwork.ch`) wurden bestehende Device-Bindings aus der DB geloescht, damit der erneute Aktivierungstest funktionierte. Bei neuen Kunden tritt dieses Problem nicht auf.
|
||||
|
||||
### 2.2 Mailversand (PRODUKTIV ueber Resend, Stand 2026-04-06)
|
||||
|
||||
| Aspekt | Status |
|
||||
|---|---|
|
||||
| Produktiver Versandkanal | **Resend HTTP API** |
|
||||
| Resend-Domain | `mail.aza-medwork.ch` (DNS bei Hostpoint verifiziert) |
|
||||
| Absender / MAIL_FROM | `AZA MedWork <noreply@mail.aza-medwork.ch>` |
|
||||
| Test-Endpunkt | `POST /stripe/test_license_email?email=...` (Admin-geschuetzt) |
|
||||
| Letzter erfolgreicher Test | `{"sent": true, "to": "admin@aza-medwork.ch"}` |
|
||||
| E-Mail Zustellung | Produktiv bestaetigt – Mail kommt an |
|
||||
|
||||
### 2.3 Stripe / Billing (PRODUKTIV)
|
||||
|
||||
| Aspekt | Status |
|
||||
|---|---|
|
||||
| Stripe-Modus | Live (sk_live_) |
|
||||
| Webhook-Endpunkt | `https://api.aza-medwork.ch/stripe/webhook` |
|
||||
| Echter Live-Kauf | CHF 59 Basic Monthly erfolgreich durchgefuehrt |
|
||||
| Lizenz-Lifecycle | Kauf → active → Storno/Refund → canceled → Desktop Testmodus (bewiesen) |
|
||||
| Webhook-Events | `checkout.session.completed`, `customer.subscription.updated`, `customer.subscription.deleted` |
|
||||
|
||||
### 2.4 Desktop-App
|
||||
|
||||
| Aspekt | Status |
|
||||
|---|---|
|
||||
| Lizenzpruefung | Ueber Backend (`/license/status`) mit Lizenzschluessel |
|
||||
| Vollmodus | Wenn Backend `valid: true` liefert |
|
||||
| Testmodus | Wenn keine gueltige Lizenz oder Backend nicht erreichbar |
|
||||
| Lokales Aktivierungs-Gate | Bei Remote-Backend uebersprungen (Root Cause 14 behoben) |
|
||||
| Update-Checker | Prueft `https://api.aza-medwork.ch/download/version.json` beim Start |
|
||||
| Aktuelle Version | `APP_VERSION = "1.0.0"`, `APP_CHANNEL = "stable"` |
|
||||
|
||||
### 2.5 Admin Control Panel (PRODUKTIV)
|
||||
|
||||
8 interne Admin-Endpunkte, geschuetzt via `X-Admin-Token` / `AZA_ADMIN_TOKEN`:
|
||||
|
||||
**v1:**
|
||||
- `GET /admin/system_status` – App-Health, Uptime, Disk, Stripe-Config, DB-Info
|
||||
- `GET /admin/licenses_overview` – Lizenzen nach Status, letzte 20, `?email=` Filter
|
||||
- `GET /admin/backup_status` – Backup-Pfade, Groesse, neustes Backup, Log-Tail
|
||||
- `GET /admin/billing_overview` – Stripe-Health, Lizenz-Summary, Event-Log
|
||||
|
||||
**v2:**
|
||||
- `GET /admin/license_customer_map` – Detaillierte Lizenznehmer-Uebersicht, `?email=` und `?status=` Filter
|
||||
- `GET /admin/revenue_overview` – MRR, Stripe-Live-Daten (gross/refunds/net), recent_charges, recent_refunds
|
||||
- `GET /admin/alerts` – Strukturierte Warnliste (info/warning/critical)
|
||||
- `GET /admin/dashboard_summary` – Sammel-Endpunkt fuer alle Kennzahlen
|
||||
|
||||
### 2.6 Backup / Storage
|
||||
|
||||
- Taegliches Backup-Skript: `/root/aza-backups/backup_aza.sh` (Cronjob)
|
||||
- Backup-Pfad: `/root/aza-backups/daily/`
|
||||
- In Container gemountet als `/host_backups` (read-only)
|
||||
- Ca. 137 GB frei, ca. 4-5% belegt – kein Speicherdruck
|
||||
|
||||
---
|
||||
|
||||
## 3. Mailversand-Historie / Root Causes / Finaler Weg
|
||||
|
||||
### Chronologie
|
||||
|
||||
1. **Erster Versuch: Hostpoint-SMTP**
|
||||
- Hostpoint-Mailbox `noreply@aza-medwork.ch` wurde angelegt
|
||||
- SMTP-Server: `asmtp.mail.hostpoint.ch`
|
||||
- Port 465 (SSL) und 587 (STARTTLS) getestet
|
||||
- SMTP-Daten wurden mehrfach geprueft und waren korrekt
|
||||
|
||||
2. **Beobachtete Fehler (SMTP von Hetzner/Container):**
|
||||
- Erste Tests: Auth-Fehler (falscher Host `mail.hostpoint.ch` statt `asmtp.mail.hostpoint.ch`)
|
||||
- Nach Host-Korrektur: `OSError: [Errno 101] Network is unreachable`
|
||||
- Port 465 → Timeout / Network unreachable
|
||||
- Port 587 → Timeout / Network unreachable
|
||||
- Ursache: Hetzner-Container kann Hostpoint-SMTP-Server nicht erreichen (Netzwerk-/Firewallsperre)
|
||||
|
||||
3. **Schlussfolgerung:**
|
||||
- Hostpoint-SMTP ist von Hetzner aus nicht nutzbar
|
||||
- Das ist ein Infrastruktur-/Netzwerkproblem, kein Code-Problem
|
||||
- Hostpoint-SMTP ist **nicht** der produktive Versandweg
|
||||
|
||||
4. **Loesung: Umstellung auf Resend HTTP API**
|
||||
- Resend-Account erstellt
|
||||
- Domain `mail.aza-medwork.ch` bei Resend registriert und via DNS bei Hostpoint verifiziert
|
||||
- `MAIL_FROM` gesetzt auf `AZA MedWork <noreply@mail.aza-medwork.ch>`
|
||||
- Code in `stripe_routes.py` umgebaut: `_send_via_resend()` als primaerer Kanal
|
||||
- **Wichtiger Fix:** Resend-HTTP-API erfordert `User-Agent` Header (ohne → Error 1010/403)
|
||||
- Fix angewandt: `"User-Agent": "AZA-MedWork/1.0"` im Request
|
||||
|
||||
5. **Finaler erfolgreicher Test:**
|
||||
```
|
||||
POST /stripe/test_license_email?email=admin@aza-medwork.ch
|
||||
→ {"sent": true, "to": "admin@aza-medwork.ch"}
|
||||
```
|
||||
E-Mail kam produktiv an.
|
||||
|
||||
### Aktueller Zustand der Mailfunktion in `stripe_routes.py`
|
||||
|
||||
```
|
||||
send_license_email(to_email, license_key)
|
||||
├── RESEND_API_KEY gesetzt? → _send_via_resend() [PRODUKTIVER WEG]
|
||||
└── sonst → _send_via_smtp() [INAKTIVER FALLBACK]
|
||||
```
|
||||
|
||||
- SMTP-Code ist noch vorhanden als Fallback
|
||||
- SMTP-Variablen in `.env` sind Altlast, nicht produktiv aktiv
|
||||
- Wenn `RESEND_API_KEY` gesetzt ist (und das ist es), wird immer Resend benutzt
|
||||
|
||||
---
|
||||
|
||||
## 4. Wichtige Pfade / Betriebsorte / Operator-Wissen
|
||||
|
||||
### Lokaler Windows-Rechner
|
||||
|
||||
| Was | Pfad |
|
||||
|---|---|
|
||||
| Projektordner | `C:\Users\surov\Documents\AZA_GIT\aza\AzA march 2026` |
|
||||
| Desktop-App direkt starten | `python basis14.py` (im Projektordner) |
|
||||
| Build-EXE | `.\build_exe.ps1` |
|
||||
| Build-Installer | `.\build_installer.ps1` |
|
||||
| Kompletter Release | `.\ship_release.ps1` |
|
||||
| Nur Upload | `.\publish_update.ps1` |
|
||||
| Installer-Artefakt | `dist\installer\aza_desktop_setup.exe` |
|
||||
| Release-Manifest | `release\version.json` |
|
||||
| Versionsquelle | `aza_version.py` (`APP_VERSION`, `APP_CHANNEL`) |
|
||||
|
||||
**Wichtig:** NICHT ueber lokale Starter starten (`start_all.bat`, `RUN_AZA_ONECLICK.bat`, `START_AZA.bat`, `start_backend_autoport.bat`) – diese setzen Env-Variablen auf localhost und ueberschreiben die Live-Backend-URL.
|
||||
|
||||
### Hetzner-Server (SSH)
|
||||
|
||||
| Was | Pfad / Befehl |
|
||||
|---|---|
|
||||
| SSH-Zugang | `ssh root@178.104.51.177` |
|
||||
| Repo-Root | `/root/aza-app` |
|
||||
| Docker-Compose-Ordner | `/root/aza-app/deploy` |
|
||||
| `.env`-Datei | `/root/aza-app/deploy/.env` |
|
||||
| Rebuild (immer im deploy-Ordner!) | `cd /root/aza-app/deploy && docker compose down && docker compose up -d --build` |
|
||||
| Container-Logs | `docker logs aza-api --tail 100` |
|
||||
| ENV im Container pruefen | `docker exec aza-api env \| grep VARIABLE` |
|
||||
| Backup-Ordner | `/root/aza-backups/daily/` |
|
||||
|
||||
**WICHTIG:** `docker compose` Befehle muessen IMMER im Ordner `/root/aza-app/deploy` ausgefuehrt werden, nicht im Repo-Root `/root/aza-app`.
|
||||
|
||||
### Hostpoint (Website / DNS)
|
||||
|
||||
| Was | Detail |
|
||||
|---|---|
|
||||
| Haupt-Website | Hostpoint bleibt fuer Website, Marketing, WooCommerce |
|
||||
| DNS-Verwaltung | Bei Hostpoint (fuer `aza-medwork.ch` und Subdomains) |
|
||||
| Mailboxen | Hostpoint verwaltet Mailboxen (z.B. `noreply@aza-medwork.ch`) |
|
||||
| Resend-DNS | `mail.aza-medwork.ch` DNS-Records fuer Resend bei Hostpoint gesetzt |
|
||||
|
||||
### Produktive URLs
|
||||
|
||||
| URL | Zweck |
|
||||
|---|---|
|
||||
| `https://api.aza-medwork.ch` | Backend-API |
|
||||
| `https://api.aza-medwork.ch/health` | Health-Check |
|
||||
| `https://api.aza-medwork.ch/stripe/webhook` | Stripe-Webhook |
|
||||
| `https://api.aza-medwork.ch/download/version.json` | Update-Manifest |
|
||||
| `https://api.aza-medwork.ch/download/aza_desktop_setup.exe` | Installer-Download |
|
||||
| `https://api.aza-medwork.ch/billing/success` | Kauf-Erfolgsseite |
|
||||
|
||||
---
|
||||
|
||||
## 5. Wichtige ENV / Konfiguration
|
||||
|
||||
### Aktive produktive ENV-Variablen (auf Hetzner in `/root/aza-app/deploy/.env`)
|
||||
|
||||
| Variable | Rolle | Status |
|
||||
|---|---|---|
|
||||
| `RESEND_API_KEY` | Resend API Credential fuer Mailversand | **AKTIV PRODUKTIV** |
|
||||
| `MAIL_FROM` | Absender fuer Lizenzschluessel-Mail | **AKTIV PRODUKTIV** |
|
||||
| `STRIPE_SECRET_KEY` | Stripe Live API Key (sk_live_...) | **AKTIV PRODUKTIV** |
|
||||
| `STRIPE_WEBHOOK_SECRET` | Stripe Webhook Signing Secret | **AKTIV PRODUKTIV** |
|
||||
| `AZA_ADMIN_TOKEN` | Token fuer Admin-Endpunkte | **AKTIV PRODUKTIV** |
|
||||
| `MEDWORK_API_TOKENS` | API-Token fuer Desktop-Backend-Kommunikation | **AKTIV PRODUKTIV** |
|
||||
| `OPENAI_API_KEY` | OpenAI API Key (serverseitig) | **AKTIV PRODUKTIV** |
|
||||
| `AZA_DOMAIN` | `api.aza-medwork.ch` | **AKTIV PRODUKTIV** |
|
||||
| `ACME_EMAIL` | `info@aza-medwork.ch` (fuer Caddy/HTTPS) | **AKTIV PRODUKTIV** |
|
||||
|
||||
### Inaktive / historische ENV-Variablen
|
||||
|
||||
| Variable | Rolle | Status |
|
||||
|---|---|---|
|
||||
| `SMTP_HOST` | Hostpoint SMTP Server | **INAKTIV** – Fallback, wird nicht genutzt |
|
||||
| `SMTP_PORT` | Hostpoint SMTP Port | **INAKTIV** – Fallback |
|
||||
| `SMTP_USER` | Hostpoint SMTP User | **INAKTIV** – Fallback |
|
||||
| `SMTP_PASS` | Hostpoint SMTP Passwort | **INAKTIV** – Fallback |
|
||||
| `SMTP_FROM` | Hostpoint SMTP Absender | **INAKTIV** – Fallback |
|
||||
|
||||
**Keine Rueckkehr zu Hostpoint-SMTP noetig, solange Resend stabil laeuft.**
|
||||
|
||||
---
|
||||
|
||||
## 6. Naechster Hauptblock: End-to-End-Kundentest
|
||||
|
||||
**Ziel:** Den kompletten Kundenfluss ohne Basteln, ohne manuelle DB-Eingriffe und ohne Operator-Hilfe beweisen.
|
||||
|
||||
### Zielbild fuer den Kundenfluss
|
||||
|
||||
```
|
||||
1. Kunde kauft ueber Stripe Payment Link / Checkout
|
||||
2. Stripe Webhook verarbeitet den Kauf
|
||||
3. Backend erzeugt Lizenzschluessel und speichert ihn in der DB
|
||||
4. Resend sendet automatisch E-Mail mit Lizenzschluessel an den Kunden
|
||||
5. Success-Seite zeigt dem Kunden ebenfalls den Lizenzschluessel
|
||||
6. Kunde laedt AZA ueber offiziellen Download-Link herunter
|
||||
7. Kunde installiert AZA
|
||||
8. Kunde gibt Lizenzschluessel in der Desktop-App ein
|
||||
9. Desktop aktiviert gegen Backend (/license/activate)
|
||||
10. Desktop startet im Vollmodus
|
||||
```
|
||||
|
||||
### Was dabei noch geprueft / sichergestellt werden muss
|
||||
|
||||
- [ ] E-Mail mit Lizenzschluessel kommt automatisch beim Kauf an (nicht nur via Test-Endpunkt)
|
||||
- [ ] Download-Link in der E-Mail ist korrekt und funktioniert
|
||||
- [ ] Installer laesst sich sauber installieren
|
||||
- [ ] Erststart ohne vorherige Konfiguration funktioniert
|
||||
- [ ] Lizenzschluessel-Eingabe in der App funktioniert auf Anhieb
|
||||
- [ ] Vollmodus wird sofort nach Aktivierung erreicht
|
||||
|
||||
---
|
||||
|
||||
## 7. Download-/Installer-Entscheidung
|
||||
|
||||
**Fuer den naechsten Kundenfluss-Test:**
|
||||
- Download soll ueber die offizielle Website / Download-Seite priorisiert werden
|
||||
- Nicht zuerst einen rohen Direktlink als Hauptweg verwenden
|
||||
- Die E-Mail soll idealerweise einen klaren Download-Link enthalten
|
||||
- Der Kundentest soll moeglichst realistisch am echten Kundenablauf orientiert sein
|
||||
|
||||
**Zielbild:**
|
||||
Mail mit Lizenzschluessel + klarem Download-Link → Download → Installation → Aktivierung
|
||||
|
||||
**Aktuell verfuegbarer Direktlink:**
|
||||
`https://api.aza-medwork.ch/download/aza_desktop_setup.exe`
|
||||
|
||||
---
|
||||
|
||||
## 8. Offene Restpunkte
|
||||
|
||||
| Punkt | Prioritaet | Blocker? |
|
||||
|---|---|---|
|
||||
| Mailtext und Success-Seite inhaltlich polieren | Niedrig | Nein |
|
||||
| Admin-Token rotieren (wurde im Chat offengelegt) | Mittel | Nein, aber vor echtem Kundenbetrieb empfohlen |
|
||||
| SMTP-Reste in `.env` aufraeumen | Niedrig | Nein (inaktiv) |
|
||||
| Resend-Setup / Domain-Policy weiter polieren | Niedrig | Nein |
|
||||
| Device-Bindings-Management fuer Mehrgeraete klarer machen | Mittel | Nein |
|
||||
| translate.py, aza_email.py, diktat_app.py auf Backend-Chat migrieren | Niedrig | Nein (Nebenpfade) |
|
||||
| WooCommerce / Website-Kaufpfad professionalisieren | Mittel | Spaeterer Block |
|
||||
| Browser-AZA / Web-App | Niedrig | Spaeterer Block |
|
||||
|
||||
---
|
||||
|
||||
## 9. Arbeitsstil / Nutzerpraeferenzen
|
||||
|
||||
**Diese Regeln gelten fuer ALLE zukuenftigen Chats:**
|
||||
|
||||
### Allgemeine Regeln
|
||||
|
||||
- Nutzer bastelt nicht – alle Aenderungen kommen als fertige, vollstaendige Dateien (ready-to-paste)
|
||||
- Nutzer fuehrt nur vorgegebene Commands aus, keine manuellen Edits
|
||||
- Jede Aenderung in 1 Patch, kein schrittweises Anleiten
|
||||
- Keine risky Refactors – immer minimal und sicher
|
||||
- Root-cause-first bei jedem Problem
|
||||
- Keine Monsterpatches
|
||||
- Keine Rueckfragen-Orgien
|
||||
- Keine Variantenflut – genau 1 Weg, der beste
|
||||
|
||||
### Operator-Schritte
|
||||
|
||||
- **Immer** explizit angeben, WO ein Schritt auszufuehren ist:
|
||||
- `[Windows PowerShell]` – lokaler Rechner, Projektordner
|
||||
- `[Hetzner SSH]` – `ssh root@178.104.51.177`
|
||||
- `[Browser]` – URL angeben
|
||||
- `[Composer/IDE]` – Cursor Editor
|
||||
- **Immer** exakte Copy-Paste-Befehle liefern
|
||||
- **Immer** mit Pfad oder Ort starten
|
||||
- Nicht schreiben, was der Nutzer NICHT tun soll, sondern nur den naechsten exakten Schritt
|
||||
- Keine vagen Formulierungen wie "send this" oder "do something like"
|
||||
- Nutzer will moeglichst wenig manuelle Improvisation
|
||||
|
||||
### Uebergaben
|
||||
|
||||
- Uebergaben fuer naechste Chats sollen ausfuehrlich sein, nicht minimal
|
||||
- Wichtige Root Causes immer dokumentieren
|
||||
- Geloeste Probleme klar als geloest markieren
|
||||
- Nicht bei alten Problemen wieder anfangen
|
||||
|
||||
---
|
||||
|
||||
## 10. Empfohlener Chat-Start fuer den naechsten Chat
|
||||
|
||||
### Sinnvolle naechste Hauptbloecke (nach Prioritaet)
|
||||
|
||||
1. **End-to-End-Kundentest** (EMPFOHLEN als naechstes)
|
||||
- Kontrollierter Kauf → automatische E-Mail → Download → Installation → Aktivierung → Vollmodus
|
||||
- Beweisen, dass der gesamte Fluss ohne Basteln funktioniert
|
||||
|
||||
2. **Download-Seite / Website-Kaufpfad**
|
||||
- Offizielle Download-Seite auf der Website einrichten
|
||||
- Klaren Kundenweg von Website → Kauf → Download definieren
|
||||
|
||||
3. **Admin-Token-Rotation + Secrets-Hygiene**
|
||||
- Offengelegten Admin-Token rotieren
|
||||
- Sicherstellen, dass keine Secrets im Repo liegen
|
||||
|
||||
### Beste Empfehlung
|
||||
|
||||
**Starte mit Block 1: End-to-End-Kundentest.**
|
||||
|
||||
### Erster konkreter Operator-Schritt
|
||||
|
||||
```
|
||||
[Browser]
|
||||
Stripe Payment Link oeffnen und kontrollierten Testkauf mit einer
|
||||
frischen E-Mail-Adresse durchfuehren (NICHT admin@aza-medwork.ch,
|
||||
sondern eine neue Adresse, um den Neukundenfall zu simulieren).
|
||||
```
|
||||
|
||||
Danach pruefen:
|
||||
1. `[Hetzner SSH]` – `/stripe/license_debug?email=NEUE_EMAIL` → aktive Lizenz?
|
||||
2. `[E-Mail-Postfach]` – Lizenzschluessel-Mail angekommen?
|
||||
3. `[Browser]` – Installer herunterladen ueber Link aus der Mail
|
||||
4. `[Windows]` – Installer ausfuehren, App starten, Lizenzschluessel eingeben
|
||||
5. `[Desktop-App]` – Vollmodus bestaetigen
|
||||
|
||||
---
|
||||
|
||||
## 11. Geloeste Root Causes (Referenz)
|
||||
|
||||
| RC | Problem | Loesung | Datei | Datum |
|
||||
|---|---|---|---|---|
|
||||
| RC14 | Desktop zeigte Testversion trotz aktiver Remote-Lizenz | `_has_remote_backend()` Bypass fuer lokales Aktivierungs-Gate | `basis14.py` | 2026-03-30 |
|
||||
| RC15 | `current_period_end` war null fuer aktive Lizenz | Fallback auf `items.data[0].current_period_end` im Webhook | `stripe_routes.py` | 2026-03-30 |
|
||||
| RC16 | revenue_overview zu grob fuer Betreiber | `recent_charges` und `recent_refunds` ergaenzt | `admin_routes.py` | 2026-03-31 |
|
||||
| RC17 | SMTP von Hetzner → Hostpoint nicht erreichbar | Umstellung auf Resend HTTP API | `stripe_routes.py` | 2026-04-06 |
|
||||
| RC18 | Resend-API lehnte Request ab (Error 1010/403) | `User-Agent: AZA-MedWork/1.0` Header ergaenzt | `stripe_routes.py` | 2026-04-06 |
|
||||
|
||||
---
|
||||
|
||||
## 12. Wichtige Dateien im Projekt
|
||||
|
||||
### Backend (auf Hetzner unter `/root/aza-app/`)
|
||||
|
||||
| Datei | Rolle |
|
||||
|---|---|
|
||||
| `backend_main.py` | FastAPI-Hauptanwendung, mountet alle Router |
|
||||
| `stripe_routes.py` | Stripe-Webhook, Lizenz-DB, Mailversand, Lizenzschluessel-Erzeugung |
|
||||
| `admin_routes.py` | Admin Control Panel v1+v2 Endpunkte |
|
||||
| `aza_license_logic.py` | `compute_license_decision()` – Lizenzgueltigkeit berechnen |
|
||||
| `aza_device_enforcement.py` | Device-Bindings verwalten |
|
||||
| `aza_security.py` | `require_api_token`, `require_admin_token` |
|
||||
|
||||
### Desktop (lokal)
|
||||
|
||||
| Datei | Rolle |
|
||||
|---|---|
|
||||
| `basis14.py` | Haupt-Desktop-App (8900+ Zeilen), UI, Lizenzcheck, Aktivierung |
|
||||
| `aza_version.py` | `APP_VERSION`, `APP_CHANNEL` – zentrale Versionsquelle |
|
||||
| `aza_style.py` | UI-Styling-Konstanten |
|
||||
| `desktop_update_check.py` | Update-Checker beim App-Start |
|
||||
| `aza_desktop.spec` | PyInstaller-Spezifikation |
|
||||
|
||||
### Build / Release (lokal)
|
||||
|
||||
| Datei | Rolle |
|
||||
|---|---|
|
||||
| `build_exe.ps1` | Baut EXE mit PyInstaller |
|
||||
| `build_installer.ps1` | Baut Installer mit Inno Setup |
|
||||
| `build_release_manifest.ps1` | Erzeugt `release/version.json` |
|
||||
| `release.ps1` | Lokaler Release-Prozess (Build + Verify) |
|
||||
| `publish_update.ps1` | Upload nach Hetzner |
|
||||
| `ship_release.ps1` | **Verbindlicher Ein-Knopf-Release** (Build + Upload) |
|
||||
|
||||
---
|
||||
|
||||
## Abschluss
|
||||
|
||||
**Wenn der naechste Chat startet:**
|
||||
1. Zuerst diese Datei lesen
|
||||
2. Nicht wieder bei alten SMTP-/Deploy-/Pfadfehlern beginnen
|
||||
3. Hostpoint-SMTP ist nicht der produktive Weg – Resend funktioniert
|
||||
4. Lizenzschluessel-Flow ist produktiv – nicht neu bauen
|
||||
5. Admin-Endpunkte sind produktiv – nicht neu bauen
|
||||
6. Direkt beim naechsten sinnvollen Block weitermachen (siehe Abschnitt 10)
|
||||
14
AzA march 2026 - Kopie (18)/AZA_Deinstallieren.bat
Normal file
@@ -0,0 +1,14 @@
|
||||
@echo off
|
||||
title AZA Desktop - Clean Uninstall / Reset
|
||||
|
||||
:: Admin-Rechte anfordern (noetig fuer Program Files und Firewall)
|
||||
net session >nul 2>&1
|
||||
if %errorlevel% neq 0 (
|
||||
echo Fordere Administratorrechte an...
|
||||
powershell -Command "Start-Process -FilePath '%~f0' -Verb RunAs"
|
||||
exit /b
|
||||
)
|
||||
|
||||
cd /d "%~dp0"
|
||||
powershell -ExecutionPolicy Bypass -File "tools\aza_clean_uninstall.ps1"
|
||||
pause
|
||||
39
AzA march 2026 - Kopie (18)/AZA_Empfang.spec
Normal file
@@ -0,0 +1,39 @@
|
||||
# -*- mode: python ; coding: utf-8 -*-
|
||||
|
||||
|
||||
a = Analysis(
|
||||
['C:\\Users\\surov\\Documents\\AZA_GIT\\aza\\AzA march 2026\\aza_empfang_app.py'],
|
||||
pathex=[],
|
||||
binaries=[],
|
||||
datas=[],
|
||||
hiddenimports=[],
|
||||
hookspath=[],
|
||||
hooksconfig={},
|
||||
runtime_hooks=[],
|
||||
excludes=[],
|
||||
noarchive=False,
|
||||
optimize=0,
|
||||
)
|
||||
pyz = PYZ(a.pure)
|
||||
|
||||
exe = EXE(
|
||||
pyz,
|
||||
a.scripts,
|
||||
a.binaries,
|
||||
a.datas,
|
||||
[],
|
||||
name='AZA_Empfang',
|
||||
debug=False,
|
||||
bootloader_ignore_signals=False,
|
||||
strip=False,
|
||||
upx=True,
|
||||
upx_exclude=[],
|
||||
runtime_tmpdir=None,
|
||||
console=False,
|
||||
disable_windowed_traceback=False,
|
||||
argv_emulation=False,
|
||||
target_arch=None,
|
||||
codesign_identity=None,
|
||||
entitlements_file=None,
|
||||
icon=['C:\\Users\\surov\\Documents\\AZA_GIT\\aza\\AzA march 2026\\logo.ico'],
|
||||
)
|
||||
28
AzA march 2026 - Kopie (18)/CHANGELOG.md
Normal file
@@ -0,0 +1,28 @@
|
||||
# AZA Desktop – Changelog
|
||||
|
||||
## v1.0.0 (14. März 2026)
|
||||
Erste stabile Verkaufsversion.
|
||||
|
||||
### Module
|
||||
- **KI-Assistent** – Medizinische Fragen an die KI stellen, Befunde besprechen
|
||||
- **Krankengeschichte** – Diktat aufnehmen, transkribieren, strukturierte KG erstellen
|
||||
- **Audio-Notizen** – Sprachaufnahmen und Notizen für den Praxisalltag
|
||||
- **Übersetzer** – Medizinische Texte übersetzen, Begriffe nachschlagen (mit Lerngedächtnis)
|
||||
- **Ärzte-Netzwerk** – Kollegialer Austausch (Vorschau)
|
||||
- **Praxis-Intern** – Teamnachrichten (Vorschau)
|
||||
|
||||
### Funktionen
|
||||
- Professioneller Launcher mit 6-Modul-Übersicht
|
||||
- Automatischer lokaler Backend-Start
|
||||
- Integrierter Systemstatus / Selbsttest (12 Prüfpunkte)
|
||||
- Geführte OpenAI-Key-Einrichtung
|
||||
- Projekt-Notizen mit Tabs, Diktat, Bilder einfügen und Auto-Save
|
||||
- Fenster-Positionen und -Grössen werden gespeichert
|
||||
- Windows-Firewall wird automatisch konfiguriert
|
||||
- Wöchentliches Login
|
||||
- Standard-Startmodul wählbar
|
||||
|
||||
### System
|
||||
- Windows 10/11 (64-Bit)
|
||||
- Eigener OpenAI API-Key erforderlich
|
||||
- Professioneller Installer (automatische Deinstallation alter Versionen)
|
||||
297
AzA march 2026 - Kopie (18)/DEV_SETUP_ON_NEW_PC.md
Normal file
@@ -0,0 +1,297 @@
|
||||
# AZA Desktop – Entwicklungs-Setup auf neuem PC
|
||||
|
||||
Stand: 2026-03-23
|
||||
|
||||
---
|
||||
|
||||
## 1. Voraussetzungen installieren
|
||||
|
||||
### 1.1 Python 3.12.x
|
||||
|
||||
**Empfohlen: Python 3.12.10** (aktuell getestet und verwendet).
|
||||
|
||||
- Download: https://www.python.org/downloads/release/python-31210/
|
||||
- **Windows installer (64-bit)** wählen
|
||||
- Bei Installation: **"Add Python to PATH"** aktivieren
|
||||
- **"Install for all users"** empfohlen
|
||||
|
||||
> **Warum 3.12?** Das Projekt verwendet aktuell Python 3.12.10. PyInstaller 6.13.0 und alle Abhängigkeiten sind damit getestet. Python 3.13 ist noch nicht validiert. Python 3.11 würde vermutlich funktionieren, ist aber nicht aktuell getestet.
|
||||
|
||||
### 1.2 ffmpeg
|
||||
|
||||
Wird für Audio-/Diktat-Funktionen benötigt.
|
||||
|
||||
```powershell
|
||||
winget install Gyan.FFmpeg
|
||||
```
|
||||
|
||||
Alternativ: https://www.gyan.dev/ffmpeg/builds/ (full build) und manuell zum PATH hinzufügen.
|
||||
|
||||
**Prüfen:**
|
||||
```powershell
|
||||
ffmpeg -version
|
||||
```
|
||||
|
||||
### 1.3 Inno Setup 6 (nur für Installer-Build)
|
||||
|
||||
- Download: https://jrsoftware.org/isdl.php
|
||||
- Standard-Installation durchführen
|
||||
- Wird nur benötigt, wenn der Windows-Installer gebaut werden soll
|
||||
- `build_installer.ps1` kann Inno Setup auch automatisch nachinstallieren
|
||||
|
||||
### 1.4 Cursor IDE
|
||||
|
||||
- Download: https://cursor.sh
|
||||
- Nach Installation den Projektordner öffnen
|
||||
|
||||
---
|
||||
|
||||
## 2. Projekt einrichten
|
||||
|
||||
### 2.1 Projektordner entpacken/kopieren
|
||||
|
||||
Den gesamten Projektordner (z. B. `backup 24.2.26`) an einen beliebigen Ort kopieren. Beispiel:
|
||||
|
||||
```
|
||||
C:\Users\<USERNAME>\Documents\AZA\backup 24.2.26\
|
||||
```
|
||||
|
||||
### 2.2 Virtuelle Umgebung erstellen
|
||||
|
||||
```powershell
|
||||
cd "C:\Users\<USERNAME>\Documents\AZA\backup 24.2.26"
|
||||
python -m venv .venv
|
||||
.\.venv\Scripts\Activate.ps1
|
||||
```
|
||||
|
||||
> Falls die Aktivierung fehlschlägt (ExecutionPolicy):
|
||||
> ```powershell
|
||||
> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
|
||||
> ```
|
||||
|
||||
### 2.3 Abhängigkeiten installieren
|
||||
|
||||
```powershell
|
||||
pip install -r requirements-dev.txt
|
||||
```
|
||||
|
||||
**Wichtig:** `numpy` MUSS Version < 2.4 sein (aktuell 2.2.5). NumPy 2.4.x bricht den PyInstaller-Build mit `ModuleNotFoundError: No module named 'numpy._core._exceptions'`.
|
||||
|
||||
### 2.4 Konfigurationsdateien vorbereiten
|
||||
|
||||
#### `.env` (Backend-Secrets)
|
||||
|
||||
```powershell
|
||||
Copy-Item .env.example .env
|
||||
```
|
||||
|
||||
Dann `.env` bearbeiten und echte Werte eintragen:
|
||||
- `STRIPE_SECRET_KEY` – Stripe Test-/Live-Key
|
||||
- `STRIPE_WEBHOOK_SECRET` – Stripe Webhook-Secret
|
||||
- `MEDWORK_API_TOKEN` / `MEDWORK_API_TOKENS` – Backend-API-Token
|
||||
|
||||
#### `config\aza_runtime.env` (OpenAI-Key)
|
||||
|
||||
```powershell
|
||||
# Falls Ordner nicht existiert:
|
||||
New-Item -ItemType Directory -Force -Path config
|
||||
```
|
||||
|
||||
In `config\aza_runtime.env` den OpenAI-Key eintragen:
|
||||
```
|
||||
OPENAI_API_KEY=sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
|
||||
```
|
||||
|
||||
#### `backend_url.txt`
|
||||
|
||||
Enthält die Backend-URL. Für lokale Entwicklung:
|
||||
```
|
||||
http://127.0.0.1:8000
|
||||
```
|
||||
|
||||
#### `backend_token.txt`
|
||||
|
||||
Muss den gleichen Token enthalten wie `MEDWORK_API_TOKEN` in `.env`.
|
||||
|
||||
#### `license_url.txt`
|
||||
|
||||
Enthält die License-Server-URL. Für lokale Entwicklung:
|
||||
```
|
||||
http://127.0.0.1:9000
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 3. Pfade in Build-Dateien anpassen
|
||||
|
||||
### 3.1 `aza_desktop.spec`
|
||||
|
||||
In Zeile 6 den `project_root`-Pfad anpassen:
|
||||
|
||||
```python
|
||||
project_root = Path(r"C:\Users\<USERNAME>\Documents\AZA\backup 24.2.26")
|
||||
```
|
||||
|
||||
### 3.2 `build_exe.ps1`
|
||||
|
||||
In Zeile 1 den Pfad anpassen:
|
||||
|
||||
```powershell
|
||||
$projectRoot = "C:\Users\<USERNAME>\Documents\AZA\backup 24.2.26"
|
||||
```
|
||||
|
||||
### 3.3 `build_installer.ps1`
|
||||
|
||||
In Zeile 1 den Pfad anpassen:
|
||||
|
||||
```powershell
|
||||
$projectRoot = "C:\Users\<USERNAME>\Documents\AZA\backup 24.2.26"
|
||||
```
|
||||
|
||||
### 3.4 `installer\aza_installer.iss`
|
||||
|
||||
In Zeile 7 und anderen Stellen die Pfade anpassen:
|
||||
|
||||
```
|
||||
#define MyAppSourceDir "C:\Users\<USERNAME>\Documents\AZA\backup 24.2.26\dist\aza_desktop"
|
||||
```
|
||||
|
||||
Sowie die Pfade für `OutputDir` und `SetupIconFile`.
|
||||
|
||||
> **Tipp:** Suche in allen `.ps1`, `.spec` und `.iss` Dateien nach dem alten Pfad `C:\Users\surov\Documents\AZA\backup 24.2.26` und ersetze ihn.
|
||||
|
||||
---
|
||||
|
||||
## 4. Entwicklung starten
|
||||
|
||||
### 4.1 Self-Check ausführen
|
||||
|
||||
```powershell
|
||||
.\check_dev_environment.ps1
|
||||
```
|
||||
|
||||
Alle Punkte sollten PASS zeigen.
|
||||
|
||||
### 4.2 App im Development-Modus starten
|
||||
|
||||
```powershell
|
||||
.\start_dev.ps1
|
||||
```
|
||||
|
||||
Oder manuell:
|
||||
|
||||
```powershell
|
||||
.\.venv\Scripts\Activate.ps1
|
||||
python basis14.py
|
||||
```
|
||||
|
||||
### 4.3 Nur Backend starten
|
||||
|
||||
```powershell
|
||||
.\.venv\Scripts\Activate.ps1
|
||||
python -c "import uvicorn; uvicorn.run('backend_main:app', host='127.0.0.1', port=8000)"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 5. Build erstellen
|
||||
|
||||
### 5.1 EXE-Build (PyInstaller)
|
||||
|
||||
```powershell
|
||||
.\build_exe.ps1
|
||||
```
|
||||
|
||||
Oder manuell:
|
||||
|
||||
```powershell
|
||||
.\.venv\Scripts\Activate.ps1
|
||||
pyinstaller aza_desktop.spec --noconfirm
|
||||
```
|
||||
|
||||
Ergebnis: `dist\aza_desktop\aza_desktop.exe`
|
||||
|
||||
### 5.2 Installer-Build (Inno Setup)
|
||||
|
||||
**Voraussetzung:** EXE-Build muss zuerst erfolgreich sein.
|
||||
|
||||
```powershell
|
||||
.\build_installer.ps1
|
||||
```
|
||||
|
||||
Ergebnis: `dist\installer\aza_desktop_setup.exe`
|
||||
|
||||
---
|
||||
|
||||
## 6. Bekannte Stolpersteine
|
||||
|
||||
| Problem | Ursache | Lösung |
|
||||
|---------|---------|--------|
|
||||
| `ModuleNotFoundError: numpy._core._exceptions` | numpy >= 2.4 inkompatibel mit PyInstaller 6.13.0 | `pip install "numpy==2.2.5"` |
|
||||
| PowerShell `&&` Operator unbekannt | PS 5.x kennt `&&` nicht | Befehle mit `;` trennen oder separate Aufrufe |
|
||||
| Inno Setup nicht gefunden | Nicht installiert | `build_installer.ps1` installiert automatisch |
|
||||
| `ffmpeg` nicht gefunden | Nicht im PATH | `winget install Gyan.FFmpeg` oder manuell installieren |
|
||||
| Linter-Warnungen (bcrypt, pyotp etc.) | Optionale Pakete / C-Extensions | Harmlos, ignorierbar |
|
||||
| `.env` fehlt | Nicht mit exportiert (Secrets!) | `.env.example` kopieren und Werte eintragen |
|
||||
|
||||
---
|
||||
|
||||
## 7. Projektstruktur (Übersicht)
|
||||
|
||||
```
|
||||
backup 24.2.26/
|
||||
├── basis14.py # Haupt-Desktop-App (Tkinter GUI)
|
||||
├── backend_main.py # FastAPI-Backend
|
||||
├── aza_*.py # Modulare Mixins und Hilfsmodule
|
||||
├── apps/diktat/ # Diktat-/Audio-Sub-App
|
||||
├── services/ # Backend-Services (Events, News, LLM)
|
||||
├── installer/ # Inno Setup Script
|
||||
├── deploy/ # Server-Deployment-Dateien
|
||||
├── security/ # Security-Handover-Docs
|
||||
├── legal/ # Rechtliche Dokumente
|
||||
├── data/ # Seed-Daten
|
||||
├── tools/ # Dev-Tools
|
||||
├── web/ # Web-Frontend (MVP)
|
||||
├── billing/ # Billing-Docs
|
||||
├── workforce_planner/ # Workforce-Planner-Modul
|
||||
├── aza_desktop.spec # PyInstaller Build-Spec
|
||||
├── build_exe.ps1 # EXE-Build-Script
|
||||
├── build_installer.ps1 # Installer-Build-Script
|
||||
├── start_dev.ps1 # Dev-Start-Script
|
||||
├── check_dev_environment.ps1 # Environment-Self-Check
|
||||
├── requirements-dev.txt # Gepinnte Abhängigkeiten
|
||||
├── .env.example # Env-Template (ohne Secrets)
|
||||
├── project_status.json # Projektstatus
|
||||
├── project_todos.json # Offene Aufgaben
|
||||
├── project_roadmap.json # Roadmap
|
||||
├── project_plan.json # Projektplan
|
||||
├── handover.md # Handover-Dokument
|
||||
├── project_handover.md # Projekt-Handover
|
||||
├── logo.ico / logo.png # App-Icon
|
||||
└── config/aza_runtime.env # OpenAI-Key (lokal, nicht exportiert)
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 8. Secrets – was NICHT im Export enthalten ist
|
||||
|
||||
| Datei | Inhalt | Aktion auf neuem PC |
|
||||
|-------|--------|---------------------|
|
||||
| `.env` | Stripe-Keys, API-Tokens | Aus `.env.example` erzeugen, echte Werte eintragen |
|
||||
| `config/aza_runtime.env` | OpenAI API Key | Manuell eintragen |
|
||||
| `backend_token.txt` | Backend-Auth-Token | Muss zum Token in `.env` passen |
|
||||
| `deploy/.env` | Produktions-Secrets | Nur für Server-Deployment relevant |
|
||||
|
||||
---
|
||||
|
||||
## 9. Audio-/Diktat-Abhängigkeiten im Detail
|
||||
|
||||
| Komponente | Typ | Paket | Hinweise |
|
||||
|-----------|------|-------|----------|
|
||||
| sounddevice | Python-Paket | `pip install sounddevice` | Nutzt PortAudio (in Paket enthalten) |
|
||||
| soundfile | Python-Paket | `pip install soundfile` | Nutzt libsndfile (in Paket enthalten) |
|
||||
| numpy | Python-Paket | `pip install "numpy==2.2.5"` | **MUSS < 2.4** für PyInstaller |
|
||||
| lameenc | Python-Paket | `pip install lameenc` | MP3-Encoding |
|
||||
| pygame | Python-Paket | `pip install pygame` | Audio-Playback |
|
||||
| ffmpeg | System-Tool | `winget install Gyan.FFmpeg` | Nicht Python, sondern extern im PATH |
|
||||
| pynput | Python-Paket | `pip install pynput` | Keyboard-Hotkeys für Diktat |
|
||||
64
AzA march 2026 - Kopie (18)/INSTALLATIONSANLEITUNG.md
Normal file
@@ -0,0 +1,64 @@
|
||||
# AZA Desktop – Installationsanleitung
|
||||
|
||||
## Schritt 1: Installation starten
|
||||
|
||||
1. Öffnen Sie die heruntergeladene Datei **aza_desktop_setup.exe**
|
||||
2. Falls Windows eine Sicherheitswarnung zeigt:
|
||||
- Klicken Sie auf **"Weitere Informationen"**
|
||||
- Dann auf **"Trotzdem ausführen"**
|
||||
- (Die App ist sicher – die Warnung erscheint, weil der Installer noch nicht digital signiert ist)
|
||||
3. Folgen Sie den Installationsschritten
|
||||
4. Die Installation dauert ca. 1–2 Minuten
|
||||
|
||||
## Schritt 2: AZA Desktop starten
|
||||
|
||||
1. Starten Sie **AZA Desktop** über das Desktop-Symbol oder das Startmenü
|
||||
2. Beim ersten Start erscheint der **Modulauswahl-Bildschirm** mit 6 Modulen
|
||||
|
||||
## Schritt 3: OpenAI-Key einrichten
|
||||
|
||||
Für die KI-Funktionen benötigen Sie einen eigenen OpenAI API-Key.
|
||||
|
||||
1. Beim ersten Start werden Sie automatisch zur Einrichtung geführt
|
||||
2. Falls nicht: Öffnen Sie den **Systemstatus** (unten im Launcher)
|
||||
3. Folgen Sie den Anweisungen im Setup-Helfer
|
||||
|
||||
### OpenAI-Key erhalten
|
||||
1. Erstellen Sie ein Konto auf https://platform.openai.com
|
||||
2. Gehen Sie zu **API Keys** → **Create new secret key**
|
||||
3. Kopieren Sie den Key (beginnt mit `sk-...`)
|
||||
4. Führen Sie den Setup-Helfer in AZA Desktop aus
|
||||
5. Der Key wird sicher auf Ihrem Computer gespeichert
|
||||
|
||||
## Schritt 4: Erste Verwendung
|
||||
|
||||
### KI-Assistent
|
||||
Stellen Sie medizinische Fragen – die KI liefert fachliche Antworten.
|
||||
|
||||
### Krankengeschichte
|
||||
1. Klicken Sie auf **"Krankengeschichte"**
|
||||
2. Drücken Sie den **Diktat-Button**
|
||||
3. Sprechen Sie Ihre Befunde
|
||||
4. Die KI transkribiert und strukturiert den Text
|
||||
|
||||
### Audio-Notizen
|
||||
Schnelle Sprachnotizen ohne KG-Struktur – ideal für kurze Gedanken im Praxisalltag.
|
||||
|
||||
## Hilfe bei Problemen
|
||||
|
||||
### Systemstatus prüfen
|
||||
Im Launcher unten finden Sie den **Systemstatus**. Dieser prüft automatisch:
|
||||
- Internetverbindung
|
||||
- OpenAI-Key
|
||||
- Backend-Status
|
||||
- Firewall
|
||||
- Lizenz
|
||||
|
||||
### Antivirus-Warnung
|
||||
Falls Ihr Antivirus die Installation blockiert:
|
||||
1. Deaktivieren Sie den Echtzeitschutz kurzzeitig
|
||||
2. Installieren Sie AZA Desktop
|
||||
3. Aktivieren Sie den Schutz wieder
|
||||
|
||||
### Kontakt
|
||||
Bei Fragen: info@aza-medwork.ch
|
||||
@@ -0,0 +1,17 @@
|
||||
<!DOCTYPE html><html><head><meta charset="utf-8"><title>Lernkarten Sätze</title></head><body>
|
||||
<p style="margin:8px 0; font-weight:bold;">Lernkarten Sätze</p>
|
||||
<p style="margin:8px 0; font-weight:bold;">Deutsch / Italienisch</p>
|
||||
<div style="background:#FFFFFF; padding:8px; margin:6px 0;">Ciao Paolo, wir müssen einfach vorwärts machen.</div>
|
||||
<div style="background:#E0F2F7; padding:8px; margin:6px 0;">Ciao Paolo, dobbiamo semplicemente andare avanti.</div>
|
||||
<div style="background:#FFFFFF; padding:8px; margin:6px 0;">Das alte Haus ist nicht fertig und das neue hängt hinterher.</div>
|
||||
<div style="background:#E0F2F7; padding:8px; margin:6px 0;">La vecchia casa non è finita e la nuova è in ritardo.</div>
|
||||
<div style="background:#FFFFFF; padding:8px; margin:6px 0;">Ich wollte dich nicht stressen mit dem Termin mit mir, obwohl ich<br>
|
||||
jeden Tag gewartet hatte, dass wir uns treffen.</div>
|
||||
<div style="background:#E0F2F7; padding:8px; margin:6px 0;">Non volevo metterti sotto stress con l'appuntamento con me, anche se<br>
|
||||
ho aspettato ogni giorno che ci incontrassimo.</div>
|
||||
<div style="background:#FFFFFF; padding:8px; margin:6px 0;">Wäre es allenfalls eine Option, dass jemand anders, der mehr Zeit hat,<br>
|
||||
die Bauleitung übernimmt und du den Entwurf planst, aber wir möchten<br>
|
||||
wirklich anfangen.</div>
|
||||
<div style="background:#E0F2F7; padding:8px; margin:6px 0;">Sarebbe un'opzione che qualcun altro, che ha più tempo, si occupi<br>
|
||||
della direzione dei lavori e tu pianifichi il progetto, ma vogliamo<br>
|
||||
davvero iniziare.</div></body></html>
|
||||
@@ -0,0 +1,25 @@
|
||||
Lernkarten Sätze
|
||||
|
||||
Deutsch / Italienisch
|
||||
|
||||
Ciao Paolo, wir müssen einfach vorwärts machen.
|
||||
---
|
||||
Ciao Paolo, dobbiamo semplicemente andare avanti.
|
||||
|
||||
Das alte Haus ist nicht fertig und das neue hängt hinterher.
|
||||
---
|
||||
La vecchia casa non è finita e la nuova è in ritardo.
|
||||
|
||||
Ich wollte dich nicht stressen mit dem Termin mit mir, obwohl ich
|
||||
jeden Tag gewartet hatte, dass wir uns treffen.
|
||||
---
|
||||
Non volevo metterti sotto stress con l'appuntamento con me, anche se
|
||||
ho aspettato ogni giorno che ci incontrassimo.
|
||||
|
||||
Wäre es allenfalls eine Option, dass jemand anders, der mehr Zeit hat,
|
||||
die Bauleitung übernimmt und du den Entwurf planst, aber wir möchten
|
||||
wirklich anfangen.
|
||||
---
|
||||
Sarebbe un'opzione che qualcun altro, che ha più tempo, si occupi
|
||||
della direzione dei lavori e tu pianifichi il progetto, ma vogliamo
|
||||
davvero iniziare.
|
||||
@@ -0,0 +1,23 @@
|
||||
{
|
||||
"titel": "Vorwärtsgehen: Schritte zur Veränderung",
|
||||
"datum": "15.02.2026",
|
||||
"uhrzeit": "10:09",
|
||||
"eintraege": [
|
||||
{
|
||||
"de": "Ciao Paolo, wir müssen einfach vorwärts machen.",
|
||||
"it": "Ciao Paolo, dobbiamo semplicemente andare avanti."
|
||||
},
|
||||
{
|
||||
"de": "Das alte Haus ist nicht fertig und das neue hängt hinterher.",
|
||||
"it": "La vecchia casa non è finita e la nuova è in ritardo."
|
||||
},
|
||||
{
|
||||
"de": "Ich wollte dich nicht stressen mit dem Termin mit mir, obwohl ich\njeden Tag gewartet hatte, dass wir uns treffen.",
|
||||
"it": "Non volevo metterti sotto stress con l'appuntamento con me, anche se\nho aspettato ogni giorno che ci incontrassimo."
|
||||
},
|
||||
{
|
||||
"de": "Wäre es allenfalls eine Option, dass jemand anders, der mehr Zeit hat,\ndie Bauleitung übernimmt und du den Entwurf planst, aber wir möchten\nwirklich anfangen.",
|
||||
"it": "Sarebbe un'opzione che qualcun altro, che ha più tempo, si occupi\ndella direzione dei lavori e tu pianifichi il progetto, ma vogliamo\ndavvero iniziare."
|
||||
}
|
||||
]
|
||||
}
|
||||
97
AzA march 2026 - Kopie (18)/README_ADMIN_PLAN.txt
Normal file
@@ -0,0 +1,97 @@
|
||||
AZA – Admin: Plan setzen (ohne UI)
|
||||
|
||||
Voraussetzung:
|
||||
- Setze die Umgebungsvariable AZA_ADMIN_KEY auf ein geheimes Passwort (z.B. "SUPERSECRET123")
|
||||
- Server neu starten
|
||||
|
||||
Endpoint:
|
||||
POST /admin/set_plan
|
||||
|
||||
Body (JSON):
|
||||
{
|
||||
"admin_key": "SUPERSECRET123",
|
||||
"email": "user@example.com",
|
||||
"plan": "pro"
|
||||
}
|
||||
|
||||
Beispiele für Plans:
|
||||
- basic -> Default Geräte-Limit (AZA_DEVICE_LIMIT_DEFAULT, typischerweise 2)
|
||||
- pro -> 4 Geräte (Fallback) oder via AZA_PLAN_LIMITS
|
||||
- custom -> nur sinnvoll, wenn AZA_PLAN_LIMITS gesetzt ist, z.B.:
|
||||
AZA_PLAN_LIMITS="basic:2,pro:4,business:10"
|
||||
|
||||
Hinweis:
|
||||
- Plan-Änderung wirkt beim nächsten /license/check sofort.
|
||||
|
||||
|
||||
---
|
||||
Admin: Token sperren (Logout / Sperre)
|
||||
|
||||
Endpoint:
|
||||
POST /admin/revoke_token
|
||||
|
||||
Body (JSON):
|
||||
{
|
||||
"admin_key": "SUPERSECRET123",
|
||||
"token": "PASTE_TOKEN_HERE"
|
||||
}
|
||||
|
||||
Ergebnis:
|
||||
- Das Token ist danach ungültig (revoked=1)
|
||||
- /license/check gibt dann 401 "Invalid or revoked token"
|
||||
|
||||
|
||||
---
|
||||
Admin: Account-Status setzen (Abo-Status)
|
||||
|
||||
Ziel:
|
||||
- active = normal
|
||||
- suspended = Zugriff gesperrt (z.B. Zahlung fehlgeschlagen)
|
||||
- cancelled = Zugriff gesperrt (Abo beendet)
|
||||
|
||||
Endpoint:
|
||||
POST /admin/set_status
|
||||
|
||||
Body (JSON):
|
||||
{
|
||||
"admin_key": "SUPERSECRET123",
|
||||
"email": "user@example.com",
|
||||
"status": "suspended"
|
||||
}
|
||||
|
||||
Hinweis:
|
||||
- Blockt sowohl /login als auch /license/check mit HTTP 403 "Account not active".
|
||||
|
||||
|
||||
---
|
||||
Audit-Log (Admin-Aktionen)
|
||||
|
||||
Was wird geloggt:
|
||||
- set_plan (alt -> neu)
|
||||
- set_status (alt -> neu)
|
||||
- revoke_token (active -> revoked)
|
||||
|
||||
Wo:
|
||||
- SQLite Tabelle: admin_audit
|
||||
|
||||
Spalten:
|
||||
- action, email, token, old_value, new_value, created_at
|
||||
|
||||
Hinweis:
|
||||
- Das Audit-Log ist rein serverseitig (kein UI), dient Support & Nachvollziehbarkeit.
|
||||
|
||||
|
||||
---
|
||||
Audit-Log anzeigen (Admin)
|
||||
|
||||
Endpoint:
|
||||
POST /admin/audit/list
|
||||
|
||||
Body (JSON):
|
||||
{
|
||||
"admin_key": "SUPERSECRET123",
|
||||
"limit": 50
|
||||
}
|
||||
|
||||
Ergebnis:
|
||||
- Liste der letzten Audit-Einträge (neueste zuerst)
|
||||
31
AzA march 2026 - Kopie (18)/README_ARCHITECTURE.txt
Normal file
@@ -0,0 +1,31 @@
|
||||
AZA License Server – Architektur (MVP)
|
||||
|
||||
Dateien
|
||||
1) license_server.py
|
||||
- Public API:
|
||||
- /register
|
||||
- /login
|
||||
- /license/check
|
||||
- /health
|
||||
- DB-Init/Migration (SQLite)
|
||||
- Token-Generierung + Token-Rotation (max 3 aktive Tokens/User)
|
||||
- Plan/Device-Limit Logik (basic=3, team=5)
|
||||
- Account-Status Gate (active/suspended/cancelled)
|
||||
|
||||
2) admin_routes.py
|
||||
- Admin API (AZA_ADMIN_KEY erforderlich):
|
||||
- POST /admin/set_plan
|
||||
- POST /admin/set_status
|
||||
- POST /admin/revoke_token
|
||||
- POST /admin/audit/list
|
||||
- Audit-Logging via admin_audit Tabelle
|
||||
|
||||
DB
|
||||
- users (email, plan, status, ...)
|
||||
- tokens (token, user_id, revoked, ...)
|
||||
- devices (user_id, device_id, ...)
|
||||
- admin_audit (action, email, token, old_value, new_value, created_at)
|
||||
|
||||
Hinweis
|
||||
- Admin-Endpunkte sind bewusst ohne UI (Support/MVP).
|
||||
- Payment (Stripe) kommt später über Status/Plan-Updates.
|
||||
57
AzA march 2026 - Kopie (18)/README_STRIPE_PRICES_CHF.txt
Normal file
@@ -0,0 +1,57 @@
|
||||
AZA – Stripe Preise (CHF) korrekt anlegen + in .env eintragen
|
||||
|
||||
Ziel (Pricing)
|
||||
- Basic: 59 CHF / Monat
|
||||
- Basic: 590 CHF / Jahr (entspricht ~16.7% Rabatt gegenüber 59*12=708)
|
||||
- Team: 89 CHF / Monat
|
||||
- Team: 890 CHF / Jahr (entspricht ~16.7% Rabatt gegenüber 89*12=1068)
|
||||
|
||||
Stripe Prinzip
|
||||
- Abos nutzen "recurring prices".
|
||||
- Für unterschiedliche Intervalle (monatlich/jährlich) erstellt man mehrere Prices. (Stripe Empfehlung)
|
||||
Quelle: Stripe Docs (Products & Prices).
|
||||
|
||||
Schritt A – 2 Produkte anlegen (im Stripe Dashboard)
|
||||
1) Product: "AZA Basic"
|
||||
2) Product: "AZA Team"
|
||||
Quelle: Stripe Support "How to create products and prices".
|
||||
|
||||
Schritt B – 4 Prices anlegen (CHF, recurring)
|
||||
Für "AZA Basic":
|
||||
- Price 1: 59 CHF, Recurring, Interval: Monthly
|
||||
- Price 2: 590 CHF, Recurring, Interval: Yearly
|
||||
|
||||
Für "AZA Team":
|
||||
- Price 3: 89 CHF, Recurring, Interval: Monthly
|
||||
- Price 4: 890 CHF, Recurring, Interval: Yearly
|
||||
|
||||
Wichtig (TWINT in Checkout)
|
||||
- Für TWINT muss die Währung CHF sein und Checkout muss twint als payment_method_types erlauben.
|
||||
Quelle: Stripe TWINT Docs.
|
||||
|
||||
Schritt C – Price IDs aus Stripe kopieren
|
||||
Stripe zeigt pro Price eine ID, typischerweise "price_...".
|
||||
Kopiere diese 4 IDs hier rein:
|
||||
|
||||
1) BASIC_MONTHLY = price____________________
|
||||
2) BASIC_YEARLY = price____________________
|
||||
3) TEAM_MONTHLY = price____________________
|
||||
4) TEAM_YEARLY = price____________________
|
||||
|
||||
Schritt D – .env per Composer aktualisieren (nicht manuell)
|
||||
Sobald du die 4 price_... IDs hast:
|
||||
- Du sendest mir die 4 IDs hier im Chat (nur die vier Zeilen reichen).
|
||||
- Ich gebe dir dann genau EINEN Composer-Text, der die .env 1:1 überschreibt und die Platzhalter ersetzt.
|
||||
|
||||
Hinweis zur Kündigung (wie bei uns geplant)
|
||||
- Monatsabo: kündbar, Zugriff bis Periodenende.
|
||||
- Jahresabo: kündbar, Zugriff bis Periodenende (Ende Jahresperiode).
|
||||
Stripe: "cancel_at_period_end=true" bedeutet genau das.
|
||||
Quelle: Stripe Docs "Cancel subscriptions".
|
||||
|
||||
Quellen (Stripe)
|
||||
- Manage products and prices
|
||||
- How products and prices work
|
||||
- How to create products and prices (Support)
|
||||
- TWINT payments
|
||||
- Cancel subscriptions
|
||||
52
AzA march 2026 - Kopie (18)/README_STRIPE_SETUP.txt
Normal file
@@ -0,0 +1,52 @@
|
||||
AZA – Stripe Vorbereitung (Webhook Skeleton)
|
||||
|
||||
Ziel
|
||||
- Stripe Webhooks sicher empfangen (Signaturprüfung)
|
||||
- Events auditieren (admin_audit)
|
||||
- Noch KEINE Plan/Status-Automatik
|
||||
|
||||
Voraussetzung
|
||||
1) Stripe Python SDK installieren:
|
||||
pip install stripe
|
||||
|
||||
2) Umgebungsvariablen setzen:
|
||||
- AZA_STRIPE_SECRET_KEY="sk_live_..."
|
||||
- AZA_STRIPE_WEBHOOK_SECRET="whsec_..."
|
||||
|
||||
Webhook Endpoint
|
||||
- POST /stripe/webhook
|
||||
|
||||
Was passiert aktuell
|
||||
- Signatur wird geprüft
|
||||
- Event wird in admin_audit geloggt:
|
||||
action = "stripe:<event_type>"
|
||||
token = "<event_id>"
|
||||
old_value = "received"
|
||||
new_value = "ok"
|
||||
|
||||
Wichtig
|
||||
- Der Server muss öffentlich erreichbar sein, damit Stripe den Webhook aufrufen kann.
|
||||
- Im Stripe Dashboard muss ein Webhook mit deiner URL auf /stripe/webhook zeigen.
|
||||
|
||||
Nächster Schritt
|
||||
- Stripe Events auswerten und daraus users.status / users.plan setzen.
|
||||
(z.B. invoice.payment_succeeded -> active, invoice.payment_failed -> suspended)
|
||||
|
||||
|
||||
Test (lokal / dev)
|
||||
1) Stripe CLI installieren und einloggen:
|
||||
stripe login
|
||||
|
||||
2) Webhook lokal weiterleiten (Port/Host anpassen):
|
||||
stripe listen --forward-to http://localhost:9000/stripe/webhook
|
||||
|
||||
Die CLI zeigt dabei einen Webhook-Secret an (whsec_...), den du als
|
||||
AZA_STRIPE_WEBHOOK_SECRET setzen musst.
|
||||
|
||||
3) Test-Event auslösen:
|
||||
stripe trigger invoice.payment_succeeded
|
||||
|
||||
4) Prüfen:
|
||||
- Server sollte HTTP 200 {"ok": true} zurückgeben
|
||||
- Audit-Eintrag erscheint über:
|
||||
POST /admin/audit/list
|
||||
18
AzA march 2026 - Kopie (18)/RESTORE_PICK.bat
Normal file
@@ -0,0 +1,18 @@
|
||||
@echo off
|
||||
setlocal EnableExtensions
|
||||
cd /d "%~dp0"
|
||||
|
||||
set "TARGET=%~1"
|
||||
if "%TARGET%"=="" set "TARGET=basis14.py"
|
||||
|
||||
echo.
|
||||
echo ==========================================
|
||||
echo RESTORE PICK (letzte 10 Backups)
|
||||
echo ==========================================
|
||||
echo Ziel: %TARGET%
|
||||
echo.
|
||||
|
||||
powershell -NoProfile -ExecutionPolicy Bypass -File "%~dp0_restore_pick.ps1" "%TARGET%" 10
|
||||
echo.
|
||||
pause
|
||||
endlocal
|
||||
121
AzA march 2026 - Kopie (18)/RUN_AZA_ONECLICK.bat
Normal file
@@ -0,0 +1,121 @@
|
||||
@echo off
|
||||
setlocal EnableExtensions DisableDelayedExpansion
|
||||
cd /d "%~dp0"
|
||||
|
||||
set "URLFILE=%~dp0backend_url.txt"
|
||||
set "TOKENFILE=%~dp0backend_token.txt"
|
||||
set "CLIENT=%~dp0basis14.py"
|
||||
set "TIMEOUT_SEC=15"
|
||||
|
||||
echo.
|
||||
echo ==========================================
|
||||
echo AZA ONE-CLICK START (Backend -> AZA)
|
||||
echo Ordner: %~dp0
|
||||
echo ==========================================
|
||||
echo.
|
||||
|
||||
set "BACKEND_START=%~dp0start_backend_autoport.bat"
|
||||
if not exist "%BACKEND_START%" (
|
||||
for %%F in ("%~dp0start_backend*.bat") do (
|
||||
set "BACKEND_START=%%~fF"
|
||||
goto FOUND_BACKEND
|
||||
)
|
||||
)
|
||||
:FOUND_BACKEND
|
||||
|
||||
if not exist "%BACKEND_START%" (
|
||||
echo FEHLER: Kein Backend-Startscript gefunden.
|
||||
echo Erwartet: start_backend_autoport.bat ODER start_backend*.bat
|
||||
echo.
|
||||
dir /b "%~dp0*.bat"
|
||||
echo.
|
||||
pause
|
||||
exit /b 1
|
||||
)
|
||||
|
||||
if not exist "%CLIENT%" (
|
||||
echo FEHLER: basis14.py nicht gefunden:
|
||||
echo %CLIENT%
|
||||
echo.
|
||||
pause
|
||||
exit /b 1
|
||||
)
|
||||
|
||||
if not exist "%TOKENFILE%" (
|
||||
echo dev-secret-2026> "%TOKENFILE%"
|
||||
)
|
||||
|
||||
echo [1/4] Starte Backend...
|
||||
echo %BACKEND_START%
|
||||
call "%BACKEND_START%"
|
||||
if errorlevel 1 (
|
||||
echo FEHLER: Backend-Start fehlgeschlagen.
|
||||
echo Siehe ggf. logs\backend.log
|
||||
pause
|
||||
exit /b 6
|
||||
)
|
||||
|
||||
echo [2/4] Warte auf backend_url.txt (max %TIMEOUT_SEC%s)...
|
||||
set /a "i=0"
|
||||
:wait_backend_url
|
||||
if exist "%URLFILE%" goto backend_url_ready
|
||||
set /a "i+=1"
|
||||
if %i% GEQ %TIMEOUT_SEC% goto backend_url_timeout
|
||||
timeout /t 1 /nobreak >nul
|
||||
goto wait_backend_url
|
||||
|
||||
:backend_url_timeout
|
||||
echo FEHLER: backend_url.txt nicht gefunden nach %TIMEOUT_SEC%s.
|
||||
echo Pfad: %URLFILE%
|
||||
echo Siehe ggf. logs\backend.log
|
||||
pause
|
||||
exit /b 2
|
||||
|
||||
:backend_url_ready
|
||||
set "BURL="
|
||||
for /f "usebackq delims=" %%U in ("%URLFILE%") do if not defined BURL set "BURL=%%U"
|
||||
set "BTOK="
|
||||
for /f "usebackq delims=" %%T in ("%TOKENFILE%") do if not defined BTOK set "BTOK=%%T"
|
||||
|
||||
if "%BURL%"=="" (
|
||||
echo FEHLER: backend_url.txt ist leer.
|
||||
pause
|
||||
exit /b 3
|
||||
)
|
||||
if "%BTOK%"=="" (
|
||||
echo FEHLER: backend_token.txt ist leer.
|
||||
pause
|
||||
exit /b 4
|
||||
)
|
||||
|
||||
set "MEDWORK_BACKEND_URL=%BURL%"
|
||||
set "MEDWORK_API_TOKEN=%BTOK%"
|
||||
|
||||
echo [3/4] ENV gesetzt:
|
||||
echo MEDWORK_BACKEND_URL=%MEDWORK_BACKEND_URL%
|
||||
echo MEDWORK_API_TOKEN=(gesetzt)
|
||||
|
||||
echo [4/4] Starte AZA...
|
||||
echo.
|
||||
|
||||
where python >nul 2>&1
|
||||
if errorlevel 1 goto TRY_PY
|
||||
|
||||
python "%CLIENT%"
|
||||
goto DONE
|
||||
|
||||
:TRY_PY
|
||||
where py >nul 2>&1
|
||||
if errorlevel 1 goto NO_PY
|
||||
|
||||
py -3 "%CLIENT%"
|
||||
goto DONE
|
||||
|
||||
:NO_PY
|
||||
echo FEHLER: Weder python noch py gefunden.
|
||||
pause
|
||||
exit /b 5
|
||||
|
||||
:DONE
|
||||
endlocal
|
||||
exit /b 0
|
||||
10
AzA march 2026 - Kopie (18)/RUN_START_ALL_DEBUG.bat
Normal file
@@ -0,0 +1,10 @@
|
||||
@echo off
|
||||
setlocal
|
||||
cd /d "%~dp0"
|
||||
echo ===== %date% %time% ===== > "%~dp0start_all_debug_log.txt"
|
||||
call "%~dp0start_all.bat" >> "%~dp0start_all_debug_log.txt" 2>&1
|
||||
echo.>> "%~dp0start_all_debug_log.txt"
|
||||
echo ExitCode=%errorlevel% >> "%~dp0start_all_debug_log.txt"
|
||||
type "%~dp0start_all_debug_log.txt"
|
||||
echo.
|
||||
pause
|
||||
170
AzA march 2026 - Kopie (18)/SIGNING_READINESS.md
Normal file
@@ -0,0 +1,170 @@
|
||||
# AZA Desktop — Signing Readiness
|
||||
|
||||
## Warum Code Signing?
|
||||
|
||||
Windows **Smart App Control** (ab Windows 11 22H2) blockiert unbekannte und unsignierte
|
||||
Anwendungen automatisch. Auch ohne Smart App Control zeigt Windows SmartScreen bei
|
||||
unsignierten Downloads Warnungen, die Kunden verunsichern.
|
||||
|
||||
**Ziel:** Alle Kundenauslieferungen von AZA Desktop werden mit einem gültigen
|
||||
Code-Signing-Zertifikat signiert, um Blockaden und Warnungen zu vermeiden.
|
||||
|
||||
---
|
||||
|
||||
## Artefakte, die signiert werden müssen
|
||||
|
||||
### Priorität 1 — Kritisch (blockiert Smart App Control)
|
||||
|
||||
| Artefakt | Pfad (nach Build) | Warum |
|
||||
|---|---|---|
|
||||
| **Haupt-EXE** | `dist\aza_desktop\aza_desktop.exe` | Wird direkt ausgeführt, primäres Ziel von Smart App Control |
|
||||
| **Installer** | `dist\installer\aza_desktop_setup.exe` | Erste Datei die der Kunde herunterlädt und startet |
|
||||
|
||||
### Priorität 2 — Empfohlen (verhindert DLL-Warnungen)
|
||||
|
||||
| Artefakt | Pfad (nach Build) | Warum |
|
||||
|---|---|---|
|
||||
| **DLLs in _internal** | `dist\aza_desktop\_internal\*.dll` | Werden zur Laufzeit geladen; einige Sicherheits-Tools prüfen auch geladene DLLs |
|
||||
| **PYDs in _internal** | `dist\aza_desktop\_internal\*.pyd` | Python-Extensions (sind technisch DLLs) |
|
||||
|
||||
### Nicht signieren
|
||||
|
||||
- Daten-Dateien (`.json`, `.sqlite`, `.txt`, `.env`, `.png`, `.ico`)
|
||||
- Python-Bytecode (`.pyc`)
|
||||
- Konfigurationsdateien
|
||||
|
||||
---
|
||||
|
||||
## Signing-Reihenfolge
|
||||
|
||||
Die Reihenfolge ist wichtig, da der Installer die bereits signierten Dateien einpackt:
|
||||
|
||||
```
|
||||
1. build_exe.ps1 → aza_desktop.exe + _internal/
|
||||
2. sign_release.ps1 → DLLs/PYDs signieren, dann EXE signieren
|
||||
3. build_installer.ps1 → Installer aus signierten Dateien bauen
|
||||
4. sign_release.ps1 → Installer signieren (oder im selben Lauf)
|
||||
5. build_release_artifacts.ps1 → SHA256-Hashes aktualisieren
|
||||
6. build_publish_bundle.ps1 → Publish-Bundle erstellen
|
||||
```
|
||||
|
||||
In der aktuellen Pipeline (`build_and_test_release.ps1`) ist der Signing-Schritt
|
||||
bereits als optionaler Schritt zwischen Installer-Build und Smoke-Test eingefügt.
|
||||
Er wird nur ausgeführt, wenn ein Zertifikat konfiguriert ist.
|
||||
|
||||
---
|
||||
|
||||
## Zertifikat beschaffen
|
||||
|
||||
### Option A: Klassisches EV Code-Signing-Zertifikat (empfohlen für KMU)
|
||||
|
||||
- Anbieter: DigiCert, Sectigo, GlobalSign
|
||||
- Typ: **EV Code Signing** (Extended Validation)
|
||||
- Vorteile:
|
||||
- Sofortige SmartScreen-Reputation (kein Reputation-Aufbau nötig)
|
||||
- Höchstes Vertrauen bei Smart App Control
|
||||
- Lieferform: Hardware-Token (USB) oder Cloud-HSM
|
||||
- Kosten: ca. CHF 400–600 / Jahr
|
||||
- Validierung: Firma muss im Handelsregister eingetragen sein
|
||||
|
||||
### Option B: Azure Trusted Signing (Microsoft)
|
||||
|
||||
- Dienst: Azure Trusted Signing (ehemals Azure Code Signing)
|
||||
- Vorteile:
|
||||
- Direkte Microsoft-Reputation
|
||||
- Kein Hardware-Token nötig
|
||||
- CI/CD-Integration via Azure CLI
|
||||
- Voraussetzungen:
|
||||
- Azure-Konto
|
||||
- Identitätsvalidierung über Microsoft
|
||||
- Kosten: ab ca. USD 10 / Monat (Basic-Tier)
|
||||
|
||||
### Empfehlung
|
||||
|
||||
Für **AZA MedWork** empfehle ich **Option A (EV Code Signing)**, weil:
|
||||
- Sofortige SmartScreen-Reputation ohne Aufbauphase
|
||||
- Funktioniert unabhängig von Cloud-Diensten
|
||||
- Breite Akzeptanz bei allen Windows-Versionen
|
||||
|
||||
---
|
||||
|
||||
## Umgebungsvariablen für sign_release.ps1
|
||||
|
||||
| Variable | Beschreibung | Pflicht |
|
||||
|---|---|---|
|
||||
| `AZA_SIGN_CERT_THUMBPRINT` | SHA-1 Thumbprint des Zertifikats im Windows Certificate Store | Eins von beiden |
|
||||
| `AZA_SIGN_PFX_PATH` | Pfad zur .pfx-Datei | Eins von beiden |
|
||||
| `AZA_SIGN_PFX_PASSWORD` | Passwort der .pfx-Datei | Optional |
|
||||
| `AZA_SIGN_TIMESTAMP_URL` | Timestamp-Server (Standard: `http://timestamp.digicert.com`) | Optional |
|
||||
| `AZA_SIGNTOOL_PATH` | Pfad zu signtool.exe (Standard: automatische Suche) | Optional |
|
||||
|
||||
**Wichtig:** Keine Zertifikate oder Passwörter ins Repository committen!
|
||||
|
||||
---
|
||||
|
||||
## Konsistente Publisher-Identität
|
||||
|
||||
Für optimale SmartScreen-/Smart-App-Control-Reputation:
|
||||
|
||||
- **Immer denselben Publisher-Namen** verwenden
|
||||
- **Immer dasselbe Zertifikat** für alle Releases verwenden
|
||||
- Der Publisher-Name im Inno-Setup-Installer (`AppPublisher`) muss mit dem
|
||||
Zertifikats-Subject übereinstimmen
|
||||
- Nach Festlegung den Publisher-Namen **nicht mehr ändern** (Reputationsverlust)
|
||||
|
||||
### Aktuelle Namensformen im Projekt (Stand 2026-03-23)
|
||||
|
||||
| Namensform | Rolle | Signing-relevant |
|
||||
|---|---|---|
|
||||
| **AZA MedWork** | Firma / Publisher | **JA** |
|
||||
| **AZA Desktop** | Produktname | Nein |
|
||||
| **AZA Medical AI Assistant** | Interner Projektname / Marketing | Nein |
|
||||
|
||||
Aktueller Wert in `installer\aza_installer.iss`: `AppPublisher = "AZA MedWork"`
|
||||
|
||||
### Vor Zertifikatskauf prüfen
|
||||
|
||||
1. Wie lautet der offizielle Firmenname im Handelsregister?
|
||||
(z.B. "AZA MedWork", "AZA MedWork GmbH", "MedWork GmbH")
|
||||
2. EV-Zertifikats-Anbieter validiert gegen den HR-Eintrag
|
||||
3. Subject/CN/O im Zertifikat muss EXAKT zum HR-Namen passen
|
||||
4. `AppPublisher` in `aza_installer.iss` muss EXAKT zum Zertifikat passen
|
||||
5. Falls HR-Name von "AZA MedWork" abweicht, folgende Stellen gemeinsam anpassen:
|
||||
- `installer/aza_installer.iss` → `MyAppPublisher`
|
||||
- `legal/privacy_policy.md`
|
||||
- `legal/ai_consent.md`
|
||||
- `deploy/WOOCOMMERCE_PRODUCT.md` (Absendername)
|
||||
- `deploy/WORDPRESS_GOLIVE.md` (Absendername)
|
||||
- `apps/diktat/diktat_app.py` (Docstring)
|
||||
- `aza_consent.py` (Docstring)
|
||||
|
||||
### Nicht anpassen (nicht signing-relevant)
|
||||
|
||||
- `project_status.json` → "AZA Medical AI Assistant" (interner Projektname)
|
||||
- `billing/invoice_template.json` → "AZA Medical AI Assistant License" (Rechnungsposition)
|
||||
- `web/index.html`, `web/download.html` → Marketing-Footer
|
||||
|
||||
---
|
||||
|
||||
## Checkliste vor erstem signierten Release
|
||||
|
||||
- [ ] EV Code-Signing-Zertifikat beschafft
|
||||
- [ ] signtool.exe installiert (Windows SDK)
|
||||
- [ ] Umgebungsvariablen gesetzt (AZA_SIGN_CERT_THUMBPRINT oder PFX)
|
||||
- [ ] `sign_release.ps1 -DryRun` erfolgreich durchlaufen
|
||||
- [ ] `sign_release.ps1` erfolgreich durchlaufen (echter Signing-Lauf)
|
||||
- [ ] Signatur mit `Get-AuthenticodeSignature` verifiziert
|
||||
- [ ] `build_release_artifacts.ps1` erneut ausgeführt (Hashes aktualisiert)
|
||||
- [ ] Installer auf sauberem Windows-PC getestet (Smart App Control aktiv)
|
||||
- [ ] Publisher-Name im Zertifikat stimmt mit Inno-Setup `AppPublisher` überein
|
||||
|
||||
---
|
||||
|
||||
## Dateien in diesem Projekt (Signing-bezogen)
|
||||
|
||||
| Datei | Zweck |
|
||||
|---|---|
|
||||
| `sign_release.ps1` | Signing-Skript (signiert EXE, DLLs, Installer) |
|
||||
| `build_and_test_release.ps1` | Release-Pipeline (Signing-Schritt integriert) |
|
||||
| `build_release_artifacts.ps1` | Artefakt-Report (enthält jetzt Signatur-Status) |
|
||||
| `SIGNING_READINESS.md` | Diese Dokumentation |
|
||||
40
AzA march 2026 - Kopie (18)/START_AZA.bat
Normal file
@@ -0,0 +1,40 @@
|
||||
@echo off
REM START_AZA.bat — one-click start for AZA.
REM (1) launches the backend on a free port, (2) reads the URL/token files
REM the backend writes next to this script, (3) starts the desktop client
REM with the matching environment variables.
setlocal EnableDelayedExpansion
cd /d "%~dp0"

echo === (1/3) Starte Backend (Auto-Port) ===
REM start_backend_autoport.bat is expected to create backend_url.txt and
REM backend_token.txt in this directory on success.
call "%~dp0start_backend_autoport.bat"
if errorlevel 1 (
    echo Backend-Start fehlgeschlagen.
    pause
    exit /b 1
)

echo === (2/3) Lese Backend URL + Token ===
set "BACKEND_URL="
set "API_TOKEN="

REM for /f iterates every line and overwrites the variable each time,
REM so the LAST non-empty line of each file wins.
for /f "usebackq delims=" %%A in ("%~dp0backend_url.txt") do set "BACKEND_URL=%%A"
for /f "usebackq delims=" %%B in ("%~dp0backend_token.txt") do set "API_TOKEN=%%B"

if "!BACKEND_URL!"=="" (
    echo backend_url.txt ist leer oder fehlt.
    pause
    exit /b 1
)

if "!API_TOKEN!"=="" (
    echo backend_token.txt ist leer oder fehlt.
    pause
    exit /b 1
)

echo Backend URL: !BACKEND_URL!
echo Token: (gesetzt)

echo === (3/3) Starte AZA Client mit ENV ===
REM The client reads these two variables at startup.
set "MEDWORK_BACKEND_URL=!BACKEND_URL!"
set "MEDWORK_API_TOKEN=!API_TOKEN!"
python "%~dp0basis14.py"

pause
|
||||
1
AzA march 2026 - Kopie (18)/VERSION
Normal file
@@ -0,0 +1 @@
|
||||
0.1.0
|
||||
6
AzA march 2026 - Kopie (18)/_build_info.py
Normal file
@@ -0,0 +1,6 @@
|
||||
# Auto-generated by aza_build_stamp.py – DO NOT EDIT
# Build metadata stamped into the application at build time.
BUILD_TIME = "2026-04-18 00:22:34"   # human-readable build time
BUILD_TIMESTAMP = "20260418_002234"  # filesystem-safe variant of BUILD_TIME
GIT_COMMIT = "7ed33c3f"              # short commit hash the build was made from
GIT_BRANCH = "main"                  # branch at build time
GIT_DIRTY = True                     # True = working tree had uncommitted changes
|
||||
6
AzA march 2026 - Kopie (18)/_check_license_debug.py
Normal file
@@ -0,0 +1,6 @@
|
||||
# Debug helper: inspect the Stripe-webhook license database.
# Prints the most recent license row and whether the licensed_devices
# table exists.
import sqlite3
from contextlib import closing

# NOTE(review): hard-coded developer-machine path — adjust before reuse.
db = r"C:\Users\surov\Documents\AZA\backup 24.2.26\data\stripe_webhook.sqlite"

# closing() guarantees the connection is released even if a query raises
# (the original left it open on error).
with closing(sqlite3.connect(db)) as con:
    print("LATEST_LICENSE_ROW =", con.execute(
        "SELECT status, current_period_end, customer_email, updated_at "
        "FROM licenses ORDER BY updated_at DESC LIMIT 1").fetchone())
    print("DEVICES_TABLE_EXISTS =", con.execute(
        "SELECT name FROM sqlite_master WHERE type='table' AND name='licensed_devices'").fetchone())
|
||||
16
AzA march 2026 - Kopie (18)/_count_lines.py
Normal file
@@ -0,0 +1,16 @@
|
||||
"""Count lines of the refactored AZA modules and compare against the
pre-refactor size of basis14.py.

Fixes over the original: file handles are closed deterministically (the
original used ``open()`` inside generator expressions and never closed
them), basis14.py is counted once instead of twice, and the unused
``import os`` is gone.
"""


def _line_count(path):
    """Return the number of lines in *path* (UTF-8), closing the file."""
    with open(path, encoding='utf-8') as fh:
        return sum(1 for _ in fh)


files = [
    'basis14.py', 'aza_config.py', 'aza_prompts.py', 'aza_ui_helpers.py',
    'aza_persistence.py', 'aza_audio.py', 'aza_todo_mixin.py',
    'aza_text_windows_mixin.py', 'aza_diktat_mixin.py',
    'aza_settings_mixin.py', 'aza_ordner_mixin.py',
]

BASELINE = 11421  # line count of basis14.py before the refactor

total = 0
for f in files:
    n = _line_count(f)
    total += n
    print(f" {f:35s} {n:>5d} Zeilen")
print(f" {'─' * 42}")
print(f" {'GESAMT':35s} {total:>5d} Zeilen")

basis_now = _line_count('basis14.py')  # count once, reuse below
print(f"\n Vorher: basis14.py allein = {BASELINE} Zeilen")
print(f" Nachher: basis14.py = {basis_now} Zeilen ({100 - round(basis_now / BASELINE * 100)}% kleiner)")
|
||||
16
AzA march 2026 - Kopie (18)/_create_licensed_devices.py
Normal file
@@ -0,0 +1,16 @@
|
||||
# One-off migration: ensure the licensed_devices table exists in the
# Stripe-webhook database.
import sqlite3
from contextlib import closing

# NOTE(review): hard-coded developer-machine path — adjust before reuse.
db = r"C:\Users\surov\Documents\AZA\backup 24.2.26\data\stripe_webhook.sqlite"

# closing() releases the connection even if the DDL fails
# (the original left it open on error).
with closing(sqlite3.connect(db)) as con:
    con.execute("""
    CREATE TABLE IF NOT EXISTS licensed_devices (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        customer_email TEXT NOT NULL,
        user_key TEXT NOT NULL,
        device_id TEXT,
        first_seen INTEGER,
        last_seen INTEGER
    )
    """)
    con.commit()
    print("licensed_devices table ready")
|
||||
96
AzA march 2026 - Kopie (18)/_restore_pick.ps1
Normal file
@@ -0,0 +1,96 @@
|
||||
# _restore_pick.ps1 — interactive restore from the _auto_backups folder.
# Lists up to -Limit newest backup candidates for -TargetPath, lets the
# user pick one by number, backs up the current file via _make_backup.ps1,
# then copies the chosen backup over the target.
param(
    [string]$TargetPath = "basis14.py",
    [int]$Limit = 10
)

$ErrorActionPreference = 'Stop'

$base = Split-Path -Parent $MyInvocation.MyCommand.Path
$backupRoot = Join-Path $base "_auto_backups"

if (!(Test-Path $backupRoot)) {
    Write-Host "FEHLER: Backup-Ordner nicht gefunden: $backupRoot"
    exit 1
}

# Fully qualify the target (if the file does not exist, resolve relative to the project)
try {
    $targetFull = (Resolve-Path $TargetPath).Path
} catch {
    $targetFull = (Join-Path $base $TargetPath)
}

# Relative path (used for the exact match below)
$rel = $targetFull
if ($targetFull.ToLower().StartsWith($base.ToLower())) {
    $rel = $targetFull.Substring($base.Length).TrimStart('\')
} else {
    $rel = Split-Path -Leaf $targetFull
}
$leaf = Split-Path -Leaf $rel

# Collect candidates.
# BUGFIX: the original compared against ("\\" + $rel). In PowerShell the
# backslash is NOT an escape character, so "\\" is two literal backslashes
# and the exact-path match could never succeed — the script always fell
# back to the name-only match. A single backslash separator is correct.
$exact = Get-ChildItem -Path $backupRoot -Recurse -File -ErrorAction SilentlyContinue |
    Where-Object { $_.FullName.ToLower().EndsWith(('\' + $rel).ToLower()) }

$cands = @()
if ($exact -and $exact.Count -gt 0) {
    $cands = $exact
} else {
    # Fallback: match by file name only, anywhere under the backup root
    $cands = Get-ChildItem -Path $backupRoot -Recurse -File -ErrorAction SilentlyContinue |
        Where-Object { $_.Name -ieq $leaf }
}

if (!$cands -or $cands.Count -eq 0) {
    Write-Host "FEHLER: Kein Backup gefunden fuer: $TargetPath"
    Write-Host "Gesucht nach rel=$rel bzw. Name=$leaf"
    exit 2
}

# Newest first, capped at -Limit
$latest = $cands | Sort-Object LastWriteTime -Descending | Select-Object -First $Limit

Write-Host ""
Write-Host "=========================================="
Write-Host " RESTORE PICK - Backups fuer: $TargetPath"
Write-Host "=========================================="
Write-Host ""

$idx = 1
foreach ($f in $latest) {
    $ts = $f.LastWriteTime.ToString("yyyy-MM-dd HH:mm:ss")
    Write-Host ("[{0}] {1} {2}" -f $idx, $ts, $f.FullName)
    $idx++
}

Write-Host ""
$sel = Read-Host "Bitte Nummer (1-$($latest.Count)) eingeben"
if (-not $sel) { Write-Host "Abbruch."; exit 0 }

[int]$n = 0
if (-not [int]::TryParse($sel, [ref]$n)) { Write-Host "Ungueltige Eingabe."; exit 3 }
if ($n -lt 1 -or $n -gt $latest.Count) { Write-Host "Ungueltige Nummer."; exit 3 }

$chosen = $latest[$n-1]

Write-Host ""
Write-Host "GEWAEHLT:"
Write-Host " $($chosen.FullName)"
Write-Host ""

# Before restoring: back up the current file, if it exists
if (Test-Path $targetFull) {
    try {
        & powershell -NoProfile -ExecutionPolicy Bypass -File (Join-Path $base "_make_backup.ps1") $targetFull | Out-Null
        Write-Host "OK: Aktuelle Datei vorher gesichert."
    } catch {
        Write-Host "WARNUNG: Konnte aktuelle Datei nicht backupen, fahre trotzdem fort."
    }
} else {
    # Target does not exist yet: make sure its parent directory does
    $parent = Split-Path -Parent $targetFull
    if ($parent -and !(Test-Path $parent)) {
        New-Item -ItemType Directory -Force -Path $parent | Out-Null
    }
}

Copy-Item -Force $chosen.FullName $targetFull
Write-Host "OK: Wiederhergestellt -> $targetFull"
||||
173
AzA march 2026 - Kopie (18)/_test_audit_integrity.py
Normal file
@@ -0,0 +1,173 @@
|
||||
# -*- coding: utf-8 -*-
"""STEP 11a – audit-log integrity proof.

Writes a hash-chained audit log, verifies the chain, proves that
tampering is detected, exercises log rotation with chain hand-over,
checks stats/export, and asserts data minimization.
Exits with code 1 if any check fails.
"""
import os, sys, json, copy, shutil  # NOTE(review): copy is unused here
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
os.chdir(os.path.dirname(os.path.abspath(__file__)))

from pathlib import Path
from aza_audit_log import (
    log_event, verify_integrity, verify_all_rotations,
    get_log_stats, export_audit_log, _LOG_FILE, _GENESIS_HASH,
    _CHAIN_HEADER_PREFIX,
)

# Start from a clean log so entry counts below are deterministic
if _LOG_FILE.exists():
    _LOG_FILE.unlink()

passed = 0  # tally of successful checks
failed = 0  # tally of failed checks

def check(name, condition):
    """Print one PASS/FAIL line and update the module-level tallies."""
    global passed, failed
    if condition:
        print(f" PASS: {name}")
        passed += 1
    else:
        print(f" FAIL: {name}")
        failed += 1

print("=" * 70)
print("AUDIT-LOG INTEGRITAET PROOF")
print("=" * 70)

# --- 1) Write the hash chain + verify it ---
print("\n--- 1. HASH-KETTE SCHREIBEN + VERIFIZIEREN ---")
log_event("APP_START", "user_1", detail="test")
log_event("LOGIN_OK", "user_1")
log_event("AI_CHAT", "user_1", detail="model=gpt-5.2")
log_event("CONSENT_GRANT", "user_1")
log_event("APP_STOP", "user_1")

ok, errs = verify_integrity()
check("5 Eintraege, Integritaet PASS", ok)

# Non-comment, non-blank lines = actual log entries
with open(_LOG_FILE, "r", encoding="utf-8") as f:
    lines = [l.strip() for l in f if l.strip() and not l.startswith("#")]
check(f"5 Zeilen (gefunden: {len(lines)})", len(lines) == 5)

# Pipe-separated record; field 6 = prev_hash, field 7 = entry_hash
parts = [p.strip() for p in lines[0].split("|")]
check("8 Felder pro Zeile", len(parts) == 8)
check(f"prev_hash[0] = GENESIS", parts[6] == _GENESIS_HASH)
check("entry_hash[0] hat 64 hex", len(parts[7]) == 64)  # SHA-256 hex digest

# Chain property: each entry's prev_hash equals the previous entry_hash
parts2 = [p.strip() for p in lines[1].split("|")]
check("prev_hash[1] == entry_hash[0]", parts2[6] == parts[7])

print("\n Beispiel-Zeile (sanitized):")
print(f" {lines[0][:80]}...")

# --- 2) Detect tampering ---
print("\n--- 2. MANIPULATION ERKENNEN ---")
backup = str(_LOG_FILE) + ".bak"
shutil.copy2(_LOG_FILE, backup)

with open(_LOG_FILE, "r", encoding="utf-8") as f:
    content = f.read()

# Flip one event name; the entry hash no longer matches
manipulated = content.replace("LOGIN_OK", "LOGIN_XX", 1)
with open(_LOG_FILE, "w", encoding="utf-8") as f:
    f.write(manipulated)

ok2, errs2 = verify_integrity()
check("Manipulation erkannt (FAIL)", not ok2)
if errs2:
    print(f" Fehler: {errs2[0]}")

# Restore the untampered log and confirm verification passes again
shutil.copy2(backup, _LOG_FILE)
os.remove(backup)

ok3, _ = verify_integrity()
check("Nach Restore: PASS", ok3)

# --- 3) Rotation with chain hand-over ---
print("\n--- 3. ROTATION MIT KETTEN-UEBERGABE ---")

if _LOG_FILE.exists():
    _LOG_FILE.unlink()

# Rotated file naming convention: <stem>.1<suffix>
rot1 = _LOG_FILE.parent / f"{_LOG_FILE.stem}.1{_LOG_FILE.suffix}"
if rot1.exists():
    rot1.unlink()

log_event("PRE_ROTATE_1", "user_1")
log_event("PRE_ROTATE_2", "user_1")

# Remember the last entry hash before rotating; it must be carried over
last_hash_before = None
with open(_LOG_FILE, "r", encoding="utf-8") as f:
    for line in f:
        line = line.strip()
        if line and not line.startswith("#"):
            parts = [p.strip() for p in line.split("|")]
            if len(parts) >= 8:
                last_hash_before = parts[7]

# Simulate rotation: copy current log to .1, start a fresh log whose
# chain header carries the last hash of the rotated file
shutil.copy2(_LOG_FILE, rot1)

with open(_LOG_FILE, "w", encoding="utf-8") as f:
    f.write(f"{_CHAIN_HEADER_PREFIX}{last_hash_before}\n")

log_event("POST_ROTATE_1", "user_1")
log_event("POST_ROTATE_2", "user_1")

ok_rot1, errs_rot1 = verify_integrity(rot1)
check("Rotierte Datei (.1) intakt", ok_rot1)

ok_main, errs_main = verify_integrity(_LOG_FILE)
check("Aktuelle Datei intakt (Kette ab Chain-Header)", ok_main)

with open(_LOG_FILE, "r", encoding="utf-8") as f:
    first_line = f.readline().strip()
check("Chain-Header vorhanden", first_line.startswith(_CHAIN_HEADER_PREFIX))

header_hash = first_line[len(_CHAIN_HEADER_PREFIX):]
check("Header-Hash == letzter Hash der rotierten Datei", header_hash == last_hash_before)

# First real entry of the new file must chain off the header hash
with open(_LOG_FILE, "r", encoding="utf-8") as f:
    for line in f:
        line = line.strip()
        if line and not line.startswith("#"):
            first_entry_parts = [p.strip() for p in line.split("|")]
            break
check("prev_hash[0 in neuem File] == Chain-Header-Hash",
      first_entry_parts[6] == last_hash_before)

ok_all, res_all = verify_all_rotations()
check("verify_all_rotations PASS", ok_all)

if rot1.exists():
    rot1.unlink()

# --- 4) Stats + export ---
print("\n--- 4. STATS + EXPORT ---")
stats = get_log_stats()
check("Integritaet in Stats = PASS", stats["integrity"] == "PASS")

path = export_audit_log()
with open(path, "r", encoding="utf-8") as f:
    data = json.load(f)
check("Export integrity = PASS", data["integrity"] == "PASS")
check("Export hat entries", len(data["entries"]) > 0)
check("Entries haben entry_hash", "entry_hash" in data["entries"][0])
os.remove(path)

# --- 5) Data minimization: no secrets/content in the log ---
print("\n--- 5. DATA MINIMIZATION ---")
with open(_LOG_FILE, "r", encoding="utf-8") as f:
    content = f.read()
check("Kein Passwort im Log", "password" not in content.lower())
check("Kein API-Key im Log", "sk-" not in content)
check("Kein Prompt/Transkript", "TRANSKRIPT:" not in content)

# Cleanup: leave no test log behind
if _LOG_FILE.exists():
    _LOG_FILE.unlink()

# --- Summary ---
print(f"\n{'='*70}")
print("ZUSAMMENFASSUNG")
print(f"{'='*70}")
print(f"ERGEBNIS: {passed} PASS, {failed} FAIL")
if failed == 0:
    print("ALLE TESTS BESTANDEN")
else:
    sys.exit(1)
|
||||
101
AzA march 2026 - Kopie (18)/_test_audit_log.py
Normal file
@@ -0,0 +1,101 @@
|
||||
# -*- coding: utf-8 -*-
"""Test script for aza_audit_log.py.

Covers: event writing, the pipe-separated line format, statistics,
JSON export, data minimization, and pipe sanitization in details.
Exits with code 1 if any check fails.
"""
import os, sys, json
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
os.chdir(os.path.dirname(os.path.abspath(__file__)))

from pathlib import Path  # NOTE(review): Path is unused in this script
from aza_audit_log import (
    log_event, export_audit_log, get_log_stats, _LOG_FILE,
)

# Start from a clean log so entry counts below are deterministic
if _LOG_FILE.exists():
    _LOG_FILE.unlink()

passed = 0  # tally of successful checks
failed = 0  # tally of failed checks

def check(name, condition):
    """Print one PASS/FAIL line and update the module-level tallies."""
    global passed, failed
    if condition:
        print(f" PASS: {name}")
        passed += 1
    else:
        print(f" FAIL: {name}")
        failed += 1

print("=== TEST 1: Events schreiben ===")
log_event("APP_START", "test_user", detail="test")
log_event("LOGIN_OK", "test_user")
log_event("LOGIN_FAIL", "test_user", success=False, detail="wrong password")
log_event("CONSENT_GRANT", "test_user")
log_event("AI_TRANSCRIBE", "test_user")
log_event("AI_CHAT", "test_user", detail="model=gpt-5.2")
log_event("AI_BLOCKED", "test_user", success=False, detail="kein Consent")
log_event("CONSENT_REVOKE", "test_user")
log_event("2FA_OK", "test_user")
log_event("2FA_FAIL", "test_user", success=False)
log_event("PASSWORD_CHANGE", "test_user")
log_event("APP_STOP", "test_user")
check("Logdatei existiert", _LOG_FILE.exists())

print("\n=== TEST 2: Log-Inhalt pruefen ===")
with open(_LOG_FILE, "r", encoding="utf-8") as f:
    lines = [l.strip() for l in f if l.strip()]
check(f"12 Zeilen (gefunden: {len(lines)})", len(lines) == 12)

# Line format: timestamp | event | user | status | source | detail
line1 = lines[0]
parts = [p.strip() for p in line1.split("|")]
check("6 Felder pro Zeile", len(parts) == 6)
check("Timestamp endet mit +00:00", "+00:00" in parts[0])  # UTC ISO-8601
check("Event = APP_START", parts[1] == "APP_START")
check("User = test_user", parts[2] == "test_user")
check("Status = OK", parts[3] == "OK")
check("Source = desktop", parts[4] == "desktop")

# Third event was logged with success=False
line3 = lines[2]
parts3 = [p.strip() for p in line3.split("|")]
check("LOGIN_FAIL Status = FAIL", parts3[3] == "FAIL")

print("\n=== TEST 3: Statistiken ===")
stats = get_log_stats()
check("Datei existiert", stats["exists"])
check(f"12 Zeilen (stats: {stats['total_lines']})", stats["total_lines"] == 12)
check("APP_START in events", stats["events"].get("APP_START", 0) == 1)
check("LOGIN_FAIL in events", stats["events"].get("LOGIN_FAIL", 0) == 1)
check("AI_CHAT in events", stats["events"].get("AI_CHAT", 0) == 1)

print("\n=== TEST 4: Export ===")
path = export_audit_log()
check("Export-Datei existiert", os.path.exists(path))
with open(path, "r", encoding="utf-8") as f:
    data = json.load(f)
check(f"12 Eintraege (export: {data['total_entries']})", data["total_entries"] == 12)
check("Erster Eintrag APP_START", data["entries"][0]["event"] == "APP_START")
check("Letzter Eintrag APP_STOP", data["entries"][-1]["event"] == "APP_STOP")
os.remove(path)

print("\n=== TEST 5: Data Minimization ===")
# NOTE(review): file handle here is left for GC to close
all_text = open(_LOG_FILE, "r", encoding="utf-8").read()
# "wrong password" is our own test detail, hence the exception
check("Kein Passwort im Log", "password" not in all_text.lower() or "wrong password" in all_text.lower())
check("Kein API-Key im Log", "sk-" not in all_text)
check("Kein Prompt im Log", "TRANSKRIPT:" not in all_text)
# detail is the last pipe-separated field of each line
check("Detail < 200 Zeichen", all(len(p.strip()) < 200 for line in lines for p in [line.split("|")[-1]]))

print("\n=== TEST 6: Pipe-Sanitierung ===")
# Pipes inside the detail would break the record format; they must be replaced
log_event("TEST", "user", detail="detail|mit|pipes")
with open(_LOG_FILE, "r", encoding="utf-8") as f:
    last = [l.strip() for l in f if l.strip()][-1]
pipe_count = last.count("|")
check(f"Pipes ersetzt (nur 5 Trennzeichen, gefunden: {pipe_count})", pipe_count == 5)

# Cleanup: leave no test log behind
if _LOG_FILE.exists():
    _LOG_FILE.unlink()

print(f"\n{'='*50}")
print(f"ERGEBNIS: {passed} PASS, {failed} FAIL")
if failed == 0:
    print("ALLE TESTS BESTANDEN")
else:
    sys.exit(1)
|
||||
82
AzA march 2026 - Kopie (18)/_test_consent.py
Normal file
@@ -0,0 +1,82 @@
|
||||
# -*- coding: utf-8 -*-
"""Test script for aza_consent.py.

Exercises the consent lifecycle (grant -> revoke -> re-grant), the
hash chain, status queries, export and per-user history.
Exits with code 1 if any check fails.
"""
import os, sys
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
os.chdir(os.path.dirname(os.path.abspath(__file__)))

from aza_consent import (
    has_valid_consent, record_consent, record_revoke,
    get_consent_status, get_user_history, export_consent_log,
    verify_chain_integrity, _CONSENT_FILE,
)

# Ensure no stale log from a previous run
if _CONSENT_FILE.exists():
    _CONSENT_FILE.unlink()

uid = "TestUser"
passed = 0  # tally of successful checks
failed = 0  # tally of failed checks

def check(name, condition):
    """Print one PASS/FAIL line and update the module-level tallies."""
    global passed, failed
    if condition:
        print(f" PASS: {name}")
        passed += 1
    else:
        print(f" FAIL: {name}")
        failed += 1

print("=== TEST 1: Ohne Consent -> blockiert ===")
check("has_valid_consent == False", not has_valid_consent(uid))

print("\n=== TEST 2: Consent erteilen -> erlaubt ===")
entry = record_consent(uid, source="test")
check("action == grant", entry["action"] == "grant")
check("has_valid_consent == True", has_valid_consent(uid))
check("hash vorhanden", len(entry.get("hash", "")) == 64)  # SHA-256 hex digest

print("\n=== TEST 3: Widerruf -> blockiert ===")
entry2 = record_revoke(uid, source="test")
check("action == revoke", entry2["action"] == "revoke")
check("has_valid_consent == False", not has_valid_consent(uid))

print("\n=== TEST 4: Erneuter Consent -> erlaubt ===")
record_consent(uid, source="test")
check("has_valid_consent == True", has_valid_consent(uid))

print("\n=== TEST 5: Hash-Kette (Integritaet) ===")
ok, errors = verify_chain_integrity()
check("Kette intakt", ok)
if errors:
    for e in errors:
        print(f" {e}")

print("\n=== TEST 6: Status-Abfrage ===")
status = get_consent_status(uid)
check("has_consent == True", status["has_consent"])
check("version_match == True", status["version_match"])
check("last_grant vorhanden", status["last_grant"] is not None)

print("\n=== TEST 7: Export ===")
path = export_consent_log()
check("Exportdatei existiert", os.path.exists(path))
os.remove(path)

print("\n=== TEST 8: User-Historie ===")
# grant + revoke + grant from tests 2-4 = 3 entries for this user
hist = get_user_history(uid)
check(f"3 Eintraege (gefunden: {len(hist)})", len(hist) == 3)

print("\n=== TEST 9: Anderer User -> kein Consent ===")
check("User2 hat keinen Consent", not has_valid_consent("User2"))

# Cleanup: leave no test log behind
if _CONSENT_FILE.exists():
    _CONSENT_FILE.unlink()

print(f"\n{'='*50}")
print(f"ERGEBNIS: {passed} PASS, {failed} FAIL")
if failed == 0:
    print("ALLE TESTS BESTANDEN")
else:
    sys.exit(1)
|
||||
201
AzA march 2026 - Kopie (18)/_test_consent_audit.py
Normal file
@@ -0,0 +1,201 @@
|
||||
# -*- coding: utf-8 -*-
"""
STEP 10a – consent audit-proof script.

Produces all the evidence required for the audit documentation:
storage location + schema, example log entries, integrity proof
(including a tamper test), enforcement proof, and a data-minimization
check. Exits with code 1 if any proof fails.
"""
import os, sys, json, copy, shutil
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
os.chdir(os.path.dirname(os.path.abspath(__file__)))

from aza_consent import (
    has_valid_consent, record_consent, record_revoke,
    get_consent_status, get_user_history, export_consent_log,
    verify_chain_integrity, _CONSENT_FILE, _load_log, _save_log,
    _get_consent_version,
)

# Start from a clean consent log
if _CONSENT_FILE.exists():
    _CONSENT_FILE.unlink()

print("=" * 70)
print("CONSENT AUDIT-PROOF – AZA / MedWork")
print("=" * 70)

# --- 1) Storage location + schema ---
print("\n--- 1. SPEICHERORT + SCHEMA ---")
print(f"Datei: {_CONSENT_FILE}")
print(f"Format: JSON Array (append-only)")
print(f"Version: {_get_consent_version()}")
print(f"Quelle: legal/ai_consent.md -> Zeile mit 'Stand:'")

print("\nSchema pro Eintrag:")
print(" user_id string Benutzer-ID")
print(" consent_type string Immer 'ai_processing'")
print(" consent_version string Stand-Datum aus ai_consent.md")
print(" timestamp string UTC ISO-8601")
print(" source string 'ui' / 'test' / 'admin'")
print(" action string 'grant' oder 'revoke'")
print(" prev_hash string SHA-256 des vorherigen Eintrags")
print(" hash string SHA-256 dieses Eintrags")

# --- 2) Example log entries ---
print("\n--- 2. BEISPIEL-LOGEINTRAEGE (sanitized) ---")

print("\n2a) GRANT (Zustimmung):")
e1 = record_consent("user_1", source="ui")
print(json.dumps(e1, indent=2, ensure_ascii=False))

print("\n2b) REVOKE (Widerruf):")
e2 = record_revoke("user_1", source="ui")
print(json.dumps(e2, indent=2, ensure_ascii=False))

print("\n2c) RE-GRANT (erneute Zustimmung):")
e3 = record_consent("user_1", source="ui")
print(json.dumps(e3, indent=2, ensure_ascii=False))

# All timestamps must be timezone-aware UTC
print("\nTimestamp-Pruefung:")
for i, e in enumerate([e1, e2, e3], 1):
    ts = e["timestamp"]
    is_utc = ts.endswith("+00:00") or ts.endswith("Z")
    print(f" Eintrag {i}: {ts} -> UTC: {'JA' if is_utc else 'NEIN'}")

# --- 3) Integrity proof ---
print("\n--- 3. INTEGRITAETSBEWEIS ---")

print("\n3a) Intakte Log-Datei:")
ok, errors = verify_chain_integrity()
print(f" verify_chain_integrity() -> {'PASS' if ok else 'FAIL'}")
if errors:
    for e in errors:
        print(f" {e}")

print("\n3b) Manipulierte Log-Datei:")
# Keep a pristine copy so the log can be restored after tampering
backup_path = str(_CONSENT_FILE) + ".backup"
shutil.copy2(_CONSENT_FILE, backup_path)

# Tamper with one field; the entry hash no longer matches
entries = _load_log()
manipulated = copy.deepcopy(entries)
if manipulated:
    original_ts = manipulated[0]["timestamp"]
    manipulated[0]["timestamp"] = manipulated[0]["timestamp"][:-1] + "X"
    _save_log(manipulated)

ok2, errors2 = verify_chain_integrity()
print(f" Manipulation: timestamp[0] '{original_ts}' -> '...X'")
print(f" verify_chain_integrity() -> {'PASS' if ok2 else 'FAIL'}")
if errors2:
    for e in errors2:
        print(f" {e}")

# Restore the pristine log and re-verify
shutil.copy2(backup_path, _CONSENT_FILE)
os.remove(backup_path)

ok3, _ = verify_chain_integrity()
print(f" Nach Restore: verify_chain_integrity() -> {'PASS' if ok3 else 'FAIL'}")

# --- 4) Enforcement proof ---
print("\n--- 4. ENFORCEMENT-BEWEIS ---")

# Reset: no consent recorded at all
if _CONSENT_FILE.exists():
    _CONSENT_FILE.unlink()

print("\n4a) Ohne Consent:")
result_no = has_valid_consent("user_1")
print(f" has_valid_consent('user_1') -> {result_no}")
print(f" KI-Funktion: {'BLOCKIERT' if not result_no else 'ERLAUBT'}")
print(f" UI-Meldung: 'KI-Einwilligung fehlt oder wurde widerrufen.' (RuntimeError)")

print("\n4b) Nach Consent:")
record_consent("user_1", source="audit_test")
result_yes = has_valid_consent("user_1")
print(f" has_valid_consent('user_1') -> {result_yes}")
print(f" KI-Funktion: {'BLOCKIERT' if not result_yes else 'ERLAUBT'}")

print("\n4c) Nach Widerruf:")
record_revoke("user_1", source="audit_test")
result_rev = has_valid_consent("user_1")
print(f" has_valid_consent('user_1') -> {result_rev}")
print(f" KI-Funktion: {'BLOCKIERT' if not result_rev else 'ERLAUBT'}")

print("\n4d) Version-Change Simulation:")
record_consent("user_1", source="audit_test")
result_before = has_valid_consent("user_1")
print(f" Vor Version-Aenderung: has_valid_consent -> {result_before}")

# Rewrite the stored grant with an outdated consent version; the check
# must then demand a fresh consent
entries = _load_log()
for e in entries:
    if e.get("user_id") == "user_1" and e.get("action") == "grant":
        e["consent_version"] = "Januar 2025"
_save_log(entries)

result_after = has_valid_consent("user_1")
print(f" consent_version im Log auf 'Januar 2025' geaendert")
print(f" Aktuelle Version: '{_get_consent_version()}'")
print(f" has_valid_consent -> {result_after}")
print(f" Ergebnis: {'Neu-Consent erforderlich (KORREKT)' if not result_after else 'FEHLER: sollte False sein'}")

# --- 5) Data-minimization check ---
print("\n--- 5. DATA MINIMIZATION CHECK ---")

entries = _load_log()
all_keys = set()
for e in entries:
    all_keys.update(e.keys())

print(f" Gespeicherte Felder: {sorted(all_keys)}")

# No content/secret field may appear in the consent log
sensitive_absent = True
sensitive_fields = ["transcript", "prompt", "response", "api_key", "secret",
                    "password", "audio", "kg_text", "patient_name", "diagnosis"]
found_sensitive = []
for key in all_keys:
    if key.lower() in sensitive_fields:
        found_sensitive.append(key)
        sensitive_absent = False

# Over-long values are treated as smuggled content
for e in entries:
    for key, val in e.items():
        if isinstance(val, str) and len(val) > 200:
            found_sensitive.append(f"{key} (Wert > 200 Zeichen)")
            sensitive_absent = False

if sensitive_absent:
    print(" Keine sensiblen Felder gefunden.")
    print(" Kein Transkript/Prompt/KG-Inhalt im Log.")
    print(" Kein API-Key/Secret/Passwort im Log.")
    print(" Data Minimization: PASS")
else:
    print(f" WARNUNG: Sensible Felder gefunden: {found_sensitive}")
    print(" Data Minimization: FAIL")

# --- Cleanup: leave no test log behind ---
if _CONSENT_FILE.exists():
    _CONSENT_FILE.unlink()

# --- Summary ---
print("\n" + "=" * 70)
print("ZUSAMMENFASSUNG")
print("=" * 70)

checks = {
    "Speicherort + Schema": True,
    "Beispiel-Logeintraege": True,
    "Integritaet intakt": ok,
    "Integritaet manipuliert erkannt": not ok2,
    "Enforcement ohne Consent": not result_no,
    "Enforcement mit Consent": result_yes,
    "Enforcement nach Widerruf": not result_rev,
    "Version-Change erfordert Neu-Consent": not result_after,
    "Data Minimization": sensitive_absent,
}

all_pass = True
for name, passed in checks.items():
    status = "PASS" if passed else "FAIL"
    if not passed:
        all_pass = False
    print(f" [{status}] {name}")

print(f"\nGESAMTERGEBNIS: {'ALLE TESTS BESTANDEN' if all_pass else 'TESTS FEHLGESCHLAGEN'}")
if not all_pass:
    sys.exit(1)
|
||||
26
AzA march 2026 - Kopie (18)/_test_live_chat.py
Normal file
@@ -0,0 +1,26 @@
|
||||
"""Smoke test: desktop client -> backend /v1/chat round trip.

Reads the backend URL and API token from the files written by the
backend starter scripts and sends a minimal chat request.
Fixes over the original: file handles are closed (the original used
bare ``open(...).read()`` twice) and the unused ``json`` import is gone.
"""
import requests


def _read_stripped(path):
    """Return the stripped content of *path*, closing the file."""
    with open(path) as fh:
        return fh.read().strip()


token = _read_stripped("backend_token.txt")
url = _read_stripped("backend_url.txt")

print(f"Backend-URL: {url}")
print(f"Token (erste 8 Zeichen): {token[:8]}...")
print()

r = requests.post(
    f"{url}/v1/chat",
    headers={"X-API-Token": token, "Content-Type": "application/json"},
    json={"model": "gpt-4o-mini", "messages": [{"role": "user", "content": "Antworte nur mit OK"}]},
    timeout=30,
)
print(f"HTTP Status: {r.status_code}")
d = r.json()
print(f"success: {d.get('success')}")
print(f"content: {d.get('content', '')[:200]}")
print(f"model: {d.get('model')}")
print(f"duration_ms: {d.get('duration_ms')}")

if r.status_code == 200 and d.get("success"):
    print("\n>>> VERBINDUNG ERFOLGREICH: Desktop -> Hetzner -> OpenAI funktioniert!")
else:
    print(f"\n>>> FEHLER: {d.get('error', 'unbekannt')}")
|
||||
143
AzA march 2026 - Kopie (18)/_test_monitoring.py
Normal file
@@ -0,0 +1,143 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""STEP 12 – Monitoring & Health Checks Proof"""
|
||||
import os, sys, json
|
||||
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
|
||||
os.chdir(os.path.dirname(os.path.abspath(__file__)))
|
||||
|
||||
from pathlib import Path
|
||||
from aza_audit_log import log_event, verify_integrity, _LOG_FILE
|
||||
from aza_monitoring import (
|
||||
collect_metrics, get_alert_metrics, check_integrity, run_nightly,
|
||||
)
|
||||
|
||||
if _LOG_FILE.exists():
|
||||
_LOG_FILE.unlink()
|
||||
|
||||
passed = 0
|
||||
failed = 0
|
||||
|
||||
def check(name, condition):
|
||||
global passed, failed
|
||||
if condition:
|
||||
print(f" PASS: {name}")
|
||||
passed += 1
|
||||
else:
|
||||
print(f" FAIL: {name}")
|
||||
failed += 1
|
||||
|
||||
print("=" * 70)
|
||||
print("MONITORING & HEALTH CHECKS PROOF")
|
||||
print("=" * 70)
|
||||
|
||||
# --- 1) Metriken mit Audit-Daten ---
|
||||
print("\n--- 1. METRIKEN MIT AUDIT-DATEN ---")
|
||||
log_event("APP_START", "user_1")
|
||||
log_event("LOGIN_OK", "user_1")
|
||||
log_event("LOGIN_FAIL", "attacker", success=False, detail="wrong pw")
|
||||
log_event("LOGIN_FAIL", "attacker", success=False, detail="wrong pw")
|
||||
log_event("AI_CHAT", "user_1", detail="model=gpt-5.2")
|
||||
log_event("AI_TRANSCRIBE", "user_1")
|
||||
log_event("AI_BLOCKED", "user_2", success=False, detail="kein Consent")
|
||||
log_event("2FA_FAIL", "user_1", success=False)
|
||||
|
||||
m = collect_metrics()
|
||||
check("Audit-Log Eintraege > 0", m["audit_log"]["total_lines"] > 0)
|
||||
check("Audit-Log Integritaet PASS", m["audit_log"]["integrity"] == "PASS")
|
||||
check("Backup-Count vorhanden", "count" in m["backup"])
|
||||
|
||||
# --- 2) Alert-Metriken ---
|
||||
print("\n--- 2. ALERT-METRIKEN ---")
|
||||
alerts = get_alert_metrics()
|
||||
alert_names = [a["metric"] for a in alerts]
|
||||
check("login_fail erkannt", "login_fail_count" in alert_names)
|
||||
check("ai_calls_total vorhanden", "ai_calls_total" in alert_names)
|
||||
check("ai_blocked erkannt", "ai_blocked_count" in alert_names)
|
||||
check("2fa_fail erkannt", "2fa_fail_count" in alert_names)
|
||||
|
||||
ai_total = next((a for a in alerts if a["metric"] == "ai_calls_total"), None)
|
||||
check("AI-Calls = 2 (chat+transcribe)", ai_total and ai_total["value"] == 2)
|
||||
|
||||
login_fail = next((a for a in alerts if a["metric"] == "login_fail_count"), None)
|
||||
check("Login-Fail = 2", login_fail and login_fail["value"] == 2)
|
||||
|
||||
# --- 3) Integritaet ---
|
||||
print("\n--- 3. INTEGRITAETS-CHECK ---")
|
||||
r = check_integrity()
|
||||
check("Audit-Log Integritaet PASS", r["audit_log"]["status"] == "PASS")
|
||||
check("Consent-Log Status vorhanden", "status" in r["consent_log"])
|
||||
|
||||
# --- 4) Manipulation erkennen ---
|
||||
print("\n--- 4. MANIPULATION ERKENNEN ---")
|
||||
import shutil
|
||||
backup = str(_LOG_FILE) + ".bak"
|
||||
shutil.copy2(_LOG_FILE, backup)
|
||||
|
||||
with open(_LOG_FILE, "r", encoding="utf-8") as f:
|
||||
content = f.read()
|
||||
manipulated = content.replace("LOGIN_OK", "LOGIN_XX", 1)
|
||||
with open(_LOG_FILE, "w", encoding="utf-8") as f:
|
||||
f.write(manipulated)
|
||||
|
||||
r2 = check_integrity()
|
||||
check("Manipulation erkannt (FAIL)", r2["audit_log"]["status"] == "FAIL")
|
||||
|
||||
shutil.copy2(backup, _LOG_FILE)
|
||||
os.remove(backup)
|
||||
|
||||
r3 = check_integrity()
|
||||
check("Nach Restore: PASS", r3["audit_log"]["status"] == "PASS")
|
||||
|
||||
# --- 5) Nightly-Report ---
|
||||
print("\n--- 5. NIGHTLY-REPORT ---")
|
||||
report = run_nightly()
|
||||
check("Nightly hat 'overall'", "overall" in report)
|
||||
check("Nightly hat 'integrity'", "integrity" in report)
|
||||
check("Nightly hat 'alerts'", "alerts" in report)
|
||||
check("Nightly hat 'metrics'", "metrics" in report)
|
||||
|
||||
nightly_file = Path("monitoring_nightly_test.json")
|
||||
with open(nightly_file, "w", encoding="utf-8") as f:
|
||||
json.dump(report, f, ensure_ascii=False, indent=2)
|
||||
check("Nightly JSON geschrieben", nightly_file.exists())
|
||||
nightly_file.unlink()
|
||||
|
||||
# --- 6) Data Minimization ---
|
||||
print("\n--- 6. DATA MINIMIZATION ---")
|
||||
with open(_LOG_FILE, "r", encoding="utf-8") as f:
|
||||
content = f.read()
|
||||
check("Kein Passwort im Audit-Log", "password" not in content.lower() or "wrong pw" in content.lower())
|
||||
check("Kein API-Key im Audit-Log", "sk-" not in content)
|
||||
check("Kein Transkript im Audit-Log", "TRANSKRIPT:" not in content)
|
||||
|
||||
report_json = json.dumps(report, ensure_ascii=False)
|
||||
check("Kein Passwort im Report", "password" not in report_json.lower() or "wrong pw" not in report_json)
|
||||
check("Kein API-Key im Report", "sk-" not in report_json)
|
||||
|
||||
# --- 7) Health-Check Format ---
|
||||
print("\n--- 7. HEALTH-CHECK FORMAT ---")
|
||||
import importlib
|
||||
backend = importlib.import_module("backend_main")
|
||||
check("backend_main hat _APP_VERSION", hasattr(backend, "_APP_VERSION"))
|
||||
check("backend_main hat _START_TIME", hasattr(backend, "_START_TIME"))
|
||||
|
||||
transcribe = importlib.import_module("transcribe_server")
|
||||
check("transcribe_server hat _APP_VERSION", hasattr(transcribe, "_APP_VERSION"))
|
||||
check("transcribe_server hat _START_TIME", hasattr(transcribe, "_START_TIME"))
|
||||
|
||||
# Cleanup
|
||||
if _LOG_FILE.exists():
|
||||
_LOG_FILE.unlink()
|
||||
|
||||
# Nightly report cleanup
|
||||
for f in Path(".").glob("monitoring_nightly_*.json"):
|
||||
f.unlink()
|
||||
|
||||
# --- Zusammenfassung ---
|
||||
print(f"\n{'='*70}")
|
||||
print("ZUSAMMENFASSUNG")
|
||||
print(f"{'='*70}")
|
||||
print(f"ERGEBNIS: {passed} PASS, {failed} FAIL")
|
||||
if failed == 0:
|
||||
print("ALLE TESTS BESTANDEN")
|
||||
else:
|
||||
sys.exit(1)
|
||||
748
AzA march 2026 - Kopie (18)/admin_routes.py
Normal file
@@ -0,0 +1,748 @@
|
||||
# admin_routes.py – AZA Admin Control Panel v2 (internal JSON endpoints)
|
||||
#
|
||||
# All endpoints require X-Admin-Token header matching AZA_ADMIN_TOKEN env var.
|
||||
# This router is mounted with prefix="/admin" in backend_main.py.
|
||||
#
|
||||
# v1 endpoints: system_status, licenses_overview, backup_status, billing_overview
|
||||
# v2 endpoints: license_customer_map, revenue_overview, alerts, dashboard_summary
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import calendar
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import sqlite3
|
||||
import time
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
from fastapi.responses import JSONResponse
|
||||
|
||||
from aza_security import require_admin_token
|
||||
|
||||
router = APIRouter(tags=["admin"], dependencies=[Depends(require_admin_token)])
|
||||
|
||||
_BASE_DIR = Path(__file__).resolve().parent
|
||||
_START_TIME = time.time()
|
||||
|
||||
_BACKUP_PATHS = [
|
||||
Path("/host_backups"),
|
||||
Path("/root/aza-backups"),
|
||||
Path("/root/aza-backups/daily"),
|
||||
Path("/var/backups/aza"),
|
||||
]
|
||||
_BACKUP_LOG_PATHS = [
|
||||
Path("/host_backups/backup.log"),
|
||||
Path("/root/aza-backups/backup.log"),
|
||||
Path("/var/log/aza-backup.log"),
|
||||
]
|
||||
|
||||
_LOOKUP_KEY_PRICES_CHF: Dict[str, int] = {
|
||||
"aza_basic_monthly": 59_00,
|
||||
"aza_basic_yearly": 590_00,
|
||||
"aza_team_monthly": 89_00,
|
||||
"aza_team_yearly": 890_00,
|
||||
}
|
||||
|
||||
|
||||
def _stripe_db_path() -> Path:
    """Resolve the license/Stripe SQLite file: env override, else app default."""
    default = str(_BASE_DIR / "data" / "stripe_webhook.sqlite")
    return Path(os.environ.get("STRIPE_DB_PATH", default))
|
||||
|
||||
|
||||
def _events_log_path() -> Path:
    """Resolve the Stripe webhook events JSONL log: env override, else default."""
    default = str(_BASE_DIR / "data" / "stripe_events.log.jsonl")
    return Path(os.environ.get("STRIPE_EVENTS_LOG", default))
|
||||
|
||||
|
||||
def _disk_usage() -> Dict[str, Any]:
|
||||
try:
|
||||
usage = shutil.disk_usage("/")
|
||||
total_gb = round(usage.total / (1024 ** 3), 2)
|
||||
used_gb = round(usage.used / (1024 ** 3), 2)
|
||||
free_gb = round(usage.free / (1024 ** 3), 2)
|
||||
used_pct = round((usage.used / usage.total) * 100, 1) if usage.total else 0
|
||||
return {
|
||||
"total_gb": total_gb,
|
||||
"used_gb": used_gb,
|
||||
"free_gb": free_gb,
|
||||
"used_percent": used_pct,
|
||||
}
|
||||
except Exception as e:
|
||||
return {"error": str(e)}
|
||||
|
||||
|
||||
def _safe_db_connect(db_path: Path):
|
||||
if not db_path.exists():
|
||||
return None
|
||||
return sqlite3.connect(str(db_path))
|
||||
|
||||
|
||||
def _stripe_env_ok() -> bool:
|
||||
return (
|
||||
bool(os.environ.get("STRIPE_SECRET_KEY", "").strip())
|
||||
and bool(os.environ.get("STRIPE_WEBHOOK_SECRET", "").strip())
|
||||
)
|
||||
|
||||
|
||||
def _newest_backup_info() -> Dict[str, Any]:
    """Scan every known backup path and describe the most recent backup folder.

    Returns {"found": False} when no backup directory exists anywhere;
    otherwise path/name plus UTC timestamp and age in hours/days.
    """
    newest: Dict[str, Any] = {"found": False}
    newest_mtime = 0.0
    for root in _BACKUP_PATHS:
        if not (root.exists() and root.is_dir()):
            continue
        try:
            for child in root.iterdir():
                if not child.is_dir():
                    continue
                mtime = child.stat().st_mtime
                if mtime <= newest_mtime:
                    continue
                newest_mtime = mtime
                hours = round((time.time() - mtime) / 3600, 1)
                newest = {
                    "found": True,
                    "path": str(root),
                    "name": child.name,
                    "time_utc": datetime.fromtimestamp(mtime, tz=timezone.utc).isoformat(),
                    "age_hours": hours,
                    "age_days": round(hours / 24, 1),
                }
        except Exception:
            # Unreadable location – skip it and keep scanning the rest.
            continue
    return newest
|
||||
|
||||
|
||||
def _license_counts() -> Dict[str, int]:
    """Return ``{status: count}`` from the licenses table.

    Returns an empty dict when the DB file is missing or any SQLite error
    occurs (best-effort helper used by several endpoints).
    """
    db_path = _stripe_db_path()
    if not db_path.exists():
        return {}
    try:
        con = sqlite3.connect(str(db_path))
        try:
            rows = con.execute(
                "SELECT status, COUNT(*) FROM licenses GROUP BY status"
            ).fetchall()
        finally:
            # BUGFIX: close the connection even when the query raises,
            # instead of leaking the handle on the error path.
            con.close()
        return {status: count for status, count in rows}
    except Exception:
        return {}
|
||||
|
||||
|
||||
# ═══════════════════════════════════════════════════════════════════════════════
|
||||
# v1 ENDPOINTS (unchanged)
|
||||
# ═══════════════════════════════════════════════════════════════════════════════
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# 1. GET /admin/system_status
|
||||
# ---------------------------------------------------------------------------
|
||||
@router.get("/system_status")
def system_status() -> Dict[str, Any]:
    """Liveness snapshot: uptime, disk, Stripe env configuration, DB file."""
    now = datetime.now(timezone.utc)

    key_set = bool(os.environ.get("STRIPE_SECRET_KEY", "").strip())
    webhook_set = bool(os.environ.get("STRIPE_WEBHOOK_SECRET", "").strip())
    if key_set and webhook_set:
        stripe_health: Dict[str, Any] = {"ok": True, "detail": "env_configured"}
    elif not key_set:
        stripe_health = {"ok": False, "detail": "STRIPE_SECRET_KEY missing"}
    else:
        stripe_health = {"ok": False, "detail": "STRIPE_WEBHOOK_SECRET missing"}

    db_path = _stripe_db_path()
    db_exists = db_path.exists()

    return {
        "status": "ok",
        "timestamp_utc": now.isoformat(),
        "uptime_seconds": int(time.time() - _START_TIME),
        "disk": _disk_usage(),
        "stripe": stripe_health,
        "database": {
            "path": str(db_path),
            "exists": db_exists,
            "size_kb": round(db_path.stat().st_size / 1024, 1) if db_exists else None,
        },
        "python_pid": os.getpid(),
    }
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# 2. GET /admin/licenses_overview
|
||||
# ---------------------------------------------------------------------------
|
||||
@router.get("/licenses_overview")
def licenses_overview(email: Optional[str] = None) -> Dict[str, Any]:
    """License counts by status plus the 20 most recently updated rows.

    Args:
        email: optional case-insensitive filter on ``customer_email`` for the
            "recent" list.

    Returns status "no_database" / "ok" / "error" dicts, never raises.
    """
    db_path = _stripe_db_path()
    if not db_path.exists():
        return {
            "status": "no_database",
            "db_path": str(db_path),
            "counts_by_status": {},
            "total": 0,
            "recent": [],
        }

    try:
        con = sqlite3.connect(str(db_path))
        try:
            con.row_factory = sqlite3.Row

            rows = con.execute(
                "SELECT status, COUNT(*) as cnt FROM licenses GROUP BY status"
            ).fetchall()
            counts = {r["status"]: r["cnt"] for r in rows}
            total = sum(counts.values())

            if email:
                recent_rows = con.execute(
                    """SELECT * FROM licenses
                       WHERE lower(customer_email) = ?
                       ORDER BY updated_at DESC LIMIT 20""",
                    (email.strip().lower(),),
                ).fetchall()
            else:
                recent_rows = con.execute(
                    "SELECT * FROM licenses ORDER BY updated_at DESC LIMIT 20"
                ).fetchall()
            recent = [dict(r) for r in recent_rows]
        finally:
            # BUGFIX: the original only closed on the success path, leaking the
            # connection whenever a query raised.
            con.close()

        return {
            "status": "ok",
            "counts_by_status": counts,
            "total": total,
            "recent": recent,
            "filter_email": email or None,
        }
    except Exception as e:
        return {"status": "error", "error": str(e)}
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# 3. GET /admin/backup_status
|
||||
# ---------------------------------------------------------------------------
|
||||
@router.get("/backup_status")
def backup_status() -> Dict[str, Any]:
    """Disk usage, per-location backup inventory, and backup-log tail."""
    result: Dict[str, Any] = {
        "disk": _disk_usage(),
        "backup_locations": [],
        "backup_log": None,
    }

    # Inventory every known backup directory (folder/file counts, newest folder).
    for location in _BACKUP_PATHS:
        info: Dict[str, Any] = {"path": str(location), "exists": location.exists()}
        if location.exists() and location.is_dir():
            try:
                children = sorted(
                    location.iterdir(), key=lambda p: p.stat().st_mtime, reverse=True
                )
                subdirs = [c for c in children if c.is_dir()]
                files = [f for f in location.rglob("*") if f.is_file()]

                info["folder_count"] = len(subdirs)
                info["file_count"] = len(files)
                info["total_size_mb"] = round(
                    sum(f.stat().st_size for f in files) / (1024 ** 2), 2
                )
                if subdirs:
                    latest = subdirs[0]
                    latest_mtime = latest.stat().st_mtime
                    info["newest_backup"] = latest.name
                    info["newest_backup_time_utc"] = datetime.fromtimestamp(
                        latest_mtime, tz=timezone.utc
                    ).isoformat()
                    info["newest_backup_age_hours"] = round(
                        (time.time() - latest_mtime) / 3600, 1
                    )
            except Exception as e:
                info["error"] = str(e)
        result["backup_locations"].append(info)

    # First readable backup log wins; report its last 20 lines.
    for log_path in _BACKUP_LOG_PATHS:
        if log_path.exists() and log_path.is_file():
            try:
                log_lines = log_path.read_text(
                    encoding="utf-8", errors="replace"
                ).strip().splitlines()
                result["backup_log"] = {
                    "path": str(log_path),
                    "total_lines": len(log_lines),
                    "last_lines": log_lines[-20:],
                }
            except Exception as e:
                result["backup_log"] = {"path": str(log_path), "error": str(e)}
            break

    if result["backup_log"] is None:
        result["backup_log"] = {
            "status": "not_found",
            "searched": [str(p) for p in _BACKUP_LOG_PATHS],
        }

    return result
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# 4. GET /admin/billing_overview
|
||||
# ---------------------------------------------------------------------------
|
||||
@router.get("/billing_overview")
def billing_overview() -> Dict[str, Any]:
    """Stripe env health, license-DB summary, and webhook events-log tail.

    Best-effort: each data source records its own "error" key instead of
    failing the whole response.
    """
    stripe_key_set = bool(os.environ.get("STRIPE_SECRET_KEY", "").strip())
    stripe_webhook_set = bool(os.environ.get("STRIPE_WEBHOOK_SECRET", "").strip())

    db_path = _stripe_db_path()
    events_path = _events_log_path()

    db_info: Dict[str, Any] = {"exists": db_path.exists()}
    licenses_summary: List[Dict[str, Any]] = []
    events_info: Dict[str, Any] = {"exists": events_path.exists()}

    if db_path.exists():
        try:
            con = sqlite3.connect(str(db_path))
            try:
                con.row_factory = sqlite3.Row
                db_info["size_kb"] = round(db_path.stat().st_size / 1024, 1)

                rows = con.execute(
                    """SELECT subscription_id, customer_email, status,
                              lookup_key, current_period_end, updated_at
                       FROM licenses ORDER BY updated_at DESC LIMIT 20"""
                ).fetchall()
                licenses_summary = [dict(r) for r in rows]

                db_info["processed_events_count"] = con.execute(
                    "SELECT COUNT(*) FROM processed_events"
                ).fetchone()[0]
            finally:
                # BUGFIX: always release the SQLite handle, even when a query
                # raises; the original leaked the connection on errors.
                con.close()
        except Exception as e:
            db_info["error"] = str(e)

    if events_path.exists():
        try:
            events_info["size_kb"] = round(events_path.stat().st_size / 1024, 1)
            with events_path.open("r", encoding="utf-8", errors="replace") as f:
                lines = f.readlines()
            events_info["total_lines"] = len(lines)

            recent_events: List[Dict[str, Any]] = []
            for line in lines[-10:]:
                line = line.strip()
                if not line:
                    continue
                try:
                    recent_events.append(json.loads(line))
                except Exception:
                    # Keep a truncated raw copy of unparseable lines for debugging.
                    recent_events.append({"raw": line[:200]})
            events_info["recent"] = recent_events
        except Exception as e:
            events_info["error"] = str(e)

    return {
        "stripe_health": {
            "ok": stripe_key_set and stripe_webhook_set,
            "secret_key_set": stripe_key_set,
            "webhook_secret_set": stripe_webhook_set,
        },
        "database": db_info,
        "licenses_recent": licenses_summary,
        "events_log": events_info,
    }
|
||||
|
||||
|
||||
# ═══════════════════════════════════════════════════════════════════════════════
|
||||
# v2 ENDPOINTS
|
||||
# ═══════════════════════════════════════════════════════════════════════════════
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# 5. GET /admin/license_customer_map
|
||||
# ---------------------------------------------------------------------------
|
||||
@router.get("/license_customer_map")
def license_customer_map(
    email: Optional[str] = None,
    status: Optional[str] = None,
) -> Dict[str, Any]:
    """License → customer mapping with optional email/status filters.

    Args:
        email: case-insensitive match on ``customer_email``.
        status: exact match on license ``status``.

    Adds human-readable UTC timestamps and a ``period_expired`` flag per row.
    """
    db_path = _stripe_db_path()
    if not db_path.exists():
        return {"status": "no_database", "total": 0, "licenses": []}

    try:
        con = sqlite3.connect(str(db_path))
        try:
            con.row_factory = sqlite3.Row

            query = """
                SELECT subscription_id, customer_id, customer_email, status,
                       lookup_key, allowed_users, devices_per_user,
                       current_period_end, client_reference_id, updated_at
                FROM licenses
            """
            conditions: List[str] = []
            params: List[Any] = []
            if email:
                conditions.append("lower(customer_email) = ?")
                params.append(email.strip().lower())
            if status:
                conditions.append("status = ?")
                params.append(status.strip())
            if conditions:
                query += " WHERE " + " AND ".join(conditions)
            query += " ORDER BY updated_at DESC LIMIT 200"

            rows = con.execute(query, params).fetchall()
            counts = _license_counts()
        finally:
            # BUGFIX: close the connection even when the query raises,
            # instead of leaking the handle.
            con.close()

        now_ts = int(time.time())
        licenses = []
        for r in rows:
            row_dict = dict(r)
            cpe = row_dict.get("current_period_end")
            # Generalized: accept float epoch values as well as int.
            if cpe and isinstance(cpe, (int, float)):
                row_dict["period_end_human"] = datetime.fromtimestamp(
                    cpe, tz=timezone.utc
                ).strftime("%Y-%m-%d %H:%M UTC")
                row_dict["period_expired"] = cpe < now_ts
            ua = row_dict.get("updated_at")
            if ua and isinstance(ua, (int, float)):
                row_dict["updated_at_human"] = datetime.fromtimestamp(
                    ua, tz=timezone.utc
                ).strftime("%Y-%m-%d %H:%M UTC")
            licenses.append(row_dict)

        return {
            "status": "ok",
            "total": len(licenses),
            "counts_by_status": counts,
            "filter_email": email or None,
            "filter_status": status or None,
            "licenses": licenses,
        }
    except Exception as e:
        return {"status": "error", "error": str(e)}
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# 6. GET /admin/revenue_overview
|
||||
# ---------------------------------------------------------------------------
|
||||
@router.get("/revenue_overview")
def revenue_overview() -> Dict[str, Any]:
    """Current-month revenue overview.

    Combines three best-effort sources:
      1. Local license DB: subscription counts and an estimated MRR derived
         from the hard-coded CHF price table (_LOOKUP_KEY_PRICES_CHF).
      2. Live Stripe API (only if STRIPE_SECRET_KEY is set): gross charges,
         refunds and net for the month so far.
      3. Webhook events log (JSONL): per-kind event counts for the month.

    Each source degrades independently; the endpoint never raises.
    """
    now = datetime.now(timezone.utc)
    # Month window: everything since 00:00 UTC on the 1st of the current month.
    month_start = now.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
    month_start_ts = int(month_start.timestamp())

    result: Dict[str, Any] = {
        "month": now.strftime("%Y-%m"),
        "currency": "chf",
        # Upgraded to "stripe_api+local_db" below once live data succeeds.
        "data_source": "local_db",
    }

    # --- 1) Subscription counts from the local license DB -------------------
    counts = _license_counts()
    result["active_subscriptions"] = counts.get("active", 0)
    result["canceled_subscriptions"] = counts.get("canceled", 0)
    result["total_subscriptions"] = sum(counts.values())
    result["counts_by_status"] = counts

    # --- Estimated MRR from active licenses and the static price table ------
    db_path = _stripe_db_path()
    counts_by_key: Dict[str, int] = {}
    estimated_mrr_cents = 0
    if db_path.exists():
        try:
            con = sqlite3.connect(str(db_path))
            rows = con.execute(
                "SELECT lookup_key, COUNT(*) FROM licenses WHERE status='active' GROUP BY lookup_key"
            ).fetchall()
            for lk, cnt in rows:
                lk_str = lk or "unknown"
                counts_by_key[lk_str] = cnt
                # Unknown lookup keys contribute 0 to the MRR estimate.
                price = _LOOKUP_KEY_PRICES_CHF.get(lk_str, 0)
                if "yearly" in lk_str:
                    # Yearly plans are spread over 12 months for MRR.
                    estimated_mrr_cents += int(price / 12) * cnt
                else:
                    estimated_mrr_cents += price * cnt
            con.close()
        except Exception:
            # Best effort: a broken DB simply yields an empty/zero estimate.
            pass
    result["counts_by_lookup_key"] = counts_by_key
    # Prices are stored in cents (rappen); expose CHF.
    result["estimated_mrr_chf"] = round(estimated_mrr_cents / 100, 2)

    # --- 2) Live figures from the Stripe API --------------------------------
    stripe_data: Dict[str, Any] = {"available": False}
    stripe_key = os.environ.get("STRIPE_SECRET_KEY", "").strip()
    if stripe_key:
        try:
            # Imported lazily so the endpoint works without the stripe package
            # when no key is configured.
            import stripe as _stripe
            _stripe.api_key = stripe_key

            charges = _stripe.Charge.list(
                created={"gte": month_start_ts},
                limit=100,
            )
            gross_cents = 0
            charge_count = 0
            recent_charges: List[Dict[str, Any]] = []
            # auto_paging_iter() walks all result pages, not just the first 100.
            for ch in charges.auto_paging_iter():
                if ch.status == "succeeded" and ch.paid:
                    gross_cents += ch.amount
                    charge_count += 1
                    recent_charges.append({
                        "amount_chf": round(ch.amount / 100, 2),
                        "email": ch.billing_details.email if ch.billing_details else ch.receipt_email,
                        "date_utc": datetime.fromtimestamp(ch.created, tz=timezone.utc).strftime("%Y-%m-%d %H:%M UTC"),
                        "description": ch.description or "",
                        "charge_id": ch.id,
                    })

            refunds = _stripe.Refund.list(
                created={"gte": month_start_ts},
                limit=100,
            )
            refund_cents = 0
            refund_count = 0
            recent_refunds: List[Dict[str, Any]] = []
            for rf in refunds.auto_paging_iter():
                if rf.status == "succeeded":
                    refund_cents += rf.amount
                    refund_count += 1
                    recent_refunds.append({
                        "amount_chf": round(rf.amount / 100, 2),
                        "date_utc": datetime.fromtimestamp(rf.created, tz=timezone.utc).strftime("%Y-%m-%d %H:%M UTC"),
                        "refund_id": rf.id,
                    })

            stripe_data = {
                "available": True,
                "current_month_gross_chf": round(gross_cents / 100, 2),
                "current_month_charges": charge_count,
                "current_month_refunds_chf": round(refund_cents / 100, 2),
                "current_month_refund_count": refund_count,
                "current_month_net_chf": round((gross_cents - refund_cents) / 100, 2),
                "recent_charges": recent_charges,
                "recent_refunds": recent_refunds,
            }
            result["data_source"] = "stripe_api+local_db"
        except Exception as e:
            # API/network failure: report it but keep the local-DB figures.
            stripe_data = {"available": False, "error": str(e)}

    result["stripe_live"] = stripe_data

    # --- 3) Per-kind event counts from the webhook JSONL log ----------------
    events_path = _events_log_path()
    event_summary: Dict[str, int] = {}
    if events_path.exists():
        try:
            with events_path.open("r", encoding="utf-8", errors="replace") as f:
                for line in f:
                    line = line.strip()
                    if not line:
                        continue
                    try:
                        evt = json.loads(line)
                        # Only count events created within the current month.
                        ts = evt.get("ts", 0)
                        if ts >= month_start_ts:
                            kind = evt.get("kind", "unknown")
                            event_summary[kind] = event_summary.get(kind, 0) + 1
                    except Exception:
                        # Skip malformed JSONL lines.
                        continue
        except Exception:
            pass
    result["current_month_events"] = event_summary

    return result
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# 7. GET /admin/alerts
|
||||
# ---------------------------------------------------------------------------
|
||||
@router.get("/alerts")
def alerts() -> Dict[str, Any]:
    """Evaluate operational alert rules: disk, Stripe env, DB, licenses, backups."""
    found: List[Dict[str, str]] = []

    def _add(alert_id: str, severity: str, message: str) -> None:
        # Small helper so each rule reads as one line.
        found.append({"id": alert_id, "severity": severity, "message": message})

    disk = _disk_usage()
    used_pct = disk.get("used_percent", 0)
    free_gb = disk.get("free_gb", 999)
    if isinstance(used_pct, (int, float)):
        if used_pct >= 95:
            _add(
                "disk_critical",
                "critical",
                f"Disk usage {used_pct}% – less than {free_gb} GB free",
            )
        elif used_pct >= 85:
            _add("disk_high", "warning", f"Disk usage {used_pct}% – {free_gb} GB free")

    if not _stripe_env_ok():
        _add(
            "stripe_not_configured",
            "critical",
            "Stripe env vars (SECRET_KEY / WEBHOOK_SECRET) not set",
        )

    db_path = _stripe_db_path()
    if not db_path.exists():
        _add("db_missing", "warning", f"License database not found at {db_path}")

    counts = _license_counts()
    if not counts or counts.get("active", 0) == 0:
        _add("no_active_licenses", "info", "No active licenses in database")

    backup = _newest_backup_info()
    if not backup["found"]:
        _add("backup_missing", "warning", "No backup folders found in any known path")
    else:
        age_h = backup.get("age_hours", 0)
        if age_h > 48:
            _add(
                "backup_stale",
                "critical",
                f"Latest backup is {backup.get('age_days', '?')} days old ({backup.get('name', '?')})",
            )
        elif age_h > 26:
            _add(
                "backup_old",
                "warning",
                f"Latest backup is {round(age_h, 0):.0f}h old ({backup.get('name', '?')})",
            )

    severity_counts: Dict[str, int] = {"info": 0, "warning": 0, "critical": 0}
    for item in found:
        sev = item.get("severity", "info")
        severity_counts[sev] = severity_counts.get(sev, 0) + 1

    return {
        "total": len(found),
        "counts_by_severity": severity_counts,
        "alerts": found,
    }
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# 8. GET /admin/dashboard_summary
|
||||
# ---------------------------------------------------------------------------
|
||||
@router.get("/dashboard_summary")
def dashboard_summary() -> Dict[str, Any]:
    """Single-call overview for the admin dashboard.

    Aggregates disk usage, license counts, newest backup, alert counts and –
    when Stripe is configured – the current month's gross/refund/net revenue.
    Never raises; the revenue section degrades via its "data_source" marker.
    """
    now = datetime.now(timezone.utc)

    disk = _disk_usage()
    counts = _license_counts()
    backup = _newest_backup_info()
    # Re-uses the /admin/alerts handler as a plain function call.
    alert_data = alerts()

    stripe_ok = _stripe_env_ok()

    total_licenses = sum(counts.values())
    active = counts.get("active", 0)
    canceled = counts.get("canceled", 0)

    # Revenue defaults to "none"; replaced by live Stripe figures below.
    rev: Dict[str, Any] = {
        "gross_chf": None,
        "refunds_chf": None,
        "net_chf": None,
        "data_source": "none",
    }
    stripe_key = os.environ.get("STRIPE_SECRET_KEY", "").strip()
    if stripe_key:
        try:
            # Lazy import: the stripe package is only needed when a key is set.
            import stripe as _stripe
            _stripe.api_key = stripe_key

            # Aggregate only events since 00:00 UTC on the 1st of this month.
            month_start = now.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
            month_start_ts = int(month_start.timestamp())

            gross = 0
            # auto_paging_iter() transparently walks all Stripe result pages.
            for ch in _stripe.Charge.list(created={"gte": month_start_ts}, limit=100).auto_paging_iter():
                if ch.status == "succeeded" and ch.paid:
                    gross += ch.amount
            refund_total = 0
            for rf in _stripe.Refund.list(created={"gte": month_start_ts}, limit=100).auto_paging_iter():
                if rf.status == "succeeded":
                    refund_total += rf.amount

            # Stripe amounts are in cents (rappen); convert to CHF.
            rev = {
                "gross_chf": round(gross / 100, 2),
                "refunds_chf": round(refund_total / 100, 2),
                "net_chf": round((gross - refund_total) / 100, 2),
                "data_source": "stripe_api",
            }
        except Exception:
            # Best effort: API/network failure only flips the data_source marker.
            rev["data_source"] = "stripe_api_error"

    return {
        "timestamp_utc": now.isoformat(),
        "system_ok": True,
        "stripe_ok": stripe_ok,
        "disk_free_gb": disk.get("free_gb"),
        "disk_used_percent": disk.get("used_percent"),
        "latest_backup": backup if backup["found"] else None,
        "licenses": {
            "total": total_licenses,
            "active": active,
            "canceled": canceled,
            # Everything that is neither active nor canceled (trialing, past_due, …).
            "other": total_licenses - active - canceled,
            "counts_by_status": counts,
        },
        "current_month_revenue": rev,
        "alerts_total": alert_data["total"],
        "alerts_by_severity": alert_data["counts_by_severity"],
    }
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# 9. GET /admin/devices?email=...
|
||||
# ---------------------------------------------------------------------------
|
||||
@router.get("/devices")
def admin_devices(email: Optional[str] = None) -> Dict[str, Any]:
    """Device overview for a customer email; without email, list all customers.

    Args:
        email: when given, delegates to ``list_devices_for_email`` for that
            customer; otherwise returns per-customer device counts.
    """
    # NOTE(review): _DEV_DB is imported but unused – kept in case the import
    # has intentional side effects; confirm before removing.
    from aza_device_enforcement import list_devices_for_email, DB_PATH as _DEV_DB

    # Prefer the DB path published by stripe_routes; fall back to the default.
    db_path = str(_BASE_DIR / "data" / "stripe_webhook.sqlite")
    try:
        from stripe_routes import DB_PATH as _SR_DB  # type: ignore
        db_path = _SR_DB
    except Exception:
        pass

    if email and email.strip():
        return list_devices_for_email(email.strip(), db_path=db_path)

    try:
        con = sqlite3.connect(db_path)
        try:
            rows = con.execute(
                """SELECT customer_email, COUNT(*) AS device_count,
                          MAX(last_seen_at) AS last_active
                   FROM device_bindings
                   GROUP BY lower(customer_email)
                   ORDER BY last_active DESC"""
            ).fetchall()
        finally:
            # BUGFIX: close the connection even when the query raises,
            # instead of leaking the handle.
            con.close()
        return {
            "customers": [
                {"email": r[0], "device_count": r[1], "last_active": r[2]}
                for r in rows
            ]
        }
    except Exception as exc:
        return {"error": str(exc), "customers": []}
|
||||
1
AzA march 2026 - Kopie (18)/apps/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
# AZA Add-ons (Audio-Notiz, etc.)
|
||||
@@ -0,0 +1,14 @@
|
||||
# Audio-Notiz Beispielkonfiguration
|
||||
# Datei nach ".env" kopieren und Werte anpassen.
|
||||
|
||||
OPENAI_API_KEY=sk-...
|
||||
|
||||
# Optional: eigenes API-Gateway / Reverse Proxy
|
||||
# OPENAI_BASE_URL=https://api.openai.com/v1
|
||||
|
||||
# Optional: falls Firmen-Proxy notwendig ist
|
||||
# OPENAI_HTTP_PROXY=http://user:pass@proxy.company.local:8080
|
||||
# OPENAI_HTTPS_PROXY=http://user:pass@proxy.company.local:8080
|
||||
|
||||
# Optionales Modell
|
||||
# TRANSCRIBE_MODEL=gpt-4o-mini-transcribe
|
||||
@@ -0,0 +1,13 @@
|
||||
@echo off
REM Launcher for the AZA Audio-Notiz standalone app.
title AZA Audio-Notiz
REM Always run from the directory this script lives in.
cd /d "%~dp0"
python audio_notiz_app.py
REM Non-zero exit code: Python missing or required packages not installed.
if %errorlevel% neq 0 (
    echo.
    echo Fehler beim Starten. Stellen Sie sicher, dass Python installiert ist.
    echo Benoetigte Pakete: openai, sounddevice, numpy, python-dotenv
    echo.
    echo Installation: pip install openai sounddevice numpy python-dotenv
    echo.
    pause
)
|
||||
1
AzA march 2026 - Kopie (18)/apps/diktat/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
# AZA Audio-Notiz / Diktat Add-on
|
||||
23
AzA march 2026 - Kopie (18)/apps/diktat/audio_notiz_app.py
Normal file
@@ -0,0 +1,23 @@
|
||||
"""Startskript fuer die Audio-Notiz (Standalone)."""
|
||||
|
||||
import sys
|
||||
import os
|
||||
|
||||
|
||||
def main():
    """Entry point for launchers/imports (also works when frozen)."""
    here = os.path.dirname(os.path.abspath(__file__))
    if here not in sys.path:
        sys.path.insert(0, here)

    # Package-relative import first; plain module import as standalone fallback.
    try:
        from apps.diktat.diktat_app import DiktatApp
    except ImportError:
        from diktat_app import DiktatApp

    DiktatApp().mainloop()


if __name__ == "__main__":
    main()
|
||||
@@ -0,0 +1,3 @@
|
||||
{
|
||||
"geometry": "120x78+2019+80"
|
||||
}
|
||||
1264
AzA march 2026 - Kopie (18)/apps/diktat/diktat_app.py
Normal file
|
After Width: | Height: | Size: 39 KiB |
|
After Width: | Height: | Size: 281 KiB |
|
After Width: | Height: | Size: 82 KiB |
|
After Width: | Height: | Size: 24 KiB |
|
After Width: | Height: | Size: 77 KiB |
BIN
AzA march 2026 - Kopie (18)/assets/aza ki arbeitsplatz 2.png
Normal file
|
After Width: | Height: | Size: 37 KiB |
BIN
AzA march 2026 - Kopie (18)/assets/aza ki arbeitsplatz.png
Normal file
|
After Width: | Height: | Size: 39 KiB |
BIN
AzA march 2026 - Kopie (18)/assets/aza ki assistent.png
Normal file
|
After Width: | Height: | Size: 70 KiB |
BIN
AzA march 2026 - Kopie (18)/assets/blaufuss II.jpg
Normal file
|
After Width: | Height: | Size: 32 KiB |
BIN
AzA march 2026 - Kopie (18)/assets/logo.png
Normal file
|
After Width: | Height: | Size: 5.6 KiB |
BIN
AzA march 2026 - Kopie (18)/assets/paperclip.png
Normal file
|
After Width: | Height: | Size: 16 KiB |
BIN
AzA march 2026 - Kopie (18)/assets/setting_icon2.png
Normal file
|
After Width: | Height: | Size: 28 KiB |
BIN
AzA march 2026 - Kopie (18)/assets/settings_icon.png
Normal file
|
After Width: | Height: | Size: 6.2 KiB |
|
After Width: | Height: | Size: 413 KiB |
BIN
AzA march 2026 - Kopie (18)/assets/wassertropfen aza medwork.png
Normal file
|
After Width: | Height: | Size: 1.4 MiB |
BIN
AzA march 2026 - Kopie (18)/assets/zahnrad.png
Normal file
|
After Width: | Height: | Size: 28 KiB |
3
AzA march 2026 - Kopie (18)/audio_notiz_settings.json
Normal file
@@ -0,0 +1,3 @@
|
||||
{
|
||||
"geometry": "350x523+1770+935"
|
||||
}
|
||||
16
AzA march 2026 - Kopie (18)/authorized_status.ps1
Normal file
@@ -0,0 +1,16 @@
|
||||
# Query the local AZA backend's project-status endpoint with the stored token.
cd "$PSScriptRoot"

# Read the API token saved next to this script; trim the trailing newline.
$tok = (Get-Content .\backend_token.txt -Raw).Trim()

# Auth headers expected by the backend.
$headers = @{
    "X-API-Token" = $tok
    "X-Device-Id" = "pc-winterthur"
}

# -UseBasicParsing avoids the Internet Explorer dependency on older PowerShell.
$r = Invoke-WebRequest `
    -UseBasicParsing `
    -Headers $headers `
    -TimeoutSec 30 `
    "http://127.0.0.1:8000/api/project/status"

# Print the raw JSON response body.
$r.Content
|
||||
206
AzA march 2026 - Kopie (18)/aza_activation.py
Normal file
@@ -0,0 +1,206 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
AZA Aktivierungsschlüssel-System.
|
||||
|
||||
Ermöglicht dem Entwickler, Freigabeschlüssel für beliebige Geräte zu
|
||||
erzeugen. Die App prüft beim Start:
|
||||
1. Hartes Ablaufdatum (APP_HARD_EXPIRY)
|
||||
2. Gültigen Aktivierungsschlüssel (optional, verlängert über APP_HARD_EXPIRY hinaus)
|
||||
|
||||
Schlüssel-Format: AZA-YYYYMMDD-<hmac_hex[:12]>
|
||||
- YYYY-MM-DD = Ablaufdatum des Schlüssels
|
||||
- HMAC-SHA256(expiry_str, secret)[:12] = Signatur
|
||||
"""
|
||||
|
||||
import hmac
|
||||
import hashlib
|
||||
import json
|
||||
import os
|
||||
import time
|
||||
from datetime import datetime, date
|
||||
from typing import Optional, Tuple
|
||||
|
||||
from aza_config import (
|
||||
get_writable_data_dir,
|
||||
ACTIVATION_CONFIG_FILENAME,
|
||||
ACTIVATION_HMAC_SECRET,
|
||||
APP_HARD_EXPIRY,
|
||||
APP_TRIAL_DAYS,
|
||||
)
|
||||
|
||||
|
||||
def _activation_path() -> str:
    """Return the absolute path of the persisted activation-key file."""
    base = get_writable_data_dir()
    return os.path.join(base, ACTIVATION_CONFIG_FILENAME)
|
||||
|
||||
|
||||
def _sign(expiry_str: str) -> str:
    """Return the 12-hex-char HMAC-SHA256 signature for an expiry-date string."""
    mac = hmac.new(
        key=ACTIVATION_HMAC_SECRET.encode("utf-8"),
        msg=expiry_str.encode("utf-8"),
        digestmod=hashlib.sha256,
    )
    return mac.hexdigest()[:12]
|
||||
|
||||
|
||||
# ── Schlüssel generieren (für den Entwickler) ──────────────────────
|
||||
|
||||
def generate_key(expiry_date: str) -> str:
    """Create an activation key for a given expiry date.

    Args:
        expiry_date: Expiry date as 'YYYY-MM-DD'.

    Returns:
        Key in the format 'AZA-YYYYMMDD-<sig>'.

    Raises:
        ValueError: If *expiry_date* is not a valid 'YYYY-MM-DD' date.
    """
    compact = datetime.strptime(expiry_date, "%Y-%m-%d").strftime("%Y%m%d")
    return "-".join(("AZA", compact, _sign(compact)))
|
||||
|
||||
|
||||
# ── Schlüssel validieren ───────────────────────────────────────────
|
||||
|
||||
def validate_key(key: str) -> Tuple[bool, Optional[date], str]:
    """Validate an activation key.

    Returns:
        (valid, expiry_date_or_None, human-readable reason text)
    """
    if not key or not isinstance(key, str):
        return False, None, "Kein Schlüssel eingegeben."

    pieces = key.strip().upper().split("-")
    if len(pieces) != 3 or pieces[0] != "AZA":
        return False, None, "Ungültiges Schlüsselformat."

    raw_date, signature = pieces[1], pieces[2].lower()

    try:
        expiry = datetime.strptime(raw_date, "%Y%m%d").date()
    except ValueError:
        return False, None, "Ungültiges Datum im Schlüssel."

    # Constant-time comparison guards against timing attacks on the signature.
    if not hmac.compare_digest(signature, _sign(raw_date)):
        return False, None, "Schlüssel-Signatur ungültig."

    if expiry < date.today():
        return False, expiry, f"Schlüssel abgelaufen am {expiry.strftime('%d.%m.%Y')}."

    return True, expiry, f"Gültig bis {expiry.strftime('%d.%m.%Y')}."
|
||||
|
||||
|
||||
# ── Persistenz ─────────────────────────────────────────────────────
|
||||
|
||||
def save_activation_key(key: str) -> None:
    """Persist the trimmed key together with a save timestamp as JSON."""
    payload = {"key": key.strip(), "saved_at": int(time.time())}
    with open(_activation_path(), "w", encoding="utf-8") as fh:
        json.dump(payload, fh, ensure_ascii=False, indent=2)
|
||||
|
||||
|
||||
def load_activation_key() -> Optional[str]:
    """Return the stored activation key, or None if missing or unreadable."""
    cfg = _activation_path()
    if not os.path.isfile(cfg):
        return None
    try:
        with open(cfg, "r", encoding="utf-8") as fh:
            return json.load(fh).get("key")
    except Exception:
        # A corrupt or unreadable file is treated like "no key stored".
        return None
|
||||
|
||||
|
||||
# ── Startup-Check ──────────────────────────────────────────────────
|
||||
|
||||
def _get_install_date_file() -> str:
    """Path of the JSON file that records the first-install date."""
    data_dir = get_writable_data_dir()
    return os.path.join(data_dir, "install_date.json")
|
||||
|
||||
|
||||
def _get_install_date() -> date:
    """Return the first-install date; create the record on first call."""
    marker = _get_install_date_file()
    if os.path.isfile(marker):
        try:
            with open(marker, "r", encoding="utf-8") as fh:
                stored = json.load(fh)["install_date"]
            return datetime.strptime(stored, "%Y-%m-%d").date()
        except Exception:
            pass  # unreadable/corrupt marker: fall through and rewrite it

    today = date.today()
    try:
        os.makedirs(os.path.dirname(marker), exist_ok=True)
        with open(marker, "w", encoding="utf-8") as fh:
            json.dump({"install_date": today.strftime("%Y-%m-%d")}, fh)
    except Exception:
        pass  # best effort: an unwritable dir still yields today's date
    return today
|
||||
|
||||
|
||||
def check_app_access() -> Tuple[bool, str]:
    """Decide whether the application may start.

    Order of checks:
    1. Stored activation key present and valid?              -> allowed
    2. Hard expiry date (APP_HARD_EXPIRY) passed? Safety net -> blocked
    3. Trial period (APP_TRIAL_DAYS since install) active?   -> allowed
    4. Otherwise                                             -> blocked

    Returns:
        (allowed, user_message)
    """
    from datetime import timedelta

    stored_key = load_activation_key()
    if stored_key:
        valid, expiry, _reason = validate_key(stored_key)
        if valid:
            days_left = (expiry - date.today()).days
            return True, f"Aktiviert bis {expiry.strftime('%d.%m.%Y')} ({days_left} Tage verbleibend)."

    today = date.today()
    trial_end = _get_install_date() + timedelta(days=APP_TRIAL_DAYS)

    try:
        hard_expiry = datetime.strptime(APP_HARD_EXPIRY, "%Y-%m-%d").date()
    except ValueError:
        hard_expiry = None  # malformed config date: skip the safety net

    # Safety net: past the hard expiry nothing short of a fresh key helps.
    if hard_expiry is not None and today > hard_expiry:
        if stored_key:
            _, _, reason = validate_key(stored_key)
            return False, f"Testphase und Aktivierung abgelaufen.\n{reason}\nBitte neuen Aktivierungsschlüssel eingeben."
        return False, "Testphase abgelaufen.\nBitte Aktivierungsschlüssel eingeben, um fortzufahren."

    if today <= trial_end:
        days_left = (trial_end - today).days
        return True, f"Testphase: noch {days_left} Tag(e) verbleibend (bis {trial_end.strftime('%d.%m.%Y')})."

    if stored_key:
        _, _, reason = validate_key(stored_key)
        return False, f"Testphase abgelaufen.\n{reason}\nBitte neuen Aktivierungsschlüssel eingeben."

    return False, f"Die {APP_TRIAL_DAYS}-tägige Testphase ist abgelaufen.\nBitte Aktivierungsschlüssel eingeben, um fortzufahren."
|
||||
|
||||
|
||||
# ── CLI helper for generating keys (developer use) ─────────────────

if __name__ == "__main__":
    import sys as _sys  # local alias: keeps the module namespace clean on import

    # Exactly one argument required: the expiry date of the new key.
    if len(_sys.argv) < 2:
        print("Verwendung: python aza_activation.py <YYYY-MM-DD>")
        print("Beispiel: python aza_activation.py 2026-06-30")
        _sys.exit(1)

    exp = _sys.argv[1]
    key = generate_key(exp)
    print(f"\nAktivierungsschlüssel generiert:")
    print(f" Ablaufdatum: {exp}")
    print(f" Schlüssel: {key}")

    # Round-trip self-check: the freshly generated key must validate.
    ok, dt, msg = validate_key(key)
    print(f" Validierung: {'OK' if ok else 'FEHLER'} – {msg}")
|
||||
309
AzA march 2026 - Kopie (18)/aza_admin.py
Normal file
@@ -0,0 +1,309 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
AZA Admin-Panel – Verstecktes Administrations-Fenster.
|
||||
Zugang ausschliesslich über Doppelklick auf das AZA-Logo im Launcher.
|
||||
"""
|
||||
|
||||
import hashlib
|
||||
import os
|
||||
import sys
|
||||
import threading
|
||||
import tkinter as tk
|
||||
from tkinter import messagebox
|
||||
|
||||
from aza_config import get_writable_data_dir, DEFAULT_TOKEN_QUOTA
|
||||
from aza_persistence import (
|
||||
load_token_usage,
|
||||
reset_token_allowance,
|
||||
get_remaining_tokens,
|
||||
get_location_display,
|
||||
log_installation_location,
|
||||
load_installation_location,
|
||||
get_install_count,
|
||||
)
|
||||
from aza_style import (
|
||||
ACCENT, ACCENT_HOVER, TEXT, SUBTLE, BORDER,
|
||||
FONT_FAMILY, format_number_de,
|
||||
)
|
||||
|
||||
_BG = "#FFFFFF"
|
||||
_SECTION_BG = "#F8FAFC"
|
||||
|
||||
_ADMIN_PW_HASH = "0fa4e974f520d0419a2f6c5a03c5d64bdf8f97097a506ff8857bd3072f29c72d"
|
||||
|
||||
|
||||
def _check_password(pw: str) -> bool:
    """Compare the SHA-256 digest of *pw* against the stored admin hash."""
    digest = hashlib.sha256(pw.encode("utf-8")).hexdigest()
    return digest == _ADMIN_PW_HASH
|
||||
|
||||
|
||||
def show_admin_login(parent) -> bool:
    """Show the admin login dialog. Returns True on successful login."""
    # Mutable cell so the nested callback can report success outward.
    result = {"ok": False}

    dlg = tk.Toplevel(parent)
    dlg.title("AZA Administration")
    dlg.configure(bg=_BG)
    dlg.resizable(False, False)
    w, h = 400, 240
    dlg.geometry(f"{w}x{h}")
    dlg.attributes("-topmost", True)

    # Center the dialog on the screen (best effort; ignore Tk errors).
    try:
        dlg.update_idletasks()
        sw = dlg.winfo_screenwidth()
        sh = dlg.winfo_screenheight()
        dlg.geometry(f"{w}x{h}+{(sw - w) // 2}+{(sh - h) // 2}")
    except Exception:
        pass

    content = tk.Frame(dlg, bg=_BG)
    content.pack(fill="both", expand=True, padx=36, pady=24)

    tk.Label(content, text="\U0001F6E0",
             font=(FONT_FAMILY, 24), fg=ACCENT, bg=_BG).pack(anchor="w")

    tk.Label(content, text="Admin-Zugang",
             font=(FONT_FAMILY, 16, "bold"), fg=TEXT, bg=_BG
             ).pack(anchor="w", pady=(4, 12))

    # Entry sits inside a border-colored frame to fake a 1px outline.
    pw_frame = tk.Frame(content, bg=BORDER)
    pw_frame.pack(fill="x", pady=(0, 6))
    pw_entry = tk.Entry(pw_frame, font=(FONT_FAMILY, 12), bg="white", fg=TEXT,
                        relief="flat", bd=0, show="\u2022")
    pw_entry.pack(fill="x", ipady=7, padx=2, pady=2)

    # Inline error line (red) below the entry field.
    status = tk.Label(content, text="", font=(FONT_FAMILY, 9), fg="#E05050", bg=_BG)
    status.pack(anchor="w", pady=(0, 10))

    def do_login(event=None):
        # Verify against the stored hash; clear the field on failure.
        if _check_password(pw_entry.get()):
            result["ok"] = True
            dlg.destroy()
        else:
            status.configure(text="\u26A0 Falsches Passwort.")
            pw_entry.delete(0, "end")

    pw_entry.bind("<Return>", do_login)

    btn = tk.Button(content, text="Anmelden", font=(FONT_FAMILY, 11, "bold"),
                    bg=ACCENT, fg="white", activebackground=ACCENT_HOVER,
                    activeforeground="white",
                    relief="flat", bd=0, padx=22, pady=8, cursor="hand2",
                    command=do_login)
    btn.pack(anchor="w")
    # Manual hover effect (a flat tk.Button has no built-in hover styling).
    btn.bind("<Enter>", lambda e: btn.configure(bg=ACCENT_HOVER))
    btn.bind("<Leave>", lambda e: btn.configure(bg=ACCENT))

    dlg.protocol("WM_DELETE_WINDOW", dlg.destroy)
    pw_entry.focus_set()
    dlg.grab_set()  # modal: block interaction with the parent window
    parent.wait_window(dlg)

    return result["ok"]
|
||||
|
||||
|
||||
def show_admin_panel(parent):
    """Open the admin panel window (after a successful login).

    Three sections: AI credit budget, installation/location info, and the
    AZA network install count. Slow lookups (geolocation, install count)
    run in daemon threads so the UI stays responsive.
    """
    win = tk.Toplevel(parent)
    win.title("AZA \u2013 MedWork Administration")
    win.configure(bg=_BG)
    win.resizable(True, True)
    w, h = 580, 640
    win.minsize(500, 520)
    win.geometry(f"{w}x{h}")
    win.attributes("-topmost", True)

    # Center on screen (best effort; ignore Tk errors).
    try:
        win.update_idletasks()
        sw = win.winfo_screenwidth()
        sh = win.winfo_screenheight()
        win.geometry(f"{w}x{h}+{(sw - w) // 2}+{(sh - h) // 2}")
    except Exception:
        pass

    outer = tk.Frame(win, bg=_BG)
    outer.pack(fill="both", expand=True, padx=32, pady=24)

    tk.Label(outer, text="\U0001F6E0 AZA \u2013 MedWork Administration",
             font=(FONT_FAMILY, 18, "bold"), fg=TEXT, bg=_BG
             ).pack(anchor="w", pady=(0, 16))

    # ── AI credit section ────────────────────────────────────────────
    sec1 = tk.LabelFrame(outer, text=" KI-Guthaben ",
                         font=(FONT_FAMILY, 11, "bold"), fg=TEXT,
                         bg=_SECTION_BG, bd=1, relief="solid",
                         highlightbackground=BORDER, highlightthickness=1)
    sec1.pack(fill="x", pady=(0, 14), ipady=8)

    inner1 = tk.Frame(sec1, bg=_SECTION_BG)
    inner1.pack(fill="x", padx=20, pady=8)

    data = load_token_usage()
    used = data.get("used", 0)
    total = data.get("total", DEFAULT_TOKEN_QUOTA)
    remaining = get_remaining_tokens()

    info_lines = [
        ("Gesamt-Budget:", format_number_de(total)),
        ("Verbraucht:", format_number_de(used)),
        ("Verbleibend:", format_number_de(remaining)),
    ]

    # Keep label handles so do_reset() can refresh the numbers in place.
    value_labels = {}
    for label_text, value_text in info_lines:
        row = tk.Frame(inner1, bg=_SECTION_BG)
        row.pack(fill="x", pady=2)
        tk.Label(row, text=label_text, font=(FONT_FAMILY, 10),
                 fg=SUBTLE, bg=_SECTION_BG, width=16, anchor="w").pack(side="left")
        vl = tk.Label(row, text=value_text, font=(FONT_FAMILY, 10, "bold"),
                      fg=TEXT, bg=_SECTION_BG, anchor="w")
        vl.pack(side="left")
        value_labels[label_text] = vl

    btn_frame = tk.Frame(inner1, bg=_SECTION_BG)
    btn_frame.pack(fill="x", pady=(10, 0))

    def do_reset():
        # Confirm, reset usage to 0 / quota to the default, then update the
        # three value labels in place without rebuilding the window.
        answer = messagebox.askyesno(
            "Guthaben aufladen",
            f"KI-Guthaben auf {format_number_de(DEFAULT_TOKEN_QUOTA)} Einheiten aufladen?\n\n"
            "Der bisherige Verbrauch wird auf 0 gesetzt.",
            parent=win,
        )
        if answer:
            reset_token_allowance(DEFAULT_TOKEN_QUOTA)
            value_labels["Gesamt-Budget:"].configure(text=format_number_de(DEFAULT_TOKEN_QUOTA))
            value_labels["Verbraucht:"].configure(text=format_number_de(0))
            value_labels["Verbleibend:"].configure(text=format_number_de(DEFAULT_TOKEN_QUOTA))
            messagebox.showinfo(
                "Erledigt",
                f"Guthaben wurde auf {format_number_de(DEFAULT_TOKEN_QUOTA)} Einheiten aufgeladen.",
                parent=win,
            )

    btn_reset = tk.Button(
        btn_frame,
        text=f"\u21BB Guthaben auf {format_number_de(DEFAULT_TOKEN_QUOTA)} Einheiten aufladen",
        font=(FONT_FAMILY, 10, "bold"),
        bg=ACCENT, fg="white", activebackground=ACCENT_HOVER,
        activeforeground="white",
        relief="flat", bd=0, padx=18, pady=7, cursor="hand2",
        command=do_reset,
    )
    btn_reset.pack(side="left")
    # Manual hover styling for the flat button.
    btn_reset.bind("<Enter>", lambda e: btn_reset.configure(bg=ACCENT_HOVER))
    btn_reset.bind("<Leave>", lambda e: btn_reset.configure(bg=ACCENT))

    # ── Installation & location section ──────────────────────────────
    sec2 = tk.LabelFrame(outer, text=" Installation ",
                         font=(FONT_FAMILY, 11, "bold"), fg=TEXT,
                         bg=_SECTION_BG, bd=1, relief="solid",
                         highlightbackground=BORDER, highlightthickness=1)
    sec2.pack(fill="x", pady=(0, 14), ipady=8)

    inner2 = tk.Frame(sec2, bg=_SECTION_BG)
    inner2.pack(fill="x", padx=20, pady=8)

    data_dir = get_writable_data_dir()
    # Frozen build: report the executable's folder, else the source folder.
    exe_dir = (os.path.dirname(os.path.abspath(sys.executable))
               if getattr(sys, "frozen", False)
               else os.path.dirname(os.path.abspath(__file__)))

    device_id = _get_device_id_short()

    # Vault status is optional – the security_vault module may be absent.
    vault_status = "Nicht eingerichtet"
    try:
        from security_vault import has_vault_key, get_masked_key
        if has_vault_key():
            vault_status = f"Aktiviert ({get_masked_key()})"
    except Exception:
        pass

    location_text = get_location_display()

    install_lines = [
        ("Geräte-ID:", device_id),
        ("Standort:", location_text),
        ("Datenverzeichnis:", data_dir),
        ("Installationsordner:", exe_dir),
        ("API-Key Tresor:", vault_status),
    ]

    # Remember the location label so the background fetch can update it.
    location_value_label = None
    for label_text, value_text in install_lines:
        row = tk.Frame(inner2, bg=_SECTION_BG)
        row.pack(fill="x", pady=2)
        tk.Label(row, text=label_text, font=(FONT_FAMILY, 10),
                 fg=SUBTLE, bg=_SECTION_BG, width=18, anchor="w").pack(side="left")
        vl = tk.Label(row, text=value_text, font=(FONT_FAMILY, 9),
                      fg=TEXT, bg=_SECTION_BG, anchor="w",
                      wraplength=300, justify="left")
        vl.pack(side="left", fill="x")
        if label_text == "Standort:":
            location_value_label = vl

    # Fetch the location lazily in the background if not known yet.
    if location_text == "Nicht ermittelt" and location_value_label:
        def _fetch_location():
            loc = log_installation_location()
            if loc and location_value_label.winfo_exists():
                display = get_location_display()
                try:
                    location_value_label.configure(text=display)
                except Exception:
                    pass  # window may have been closed meanwhile
        threading.Thread(target=_fetch_location, daemon=True).start()

    # ── AZA network section ──────────────────────────────────────────
    sec3 = tk.LabelFrame(outer, text=" AZA Netzwerk ",
                         font=(FONT_FAMILY, 11, "bold"), fg=TEXT,
                         bg=_SECTION_BG, bd=1, relief="solid",
                         highlightbackground=BORDER, highlightthickness=1)
    sec3.pack(fill="x", pady=(0, 14), ipady=8)

    inner3 = tk.Frame(sec3, bg=_SECTION_BG)
    inner3.pack(fill="x", padx=20, pady=8)

    net_row = tk.Frame(inner3, bg=_SECTION_BG)
    net_row.pack(fill="x", pady=2)
    tk.Label(net_row, text="Aktive Praxen:", font=(FONT_FAMILY, 10),
             fg=SUBTLE, bg=_SECTION_BG, width=18, anchor="w").pack(side="left")
    count_label = tk.Label(net_row, text="Wird geladen\u2026",
                           font=(FONT_FAMILY, 10, "bold"),
                           fg=TEXT, bg=_SECTION_BG, anchor="w")
    count_label.pack(side="left")

    def _load_count():
        # Background worker: query the install count, then update the label.
        count, is_live = get_install_count()
        if not count_label.winfo_exists():
            return
        suffix = "" if is_live else " (lokal)"
        try:
            count_label.configure(
                text=f"{format_number_de(count)} Computer{suffix}")
        except Exception:
            pass

    threading.Thread(target=_load_count, daemon=True).start()

    # ── Close button ─────────────────────────────────────────────────
    tk.Button(outer, text="Schliessen", font=(FONT_FAMILY, 10),
              bg=_BG, fg=SUBTLE, activebackground="#F0F0F0",
              relief="solid", bd=1, padx=18, pady=6, cursor="hand2",
              highlightbackground=BORDER,
              command=win.destroy).pack(anchor="e", pady=(10, 0))

    win.grab_set()  # modal relative to the launcher window
|
||||
|
||||
|
||||
def _get_device_id_short() -> str:
|
||||
"""Kurzform der Geräte-ID (anonymisiert)."""
|
||||
try:
|
||||
import platform
|
||||
raw = f"{platform.node()}-{platform.machine()}-{os.getlogin()}"
|
||||
return hashlib.sha256(raw.encode()).hexdigest()[:12].upper()
|
||||
except Exception:
|
||||
return "unbekannt"
|
||||
211
AzA march 2026 - Kopie (18)/aza_ai_client.py
Normal file
@@ -0,0 +1,211 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Zentraler KI-Client für AZA.
|
||||
|
||||
Liefert entweder einen Backend-Proxy oder einen lokalen OpenAI-Client.
|
||||
Im Remote-Backend-Modus wird KEIN lokaler API-Key benötigt –
|
||||
alle Anfragen gehen über POST /v1/chat an das Backend.
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
from types import SimpleNamespace
|
||||
|
||||
import requests
|
||||
|
||||
|
||||
def _search_dirs() -> list[str]:
|
||||
dirs: list[str] = []
|
||||
if getattr(sys, "frozen", False):
|
||||
_exe = os.path.dirname(os.path.abspath(sys.executable))
|
||||
dirs.append(_exe)
|
||||
dirs.append(os.path.join(_exe, "_internal"))
|
||||
_src = os.path.dirname(os.path.abspath(__file__))
|
||||
dirs.append(_src)
|
||||
dirs.append(os.path.join(_src, "_internal"))
|
||||
dirs.append(os.getcwd())
|
||||
return dirs
|
||||
|
||||
|
||||
def _read_file_value(filename: str) -> str | None:
    """Return the stripped content of *filename* from the first search dir
    holding a non-empty, readable copy; None when no such copy exists."""
    visited: set[str] = set()
    for base in _search_dirs():
        if not base or base in visited:
            continue
        visited.add(base)
        candidate = os.path.join(base, filename)
        if not os.path.isfile(candidate):
            continue
        try:
            # utf-8-sig swallows a leading BOM; strip any stray BOMs too.
            with open(candidate, "r", encoding="utf-8-sig") as fh:
                text = fh.read().replace("\ufeff", "").strip()
        except Exception:
            continue  # unreadable copy: try the next location
        if text:
            return text
    return None
|
||||
|
||||
|
||||
def _clean(value) -> str | None:
|
||||
if value is None:
|
||||
return None
|
||||
v = str(value).replace("\ufeff", "").strip()
|
||||
return v if v else None
|
||||
|
||||
|
||||
def read_backend_url() -> str | None:
    """Backend base URL: env MEDWORK_BACKEND_URL first, else backend_url.txt."""
    env_url = _clean(os.getenv("MEDWORK_BACKEND_URL"))
    if env_url:
        return env_url.rstrip("/")
    file_url = _read_file_value("backend_url.txt")
    if file_url:
        return file_url.rstrip("/")
    return None
|
||||
|
||||
|
||||
def read_backend_token() -> str | None:
    """API token lookup order: backend_token.txt, then the first entry of
    MEDWORK_API_TOKENS, then MEDWORK_API_TOKEN."""
    from_file = _read_file_value("backend_token.txt")
    if from_file:
        return from_file
    multi = os.getenv("MEDWORK_API_TOKENS", "").strip()
    if multi:
        first = _clean(multi.split(",")[0])
        if first:
            return first
    return _clean(os.getenv("MEDWORK_API_TOKEN"))
|
||||
|
||||
|
||||
def has_remote_backend() -> bool:
    """True when a backend URL is configured and does not point at localhost."""
    url = read_backend_url()
    if not url:
        return False
    local_markers = ("127.0.0.1", "localhost", "0.0.0.0")
    return all(marker not in url for marker in local_markers)
|
||||
|
||||
|
||||
# ── Backend-Proxy-Klassen (Drop-In für OpenAI-Client) ──────────────
|
||||
|
||||
|
||||
class _BackendCompletions:
    """Mimics `openai.chat.completions` by POSTing to the backend's /v1/chat."""

    def __init__(self, backend_url: str, backend_token: str):
        self._url = backend_url
        self._token = backend_token

    def create(self, **kwargs):
        """Send a chat-completion request; return an OpenAI-shaped namespace.

        Raises:
            requests.HTTPError: on non-2xx backend responses.
            RuntimeError: when the backend reports success=False.
        """
        payload: dict = {
            "model": kwargs.get("model", "gpt-4o"),
            # Accept both plain-dict messages and OpenAI message objects.
            "messages": [
                {"role": m["role"], "content": m["content"]}
                if isinstance(m, dict)
                else {"role": m.role, "content": m.content}
                for m in kwargs.get("messages", [])
            ],
        }
        # Forward optional sampling parameters only when explicitly set.
        if kwargs.get("temperature") is not None:
            payload["temperature"] = kwargs["temperature"]
        if kwargs.get("max_tokens") is not None:
            payload["max_tokens"] = kwargs["max_tokens"]
        if kwargs.get("top_p") is not None:
            payload["top_p"] = kwargs["top_p"]

        # (connect, read) timeouts: fail fast on connect, allow long generations.
        r = requests.post(
            f"{self._url}/v1/chat",
            json=payload,
            headers={"X-API-Token": self._token},
            timeout=(5, 180),
        )
        r.raise_for_status()
        data = r.json()
        if not data.get("success"):
            raise RuntimeError(data.get("error", "Backend-Chat fehlgeschlagen"))

        # Swiss-German output convention: replace "ß" with "ss".
        content = (data.get("content") or "").replace("ß", "ss")
        msg = SimpleNamespace(content=content, role="assistant")
        choice = SimpleNamespace(message=msg, finish_reason=data.get("finish_reason"))
        usage = None
        usage_data = data.get("usage")
        if usage_data:
            usage = SimpleNamespace(
                prompt_tokens=usage_data.get("prompt_tokens", 0),
                completion_tokens=usage_data.get("completion_tokens", 0),
                total_tokens=usage_data.get("total_tokens", 0),
            )
        return SimpleNamespace(choices=[choice], usage=usage, model=data.get("model", ""))
|
||||
|
||||
|
||||
class _BackendChat:
    """Namespace shim: exposes `.completions` like the OpenAI client's `.chat`."""

    def __init__(self, backend_url: str, backend_token: str):
        self.completions = _BackendCompletions(backend_url, backend_token)
|
||||
|
||||
|
||||
class _BackendTranscriptions:
    """Mimics `openai.audio.transcriptions` via the backend's /v1/transcribe."""

    def __init__(self, backend_url: str, backend_token: str):
        self._url = backend_url
        self._token = backend_token

    def create(self, **kwargs):
        """Upload an audio file for transcription; return `.text` namespace.

        Raises:
            ValueError: when no 'file' argument is passed.
            requests.HTTPError: on non-2xx backend responses.
            RuntimeError: when the backend reports success=False.
        """
        file_obj = kwargs.get("file")
        if file_obj is None:
            raise ValueError("Kein 'file'-Argument für Transkription übergeben.")
        language = kwargs.get("language", "de")
        prompt = kwargs.get("prompt", "")

        # Derive a content type from the file extension (recorder produces
        # m4a normally, wav in the ffmpeg-less fallback).
        fname = getattr(file_obj, "name", "audio.m4a")
        ext = os.path.splitext(fname)[1].lower() if fname else ".m4a"
        ct = "audio/mp4" if ext == ".m4a" else "audio/wav"

        # Long read timeout: transcribing long recordings can take minutes.
        r = requests.post(
            f"{self._url}/v1/transcribe",
            files={"file": (os.path.basename(fname), file_obj, ct)},
            data={"language": language, "prompt": prompt},
            headers={"X-API-Token": self._token},
            timeout=(5, 300),
        )
        r.raise_for_status()
        data = r.json()
        if not data.get("success"):
            raise RuntimeError(data.get("error", "Transkription fehlgeschlagen"))
        return SimpleNamespace(text=data.get("transcript", ""))
|
||||
|
||||
|
||||
class _BackendAudio:
    """Namespace shim: exposes `.transcriptions` like the OpenAI client's `.audio`."""

    def __init__(self, backend_url: str, backend_token: str):
        self.transcriptions = _BackendTranscriptions(backend_url, backend_token)
|
||||
|
||||
|
||||
class BackendChatProxy:
    """Drop-in replacement for the OpenAI client in remote-backend mode.

    Supports:
      - proxy.chat.completions.create(model=..., messages=..., ...)
      - proxy.audio.transcriptions.create(model=..., file=..., language=...)
    """

    def __init__(self, backend_url: str, backend_token: str):
        self.chat = _BackendChat(backend_url, backend_token)
        self.audio = _BackendAudio(backend_url, backend_token)
|
||||
|
||||
|
||||
def get_ai_client():
    """Return an AI client.

    - Remote backend available -> BackendChatProxy (no local key needed)
    - Otherwise                -> local OpenAI client (needs an API key)
    - Neither possible         -> RuntimeError
    """
    if has_remote_backend():
        url = read_backend_url()
        token = read_backend_token()
        if url and token:
            return BackendChatProxy(url, token)

    # Fallback: local OpenAI client, if a key can be resolved.
    try:
        from openai_runtime_config import get_openai_api_key
        local_key = get_openai_api_key()
        if local_key:
            from openai import OpenAI
            return OpenAI(api_key=local_key)
    except Exception:
        pass  # missing package or key: fall through to the error below

    raise RuntimeError(
        "KI-Verbindung nicht verfügbar.\n\n"
        "Weder ein Remote-Backend noch ein lokaler\n"
        "OpenAI-Schlüssel ist konfiguriert."
    )
|
||||
1681
AzA march 2026 - Kopie (18)/aza_arbeitsplan_mixin.py
Normal file
504
AzA march 2026 - Kopie (18)/aza_audio.py
Normal file
@@ -0,0 +1,504 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
AudioRecorder – Aufnahme direkt als M4A (AAC via ffmpeg-Pipe).
|
||||
Kein WAV-Zwischenschritt. Fallback auf WAV nur wenn ffmpeg fehlt.
|
||||
"""
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import tempfile
|
||||
import wave
|
||||
from datetime import datetime
|
||||
from typing import List, Optional
|
||||
|
||||
import numpy as np
|
||||
|
||||
try:
|
||||
import sounddevice as sd
|
||||
except Exception:
|
||||
sd = None
|
||||
|
||||
CHUNK_MAX_SECONDS = 600
|
||||
|
||||
_AUDIO_BACKUP_SUBDIR = "Audio_Backup"
|
||||
|
||||
|
||||
def get_audio_backup_dir() -> str:
    """Return the safe audio backup folder, creating it when necessary."""
    home = os.path.expanduser("~")
    docs = os.path.join(home, "Documents")
    base = docs if os.path.isdir(docs) else home  # no Documents dir: use home
    backup_dir = os.path.join(base, "KG_Diktat_Ablage", _AUDIO_BACKUP_SUBDIR)
    os.makedirs(backup_dir, exist_ok=True)
    return backup_dir
|
||||
|
||||
|
||||
def persist_audio_safe(temp_path: str) -> str:
    """Copy a recording into the safe backup folder; return the new path."""
    target_dir = get_audio_backup_dir()
    suffix = os.path.splitext(temp_path)[1] or ".m4a"
    stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    safe_path = os.path.join(target_dir, f"aufnahme_{stamp}{suffix}")
    # copy2 preserves timestamps; the temp file is left for the caller.
    shutil.copy2(temp_path, safe_path)
    return safe_path
|
||||
|
||||
|
||||
def cleanup_old_audio_backups(max_age_days: int = 30):
    """Delete audio backups older than *max_age_days*.

    NOTE(review): this deletes ALL files past the age cutoff — the previous
    docstring claimed only successfully transcribed recordings are removed,
    which the code never checked; confirm the intended policy with callers.

    Best effort: every filesystem error is swallowed so cleanup can never
    break a recording session.
    """
    backup_dir = get_audio_backup_dir()
    cutoff = datetime.now().timestamp() - max_age_days * 86400
    try:
        # scandir yields cached stat info – one directory pass, fewer syscalls.
        with os.scandir(backup_dir) as entries:
            for entry in entries:
                try:
                    if entry.is_file() and entry.stat().st_mtime < cutoff:
                        os.remove(entry.path)
                except Exception:
                    pass  # locked or already-removed file: keep cleaning
    except Exception:
        pass  # missing/unreadable dir: nothing to clean
|
||||
|
||||
|
||||
_NO_WINDOW = getattr(subprocess, "CREATE_NO_WINDOW", 0)
|
||||
|
||||
_WINDOWS_SOUND_SETTINGS = "Einstellungen > System > Sound > Eingabe"
|
||||
|
||||
_mic_check_cache: dict = {}
|
||||
|
||||
|
||||
def _fail(msg: str, dev_name=None, dev_index=None) -> dict:
|
||||
return {"ok": False, "device_name": dev_name, "device_index": dev_index, "message": msg}
|
||||
|
||||
|
||||
def check_microphone(force: bool = False) -> dict:
    """Check whether a usable microphone is available.

    Args:
        force: when True, bypass the cached result and re-probe the hardware.

    Returns dict:
        ok (bool), device_name (str|None), device_index (int|None),
        message (str – German, user friendly)
    """
    # Device probing is comparatively slow; reuse the last result unless forced.
    if not force and _mic_check_cache.get("result"):
        return _mic_check_cache["result"]

    def _cache(r):
        # Store and pass through, so every return site caches uniformly.
        _mic_check_cache["result"] = r
        return r

    if sd is None:
        return _cache(_fail(
            "Audio-Modul nicht verfügbar.\n\n"
            "Das Paket 'sounddevice' konnte nicht geladen werden.\n"
            "Aufnahme und Diktat sind nicht möglich."
        ))

    # --- Step 1: query the default input device ---
    dev_index = None
    dev_name = None
    try:
        info = sd.query_devices(kind="input")
        dev_name = info["name"]
        dev_index = sd.default.device[0]
    except Exception:
        pass  # no default input device: fall through to the full scan

    # --- Step 2: fallback – scan all devices for any input-capable one ---
    if dev_name is None:
        try:
            all_devs = sd.query_devices()
            for i, d in enumerate(all_devs):
                try:
                    if d["max_input_channels"] > 0:
                        dev_name = d["name"]
                        dev_index = i
                        break
                except (KeyError, TypeError, IndexError):
                    continue  # malformed device entry: skip it
        except Exception:
            pass

    if dev_name is None:
        return _cache(_fail(
            "Kein Mikrofon gefunden.\n\n"
            "Bitte schliessen Sie ein Mikrofon an oder\n"
            "aktivieren Sie es in den Windows-Einstellungen:\n\n"
            f" {_WINDOWS_SOUND_SETTINGS}"
        ))

    # --- Step 3: verify the device exposes at least one input channel ---
    try:
        info = sd.query_devices(dev_index) if dev_index is not None else sd.query_devices(kind="input")
        max_ch = info["max_input_channels"]
    except Exception:
        max_ch = 0

    if max_ch < 1:
        return _cache(_fail(
            f"Gerät '{dev_name}' hat keine Eingangskanäle.\n\n"
            "Bitte ein anderes Mikrofon auswählen:\n\n"
            f" {_WINDOWS_SOUND_SETTINGS}",
            dev_name, dev_index,
        ))

    # --- Step 4: short open test (catches busy/blocked/disabled devices) ---
    try:
        test_stream = sd.InputStream(
            device=dev_index,
            samplerate=16000,
            channels=1,
            dtype="float32",
            blocksize=1024,
        )
        test_stream.close()
    except Exception as e:
        err = str(e)
        return _cache(_fail(
            f"Mikrofon '{dev_name}' konnte nicht geöffnet werden.\n\n"
            "Mögliche Ursachen:\n"
            " - Mikrofon ist von einer anderen App belegt\n"
            " - Zugriff in Windows-Datenschutz blockiert\n"
            " - Gerät ist deaktiviert oder getrennt\n\n"
            f"Windows-Einstellungen:\n {_WINDOWS_SOUND_SETTINGS}\n\n"
            f"(Technisch: {err[:120]})",
            dev_name, dev_index,
        ))

    result = {
        "ok": True,
        "device_name": dev_name,
        "device_index": dev_index,
        "message": f"Mikrofon bereit: {dev_name}",
    }
    return _cache(result)
|
||||
|
||||
|
||||
def invalidate_mic_cache():
    """Reset the cached microphone-check result (e.g. after a device change).

    The next call to check_microphone() will re-probe the hardware instead
    of returning the cached result.
    """
    # _mic_check_cache is the module-level dict used by check_microphone().
    _mic_check_cache.clear()
|
||||
|
||||
|
||||
def _find_ffmpeg() -> Optional[str]:
|
||||
path = shutil.which("ffmpeg")
|
||||
if path:
|
||||
return path
|
||||
script_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
for candidate in (
|
||||
os.path.join(script_dir, "ffmpeg.exe"),
|
||||
os.path.join(script_dir, "_internal", "ffmpeg.exe"),
|
||||
):
|
||||
if os.path.isfile(candidate):
|
||||
return candidate
|
||||
return None
|
||||
|
||||
|
||||
class AudioRecorder:
    """Records audio and streams it directly into ffmpeg (M4A/AAC).

    When ffmpeg is available the captured audio is encoded to M4A in real
    time during recording – no intermediate WAV step, immediately a small
    file. When ffmpeg is missing, frames are buffered in memory and a WAV
    file (16 kHz mono 16-bit PCM) is written at stop time.
    """

    def __init__(self, samplerate=16000, channels=1):
        """Args:
            samplerate: capture rate in Hz (default 16 kHz).
            channels: number of input channels (default mono).
        """
        self.samplerate = samplerate
        self.channels = channels
        self._stream = None  # sounddevice.InputStream while recording
        self._ffmpeg_proc: Optional[subprocess.Popen] = None
        self._output_path: Optional[str] = None  # target .m4a when ffmpeg is used
        self._recording = False
        self._wav_fallback = False  # True -> buffer frames, write WAV at stop
        self._frames: list = []  # float32 blocks for the WAV fallback

    def _abort_ffmpeg(self):
        """Best-effort teardown of a launched ffmpeg process and its temp file.

        Used when opening the input stream fails after ffmpeg was already
        started, so neither the child process nor the temp file leaks.
        """
        proc, self._ffmpeg_proc = self._ffmpeg_proc, None
        if proc is not None:
            try:
                if proc.stdin:
                    proc.stdin.close()
            except Exception:
                pass
            try:
                proc.kill()
            except Exception:
                pass
        if self._output_path:
            try:
                os.remove(self._output_path)
            except Exception:
                pass
            self._output_path = None

    def start(self):
        """Start recording. Raises RuntimeError when no usable mic exists."""
        mic = check_microphone()
        if not mic["ok"]:
            raise RuntimeError(mic["message"])

        self._recording = True
        self._wav_fallback = False
        self._frames = []
        self._ffmpeg_proc = None
        self._device_index = mic.get("device_index")

        ffmpeg = _find_ffmpeg()
        if ffmpeg:
            # Stream raw PCM into ffmpeg's stdin; it encodes AAC on the fly.
            fd, self._output_path = tempfile.mkstemp(suffix=".m4a", prefix="kg_rec_")
            os.close(fd)
            try:
                self._ffmpeg_proc = subprocess.Popen(
                    [ffmpeg, "-y",
                     "-f", "s16le", "-ar", str(self.samplerate),
                     "-ac", str(self.channels), "-i", "pipe:0",
                     "-c:a", "aac", "-b:a", "64k",
                     "-movflags", "+faststart",
                     self._output_path],
                    stdin=subprocess.PIPE,
                    stdout=subprocess.DEVNULL,
                    stderr=subprocess.DEVNULL,
                    creationflags=_NO_WINDOW,
                )
            except Exception:
                self._ffmpeg_proc = None
                self._wav_fallback = True
                self._output_path = None
        else:
            self._wav_fallback = True

        def callback(indata, frames, time_info, status):
            # Runs on the PortAudio thread: convert to 16-bit PCM and either
            # pipe into ffmpeg or buffer the block for the WAV fallback.
            if not self._recording:
                return
            pcm = (np.clip(indata, -1.0, 1.0) * 32767.0).astype(np.int16)
            if self._ffmpeg_proc and self._ffmpeg_proc.stdin:
                try:
                    self._ffmpeg_proc.stdin.write(pcm.tobytes())
                except Exception:
                    pass  # ffmpeg died mid-recording; drop the block
            else:
                self._frames.append(indata.copy())

        try:
            self._stream = sd.InputStream(
                device=self._device_index,
                samplerate=self.samplerate,
                channels=self.channels,
                callback=callback,
                dtype="float32",
                blocksize=0,
            )
            self._stream.start()
        except Exception as e:
            # BUGFIX: previously a launched ffmpeg process (and its temp
            # file) leaked when the input stream could not be opened, and
            # _recording stayed True.
            self._recording = False
            self._abort_ffmpeg()
            invalidate_mic_cache()
            err = str(e)
            if "device" in err.lower() or "portaudio" in err.lower() or "-1" in err:
                raise RuntimeError(
                    "Mikrofon konnte nicht geöffnet werden.\n\n"
                    "Bitte prüfen Sie:\n"
                    " - Ist ein Mikrofon angeschlossen?\n"
                    " - Ist es in Windows aktiviert?\n\n"
                    f"Windows: {_WINDOWS_SOUND_SETTINGS}\n\n"
                    f"(Technisch: {err[:120]})"
                ) from None
            raise

    def stop_and_save(self) -> str:
        """Stop the recording and return the path of the finished audio file.

        Prefers the streamed M4A; falls back to writing a WAV from the
        buffered frames when ffmpeg was unavailable or produced no output.

        Raises:
            RuntimeError: if start() was never called.
        """
        if not self._stream:
            raise RuntimeError("Recorder wurde nicht gestartet.")

        self._recording = False
        self._stream.stop()
        self._stream.close()
        self._stream = None

        if self._ffmpeg_proc and self._ffmpeg_proc.stdin:
            # Closing stdin signals EOF so ffmpeg can finalize the file.
            try:
                self._ffmpeg_proc.stdin.close()
            except Exception:
                pass
            try:
                self._ffmpeg_proc.wait(timeout=30)
            except Exception:
                try:
                    self._ffmpeg_proc.kill()
                except Exception:
                    pass

            if (self._output_path
                    and os.path.isfile(self._output_path)
                    and os.path.getsize(self._output_path) > 0):
                self._ffmpeg_proc = None
                return self._output_path

            # ffmpeg produced nothing usable -> fall back to buffered WAV.
            self._ffmpeg_proc = None
            self._wav_fallback = True

        if self._wav_fallback or not self._output_path:
            return self._save_wav_fallback()

        return self._output_path

    def stop_and_save_wav(self) -> str:
        """Legacy alias."""
        return self.stop_and_save()

    def _save_wav_fallback(self) -> str:
        """Write the buffered float32 frames to a temp WAV file (16-bit PCM)."""
        if not self._frames:
            raise RuntimeError("Keine Audio-Daten aufgenommen (leer).")

        audio = np.concatenate(self._frames, axis=0)
        audio = np.clip(audio, -1.0, 1.0)
        pcm16 = (audio * 32767.0).astype(np.int16)

        fd, path = tempfile.mkstemp(suffix=".wav", prefix="kg_rec_")
        os.close(fd)
        with wave.open(path, "wb") as wf:
            wf.setnchannels(self.channels)
            wf.setsampwidth(2)
            wf.setframerate(self.samplerate)
            wf.writeframes(pcm16.tobytes())
        return path
|
||||
|
||||
|
||||
# ── Chunking ──────────────────────────────────────────────────────────
|
||||
|
||||
def split_audio_into_chunks(audio_path: str, max_seconds: int = CHUNK_MAX_SECONDS) -> List[str]:
    """Split a recording into chunks of at most ``max_seconds`` seconds.

    Dispatches to the M4A splitter for .m4a files and to the WAV splitter
    for everything else; returns the list of chunk file paths.
    """
    suffix = os.path.splitext(audio_path)[1].lower()
    handler = _split_m4a if suffix == ".m4a" else _split_wav
    return handler(audio_path, max_seconds)
|
||||
|
||||
|
||||
def _split_m4a(m4a_path: str, max_seconds: int) -> List[str]:
    """Cut an M4A file into pieces of at most ``max_seconds`` via ffmpeg.

    Returns the original file unchanged when ffmpeg is missing, the
    duration cannot be determined, or the file is already short enough.
    """
    ffmpeg_bin = _find_ffmpeg()
    if not ffmpeg_bin:
        return [m4a_path]

    # Probe total duration by decoding to the null muxer and scanning
    # ffmpeg's stderr for the "Duration:" line.
    try:
        probe = subprocess.run(
            [ffmpeg_bin, "-i", m4a_path, "-f", "null", "-"],
            capture_output=True, timeout=30, creationflags=_NO_WINDOW,
        )
        stderr_text = (probe.stderr or b"").decode("utf-8", errors="replace")
        total = None
        for row in stderr_text.splitlines():
            if "Duration:" not in row:
                continue
            stamp = row.split("Duration:")[1].split(",")[0].strip()
            hh, mm, ss = stamp.split(":")
            total = int(hh) * 3600 + int(mm) * 60 + float(ss)
            break
        if total is None or total <= max_seconds:
            return [m4a_path]
    except Exception:
        return [m4a_path]

    pieces: List[str] = []
    start_at = 0.0
    part = 0
    while start_at < total:
        fd, piece_path = tempfile.mkstemp(suffix=f"_chunk{part}.m4a", prefix="kg_rec_")
        os.close(fd)
        # "-c copy" remuxes without re-encoding, so chunking is fast.
        proc = subprocess.run(
            [ffmpeg_bin, "-y", "-ss", str(start_at), "-i", m4a_path,
             "-t", str(max_seconds), "-c", "copy", piece_path],
            capture_output=True, timeout=120, creationflags=_NO_WINDOW,
        )
        produced = (proc.returncode == 0
                    and os.path.isfile(piece_path)
                    and os.path.getsize(piece_path) > 0)
        if not produced:
            try:
                os.remove(piece_path)
            except Exception:
                pass
            break
        pieces.append(piece_path)
        start_at += max_seconds
        part += 1

    return pieces if pieces else [m4a_path]
|
||||
|
||||
|
||||
def _split_wav(wav_path: str, max_seconds: int) -> List[str]:
|
||||
with wave.open(wav_path, "rb") as wf:
|
||||
n_channels = wf.getnchannels()
|
||||
sampwidth = wf.getsampwidth()
|
||||
framerate = wf.getframerate()
|
||||
n_frames = wf.getnframes()
|
||||
|
||||
duration_s = n_frames / framerate
|
||||
if duration_s <= max_seconds:
|
||||
return [wav_path]
|
||||
|
||||
chunk_frames = int(max_seconds * framerate)
|
||||
chunks: List[str] = []
|
||||
|
||||
with wave.open(wav_path, "rb") as wf:
|
||||
frames_remaining = n_frames
|
||||
idx = 0
|
||||
while frames_remaining > 0:
|
||||
read_count = min(chunk_frames, frames_remaining)
|
||||
data = wf.readframes(read_count)
|
||||
fd, chunk_path = tempfile.mkstemp(suffix=f"_chunk{idx}.wav", prefix="kg_rec_")
|
||||
os.close(fd)
|
||||
with wave.open(chunk_path, "wb") as cf:
|
||||
cf.setnchannels(n_channels)
|
||||
cf.setsampwidth(sampwidth)
|
||||
cf.setframerate(framerate)
|
||||
cf.writeframes(data)
|
||||
chunks.append(chunk_path)
|
||||
frames_remaining -= read_count
|
||||
idx += 1
|
||||
|
||||
return chunks
|
||||
|
||||
|
||||
# Backwards-compatible alias: older call sites used the WAV-specific name
# before M4A support was added.
split_wav_into_chunks = split_audio_into_chunks
|
||||
|
||||
|
||||
def test_audio_device(duration_sec: float = 1.5) -> dict:
    """Quick microphone test: records briefly and checks for signal.

    Returns dict with keys:
        ok (bool), device (str|None), message (str)
    """
    def _result(ok, device, message):
        # All exits share the same result shape.
        return {"ok": ok, "device": device, "message": message}

    if sd is None:
        return _result(
            False, None,
            "Python-Paket 'sounddevice' ist nicht verfügbar.\n"
            "Audio-Aufnahme nicht möglich.",
        )

    try:
        mic_name = sd.query_devices(kind="input").get("name", "Unbekanntes Gerät")
    except Exception:
        return _result(
            False, None,
            "Kein Eingabegerät (Mikrofon) gefunden.\n"
            "Bitte Mikrofon anschliessen und erneut versuchen.",
        )

    try:
        samples = sd.rec(
            int(duration_sec * 16000),
            samplerate=16000,
            channels=1,
            dtype="float32",
            blocking=True,
        )
    except Exception as exc:
        return _result(False, mic_name, f"Aufnahmetest fehlgeschlagen:\n{exc}")

    if samples is None or len(samples) == 0:
        return _result(
            False, mic_name,
            "Keine Audio-Daten empfangen.\n"
            "Bitte Mikrofon-Zugriff in den Windows-Einstellungen prüfen.",
        )

    peak = float(np.max(np.abs(samples)))
    rms = float(np.sqrt(np.mean(samples ** 2)))

    # Essentially silent recording -> mic muted, broken or blocked.
    if peak < 0.001:
        return _result(
            False, mic_name,
            f"Gerät: {mic_name}\n\n"
            f"Kein Signal erkannt (Peak={peak:.4f}).\n"
            "Mikrofon ist möglicherweise stummgeschaltet oder defekt.",
        )

    level_pct = min(100, int(rms * 1000))
    return _result(
        True, mic_name,
        f"Gerät: {mic_name}\n\n"
        f"Audio-Signal erkannt.\n"
        f"Pegel: {level_pct}% (Peak={peak:.3f}, RMS={rms:.4f})\n\n"
        "Mikrofon funktioniert.",
    )
|
||||
365
AzA march 2026 - Kopie (18)/aza_audit_log.py
Normal file
@@ -0,0 +1,365 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
AZA MedWork – Audit-Logging (DSG-konform, tamper-evident).
|
||||
|
||||
Protokolliert sicherheitsrelevante Ereignisse in einer
|
||||
Append-only-Logdatei mit SHA-256-Hash-Kette.
|
||||
Keine Patientendaten, keine Prompts, keine KI-Antworten.
|
||||
|
||||
Format pro Zeile (pipe-separiert, 8 Felder):
|
||||
TIMESTAMP | EVENT | USER | STATUS | SOURCE | DETAIL | PREV_HASH | ENTRY_HASH
|
||||
|
||||
Konfiguration:
|
||||
AZA_AUDIT_LOG – Pfad zur Logdatei (Standard: aza_audit.log)
|
||||
AZA_AUDIT_ROTATE_MB – Max. Groesse in MB vor Rotation (Standard: 10)
|
||||
AZA_AUDIT_KEEP – Anzahl rotierter Dateien (Standard: 12)
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import json
|
||||
import hashlib
|
||||
import shutil
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
from aza_config import get_writable_data_dir
|
||||
|
||||
_LOG_FILE = Path(os.getenv("AZA_AUDIT_LOG", str(Path(get_writable_data_dir()) / "aza_audit.log")))
|
||||
_ROTATE_MB = float(os.getenv("AZA_AUDIT_ROTATE_MB", "10"))
|
||||
_KEEP_COUNT = int(os.getenv("AZA_AUDIT_KEEP", "12"))
|
||||
|
||||
_GENESIS_HASH = "0" * 64
|
||||
_CHAIN_HEADER_PREFIX = "#CHAIN_FROM="
|
||||
|
||||
|
||||
def _compute_entry_hash(payload: str, prev_hash: str) -> str:
|
||||
"""SHA-256 ueber prev_hash + payload (ohne Hash-Felder)."""
|
||||
raw = prev_hash + payload
|
||||
return hashlib.sha256(raw.encode("utf-8")).hexdigest()
|
||||
|
||||
|
||||
def _get_last_hash(path: Optional[Path] = None) -> str:
    """Return the last entry_hash found in a log file.

    Falls back to the genesis hash when the file is missing or unreadable.
    A chain-header line (written after rotation) seeds the value so the
    hash chain continues across rotated files.
    """
    if path is None:
        path = _LOG_FILE
    if not path.exists():
        return _GENESIS_HASH
    last_hash = _GENESIS_HASH
    try:
        with open(path, "r", encoding="utf-8") as f:
            for line in f:
                line = line.strip()
                if not line or line.startswith("#"):
                    # Rotation header carries the hash of the previous file.
                    if line.startswith(_CHAIN_HEADER_PREFIX):
                        last_hash = line[len(_CHAIN_HEADER_PREFIX):].strip()
                    continue
                parts = [p.strip() for p in line.split("|")]
                if len(parts) >= 8:
                    # Field 8 (index 7) is the entry hash.
                    last_hash = parts[7]
    except OSError:
        pass
    return last_hash
|
||||
|
||||
|
||||
def _rotate_if_needed():
    """Rotate the log file once it exceeds the configured maximum size.

    The last entry hash is carried over as a chain header into the fresh
    file, so the tamper-evident chain stays verifiable across rotations.
    Returns True when a rotation happened, False otherwise.
    """
    if not _LOG_FILE.exists():
        return False
    try:
        size_mb = _LOG_FILE.stat().st_size / (1024 * 1024)
    except OSError:
        return False
    if size_mb < _ROTATE_MB:
        return False

    # Remember the chain tip before any files are moved.
    last_hash = _get_last_hash(_LOG_FILE)

    # Shift existing rotations up by one (.1 -> .2, ...); the oldest
    # (index _KEEP_COUNT) is overwritten and thereby dropped.
    for i in range(_KEEP_COUNT - 1, 0, -1):
        src = _LOG_FILE.parent / f"{_LOG_FILE.stem}.{i}{_LOG_FILE.suffix}"
        dst = _LOG_FILE.parent / f"{_LOG_FILE.stem}.{i + 1}{_LOG_FILE.suffix}"
        if src.exists():
            try:
                shutil.move(str(src), str(dst))
            except OSError:
                pass

    # The current log becomes rotation .1 ...
    rotated = _LOG_FILE.parent / f"{_LOG_FILE.stem}.1{_LOG_FILE.suffix}"
    try:
        shutil.move(str(_LOG_FILE), str(rotated))
    except OSError:
        pass

    # ... and a fresh log starts with the carried-over chain tip.
    try:
        with open(_LOG_FILE, "w", encoding="utf-8") as f:
            f.write(f"{_CHAIN_HEADER_PREFIX}{last_hash}\n")
    except OSError:
        pass

    return True
|
||||
|
||||
|
||||
def log_event(
    event: str,
    user_id: str = "",
    success: bool = True,
    detail: str = "",
    source: str = "desktop",
):
    """Append one hash-chained audit entry.

    Line format: TS | EVENT | USER | STATUS | SOURCE | DETAIL | PREV_HASH | ENTRY_HASH

    Write failures are reported on stderr but never raised, so audit
    logging can never take the application down.
    """
    _rotate_if_needed()

    timestamp = datetime.now(timezone.utc).isoformat(timespec="milliseconds")
    status_text = "OK" if success else "FAIL"

    # The pipe is the field separator and newlines would break the line
    # format, so both are stripped out of the free-text detail.
    cleaned = detail.replace("|", "/").replace("\n", " ").replace("\r", "")
    if len(cleaned) > 200:
        cleaned = cleaned[:197] + "..."

    chain_prev = _get_last_hash()
    body = f"{timestamp} | {event} | {user_id} | {status_text} | {source} | {cleaned}"
    digest = _compute_entry_hash(body, chain_prev)

    record = f"{body} | {chain_prev} | {digest}\n"

    try:
        _LOG_FILE.parent.mkdir(parents=True, exist_ok=True)
        with open(_LOG_FILE, "a", encoding="utf-8") as f:
            f.write(record)
    except OSError as e:
        print(f"AUDIT-LOG FEHLER: {e}", file=sys.stderr)
|
||||
|
||||
|
||||
def verify_integrity(path: Optional[Path] = None) -> tuple[bool, list]:
    """Verify the hash chain of a log file.

    Returns: (ok, errors) – errors contains line number + description.
    """
    if path is None:
        path = _LOG_FILE
    path = Path(path)

    # A missing file is vacuously intact.
    if not path.exists():
        return True, []

    errors = []
    prev_hash = _GENESIS_HASH
    line_num = 0

    try:
        with open(path, "r", encoding="utf-8") as f:
            for raw_line in f:
                raw_line = raw_line.strip()
                if not raw_line:
                    continue

                if raw_line.startswith("#"):
                    # Rotation header seeds the chain from the previous file.
                    if raw_line.startswith(_CHAIN_HEADER_PREFIX):
                        prev_hash = raw_line[len(_CHAIN_HEADER_PREFIX):].strip()
                    continue

                line_num += 1
                parts = [p.strip() for p in raw_line.split("|")]

                if len(parts) < 8:
                    errors.append(f"Zeile {line_num}: Ungueltig ({len(parts)} Felder, erwartet 8)")
                    continue

                stored_prev = parts[6]
                stored_hash = parts[7]

                # Check 1: the recorded prev_hash must equal the running
                # chain value (detects deleted or reordered lines).
                if stored_prev != prev_hash:
                    errors.append(
                        f"Zeile {line_num}: prev_hash stimmt nicht "
                        f"(erwartet {prev_hash[:16]}..., gefunden {stored_prev[:16]}...)"
                    )

                # Check 2: recomputing the entry hash over the payload must
                # match (detects modified field content).
                payload = " | ".join(parts[:6])
                expected_hash = _compute_entry_hash(payload, stored_prev)

                if stored_hash != expected_hash:
                    errors.append(
                        f"Zeile {line_num}: entry_hash stimmt nicht "
                        f"(erwartet {expected_hash[:16]}..., gefunden {stored_hash[:16]}...)"
                    )

                prev_hash = stored_hash

    except OSError as e:
        errors.append(f"Dateifehler: {e}")

    return len(errors) == 0, errors
|
||||
|
||||
|
||||
def verify_all_rotations() -> tuple[bool, dict]:
    """Verify chain integrity across all rotated log files plus the active one.

    Returns: (all_ok, results_per_file)
    """
    results = {}
    all_ok = True

    # Oldest rotation first, active log last, matching chronological order.
    candidates = [
        _LOG_FILE.parent / f"{_LOG_FILE.stem}.{i}{_LOG_FILE.suffix}"
        for i in range(_KEEP_COUNT, 0, -1)
    ]
    candidates.append(_LOG_FILE)

    for fp in candidates:
        if not fp.exists():
            continue
        ok, errs = verify_integrity(fp)
        results[str(fp.name)] = {"ok": ok, "errors": errs}
        all_ok = all_ok and ok

    return all_ok, results
|
||||
|
||||
|
||||
def export_audit_log(output_path: Optional[str] = None) -> str:
    """Export the audit log, plus an integrity verdict, as a JSON file.

    Returns the path of the written export file.
    """
    parsed = _parse_log_file(_LOG_FILE)

    if output_path is None:
        stamp = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
        output_path = str(Path(get_writable_data_dir()) / f"audit_export_{stamp}.json")

    chain_ok, chain_errors = verify_integrity(_LOG_FILE)

    document = {
        "export_timestamp": datetime.now(timezone.utc).isoformat(),
        "total_entries": len(parsed),
        "source_file": str(_LOG_FILE),
        "integrity": "PASS" if chain_ok else "FAIL",
        "integrity_errors": chain_errors,
        "entries": parsed,
    }

    with open(output_path, "w", encoding="utf-8") as f:
        json.dump(document, f, ensure_ascii=False, indent=2)

    return output_path
|
||||
|
||||
|
||||
def _parse_log_file(path: Path) -> list:
|
||||
"""Parst eine Logdatei in eine Liste von Dicts."""
|
||||
entries = []
|
||||
if not path.exists():
|
||||
return entries
|
||||
try:
|
||||
with open(path, "r", encoding="utf-8") as f:
|
||||
for line in f:
|
||||
line = line.strip()
|
||||
if not line or line.startswith("#"):
|
||||
continue
|
||||
parts = [p.strip() for p in line.split("|")]
|
||||
if len(parts) >= 6:
|
||||
entry = {
|
||||
"timestamp": parts[0],
|
||||
"event": parts[1],
|
||||
"user_id": parts[2],
|
||||
"status": parts[3],
|
||||
"source": parts[4],
|
||||
"detail": parts[5],
|
||||
}
|
||||
if len(parts) >= 8:
|
||||
entry["prev_hash"] = parts[6]
|
||||
entry["entry_hash"] = parts[7]
|
||||
entries.append(entry)
|
||||
except OSError:
|
||||
pass
|
||||
return entries
|
||||
|
||||
|
||||
def get_log_stats() -> dict:
    """Collect summary statistics about the audit log."""
    parsed = _parse_log_file(_LOG_FILE)
    chain_ok, _ = verify_integrity(_LOG_FILE)

    stats = {
        "log_file": str(_LOG_FILE),
        "exists": _LOG_FILE.exists(),
        "size_mb": 0.0,
        "total_lines": len(parsed),
        "integrity": "PASS" if chain_ok else "FAIL",
        "events": {},
        "first_entry": None,
        "last_entry": None,
    }

    if _LOG_FILE.exists():
        try:
            stats["size_mb"] = round(_LOG_FILE.stat().st_size / (1024 * 1024), 3)
        except OSError:
            pass

    # Frequency count per event type.
    counts = stats["events"]
    for item in parsed:
        key = item.get("event", "")
        counts[key] = counts.get(key, 0) + 1

    if parsed:
        stats["first_entry"] = parsed[0].get("timestamp")
        stats["last_entry"] = parsed[-1].get("timestamp")

    return stats
|
||||
|
||||
|
||||
# CLI entry point: verify / stats / export subcommands for the audit log.
# Exit code 0 = integrity PASS, 1 = FAIL (for use in scheduled checks).
if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(description="AZA MedWork Audit-Log")
    sub = parser.add_subparsers(dest="command")

    p_verify = sub.add_parser("verify", help="Integritaet pruefen")
    p_verify.add_argument("--file", help="Logdatei (Standard: aktuelle)", default=None)
    p_verify.add_argument("--all", action="store_true", help="Alle Rotationsdateien pruefen")

    sub.add_parser("stats", help="Statistiken anzeigen")

    p_export = sub.add_parser("export", help="Log exportieren")
    p_export.add_argument("--output", help="Ausgabepfad", default=None)

    args = parser.parse_args()

    if args.command == "verify":
        if args.all:
            # Check every rotated file plus the active one.
            ok, results = verify_all_rotations()
            for fname, res in results.items():
                status = "PASS" if res["ok"] else "FAIL"
                print(f" {fname}: {status}")
                for e in res["errors"]:
                    print(f" {e}")
            print(f"\nGESAMT: {'PASS' if ok else 'FAIL'}")
            sys.exit(0 if ok else 1)
        else:
            # Check a single file (default: the active log).
            fp = Path(args.file) if args.file else _LOG_FILE
            ok, errs = verify_integrity(fp)
            print(f"Datei: {fp}")
            print(f"Integritaet: {'PASS' if ok else 'FAIL'}")
            for e in errs:
                print(f" {e}")
            sys.exit(0 if ok else 1)
    elif args.command == "stats":
        s = get_log_stats()
        print(f"Datei: {s['log_file']}")
        print(f"Existiert: {s['exists']}")
        print(f"Groesse: {s['size_mb']} MB")
        print(f"Eintraege: {s['total_lines']}")
        print(f"Integritaet: {s['integrity']}")
        if s["first_entry"]:
            print(f"Erster: {s['first_entry']}")
            print(f"Letzter: {s['last_entry']}")
        if s["events"]:
            print("Events:")
            for ev, cnt in sorted(s["events"].items()):
                print(f" {ev}: {cnt}")
    elif args.command == "export":
        path = export_audit_log(args.output)
        print(f"Exportiert: {path}")
    else:
        # No subcommand given: show usage.
        parser.print_help()
|
||||
489
AzA march 2026 - Kopie (18)/aza_backup.py
Normal file
@@ -0,0 +1,489 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
AZA MedWork – Automatisiertes Backup & Restore.
|
||||
|
||||
Erstellt versionierte, verschlüsselte Backups aller Praxisdaten.
|
||||
Konfiguration über Umgebungsvariablen:
|
||||
AZA_BACKUP_DIR – Zielverzeichnis (Standard: ./backups)
|
||||
AZA_BACKUP_KEEP_DAYS – Aufbewahrungsdauer in Tagen (Standard: 90)
|
||||
AZA_BACKUP_PASSWORD – Passwort für ZIP-Verschlüsselung (optional)
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import json
|
||||
import shutil
|
||||
import zipfile
|
||||
import hashlib
|
||||
import time
|
||||
from datetime import datetime, timedelta
|
||||
from pathlib import Path
|
||||
|
||||
_BASE_DIR = Path(__file__).resolve().parent
|
||||
|
||||
_BACKUP_DIR = Path(os.getenv("AZA_BACKUP_DIR", str(_BASE_DIR / "backups")))
|
||||
_KEEP_DAYS = int(os.getenv("AZA_BACKUP_KEEP_DAYS", "90"))
|
||||
|
||||
_MEDICAL_JSON_FILES = [
|
||||
"kg_diktat_user_profile.json",
|
||||
"kg_diktat_todos.json",
|
||||
"kg_diktat_todo_inbox.json",
|
||||
"kg_diktat_notes.json",
|
||||
"kg_diktat_checklists.json",
|
||||
"kg_diktat_korrekturen.json",
|
||||
"kg_diktat_textbloecke.json",
|
||||
"kg_diktat_autotext.json",
|
||||
"kg_diktat_soap_presets.json",
|
||||
"kg_diktat_soap_order.json",
|
||||
"kg_diktat_soap_visibility.json",
|
||||
"kg_diktat_brief_presets.json",
|
||||
"kg_diktat_medwork_contacts.json",
|
||||
"aza_email_contacts.json",
|
||||
"aza_medwork_messages.json",
|
||||
"medwork_backup.json",
|
||||
"kg_diktat_cloud_sync.json",
|
||||
]
|
||||
|
||||
_CONFIG_FILES = [
|
||||
"kg_diktat_config.txt",
|
||||
"kg_diktat_signature.txt",
|
||||
"kg_diktat_arztbrief_vorlage.txt",
|
||||
"kg_diktat_op_bericht_template.txt",
|
||||
"kg_diktat_todo_settings.json",
|
||||
"aza_email_config.json",
|
||||
"aza_docapp_config.json",
|
||||
"translate_config.json",
|
||||
"aza_whatsapp_config.json",
|
||||
"text_font_sizes.json",
|
||||
"paned_positions.json",
|
||||
"kg_diktat_button_heat.json",
|
||||
]
|
||||
|
||||
_UI_STATE_FILES = [
|
||||
"kg_diktat_window.txt",
|
||||
"kg_diktat_todo_window.txt",
|
||||
"kg_diktat_pruefen_window.txt",
|
||||
"kg_diktat_ordner_window.txt",
|
||||
"kg_diktat_text_window.txt",
|
||||
"kg_diktat_diktat_window.txt",
|
||||
"kg_diktat_notizen_geometry.txt",
|
||||
"kg_diktat_arbeitsplan_geometry.txt",
|
||||
"kg_diktat_brief_vorlage_geometry.txt",
|
||||
"kg_diktat_opacity.txt",
|
||||
"kg_diktat_token_usage.txt",
|
||||
]
|
||||
|
||||
_ABLAGE_DIR = "kg_diktat_ablage"
|
||||
_LERNMODUS_DIR = "Lernmodus_Export"
|
||||
_WP_DB_FILE = "workforce_planner.db"
|
||||
|
||||
_SENSITIVE_PATTERNS = [
|
||||
"password", "secret", "token", "api_key", "anon_key",
|
||||
]
|
||||
|
||||
|
||||
def _sha256_file(path: Path) -> str:
|
||||
h = hashlib.sha256()
|
||||
with open(path, "rb") as f:
|
||||
for chunk in iter(lambda: f.read(8192), b""):
|
||||
h.update(chunk)
|
||||
return h.hexdigest()
|
||||
|
||||
|
||||
def _is_sensitive_content(filepath: Path) -> bool:
|
||||
"""Prüft ob eine Datei sensible Daten enthält (für Manifest-Markierung)."""
|
||||
name = filepath.name.lower()
|
||||
return any(p in name for p in ("user_profile", "email_config", "contact"))
|
||||
|
||||
|
||||
def _backup_tree(src: Path, staging: Path, manifest: dict,
                 category: str, sensitive: bool) -> int:
    """Copy directory *src* into staging/data and record each copied file
    (with its SHA-256) in the manifest. Returns the number of files added."""
    if not (src.exists() and src.is_dir()):
        return 0
    dst = staging / "data" / src.name
    shutil.copytree(src, dst, dirs_exist_ok=True)
    added = 0
    for root, _dirs, files in os.walk(dst):
        for fname in files:
            fp = Path(root) / fname
            rel = fp.relative_to(staging)
            manifest["files"].append({
                "path": str(rel).replace("\\", "/"),
                "category": category,
                "size": fp.stat().st_size,
                "sha256": _sha256_file(fp),
                "sensitive": sensitive,
            })
            added += 1
    return added


def create_backup(label: str = "") -> Path:
    """Create a complete backup as a ZIP archive.

    Collects all known JSON/config/UI-state files, the document folders and
    the workforce-planner database into a staging directory, writes a
    manifest with per-file SHA-256 checksums, zips everything and removes
    the staging directory.

    Args:
        label: optional suffix appended to the backup name.

    Returns: path to the created backup archive.
    """
    timestamp = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
    backup_name = f"aza_backup_{timestamp}"
    if label:
        backup_name += f"_{label}"

    _BACKUP_DIR.mkdir(parents=True, exist_ok=True)

    staging = _BACKUP_DIR / f".staging_{backup_name}"
    staging.mkdir(parents=True, exist_ok=True)

    manifest = {
        "backup_version": 1,
        "created_at": datetime.now().isoformat(),
        "label": label,
        "source_dir": str(_BASE_DIR),
        "hostname": os.environ.get("COMPUTERNAME", os.environ.get("HOSTNAME", "unknown")),
        "files": [],
    }

    copied = 0

    # Flat files from the application directory.
    for filename in _MEDICAL_JSON_FILES + _CONFIG_FILES + _UI_STATE_FILES:
        src = _BASE_DIR / filename
        if src.exists():
            dst_dir = staging / "data"
            dst_dir.mkdir(exist_ok=True)
            shutil.copy2(src, dst_dir / filename)
            manifest["files"].append({
                # BUGFIX: the manifest previously recorded a literal
                # placeholder path ("data/(unknown)") for every flat file,
                # which broke verify_backup/restore_backup; record the real
                # relative path instead.
                "path": f"data/{filename}",
                "category": "medical" if filename in _MEDICAL_JSON_FILES else
                            "config" if filename in _CONFIG_FILES else "ui_state",
                "size": src.stat().st_size,
                "sha256": _sha256_file(src),
                "sensitive": _is_sensitive_content(src),
            })
            copied += 1

    # Document folders (patient documents are always flagged sensitive).
    copied += _backup_tree(_BASE_DIR / _ABLAGE_DIR, staging, manifest,
                           "medical_documents", True)
    copied += _backup_tree(_BASE_DIR / _LERNMODUS_DIR, staging, manifest,
                           "learning", False)

    # Workforce-planner SQLite database.
    wp_db = _BASE_DIR / _WP_DB_FILE
    if wp_db.exists():
        db_dir = staging / "data"
        db_dir.mkdir(exist_ok=True)
        shutil.copy2(wp_db, db_dir / _WP_DB_FILE)
        manifest["files"].append({
            "path": f"data/{_WP_DB_FILE}",
            "category": "database",
            "size": wp_db.stat().st_size,
            "sha256": _sha256_file(wp_db),
            "sensitive": True,
        })
        copied += 1

    manifest["total_files"] = copied

    manifest_path = staging / "manifest.json"
    with open(manifest_path, "w", encoding="utf-8") as f:
        json.dump(manifest, f, ensure_ascii=False, indent=2)

    # Zip the staging tree, then remove it.
    zip_path = _BACKUP_DIR / f"{backup_name}.zip"
    with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED, compresslevel=9) as zf:
        for root, _dirs, files in os.walk(staging):
            for fname in files:
                fp = Path(root) / fname
                zf.write(fp, fp.relative_to(staging))

    shutil.rmtree(staging)

    size_mb = zip_path.stat().st_size / (1024 * 1024)
    print(f"BACKUP ERSTELLT: {zip_path}")
    print(f" Dateien: {copied}")
    print(f" Groesse: {size_mb:.2f} MB")
    print(f" Zeitstempel: {timestamp}")

    return zip_path
|
||||
|
||||
|
||||
def verify_backup(zip_path: Path) -> bool:
    """Verify a backup archive via its manifest's SHA-256 checksums.

    Checks, in order: the archive exists, the ZIP CRCs are intact,
    manifest.json is readable, and the content hash of every manifest entry
    matches. Returns True only when everything passes; problems are
    reported on stderr.

    Improvement: the archive is now opened once (the original opened it a
    second time for the per-file hash pass).
    """
    zip_path = Path(zip_path)
    if not zip_path.exists():
        print(f"FEHLER: Backup nicht gefunden: {zip_path}", file=sys.stderr)
        return False

    with zipfile.ZipFile(zip_path, "r") as zf:
        # testzip() returns the name of the first corrupt member, if any.
        bad = zf.testzip()
        if bad:
            print(f"FEHLER: Korrupte Datei im Archiv: {bad}", file=sys.stderr)
            return False

        try:
            manifest = json.loads(zf.read("manifest.json"))
        except (KeyError, json.JSONDecodeError) as e:
            print(f"FEHLER: Manifest nicht lesbar: {e}", file=sys.stderr)
            return False

        errors = 0
        for entry in manifest.get("files", []):
            fpath = entry["path"]
            expected_hash = entry.get("sha256", "")
            try:
                data = zf.read(fpath)
            except KeyError:
                print(f" DATEI FEHLT: {fpath}", file=sys.stderr)
                errors += 1
                continue
            if hashlib.sha256(data).hexdigest() != expected_hash:
                print(f" HASH MISMATCH: {fpath}", file=sys.stderr)
                errors += 1

    if errors == 0:
        print(f"BACKUP VERIFIZIERT: {zip_path} ({manifest.get('total_files', '?')} Dateien, OK)")
        return True
    print(f"BACKUP FEHLERHAFT: {errors} Fehler in {zip_path}", file=sys.stderr)
    return False
|
||||
|
||||
|
||||
def restore_backup(zip_path: Path, target_dir: Path | None = None, dry_run: bool = False) -> bool:
    """Restore a backup archive.

    Args:
        zip_path: Path to the backup archive.
        target_dir: Target directory (default: original directory from the manifest).
        dry_run: If True, only report what would be restored; nothing is written.

    Returns:
        True on success (including a successful dry-run), False on any error.
    """
    zip_path = Path(zip_path)
    if not zip_path.exists():
        print(f"FEHLER: Backup nicht gefunden: {zip_path}", file=sys.stderr)
        return False

    # Never restore from an archive that fails hash verification.
    if not verify_backup(zip_path):
        print("FEHLER: Backup-Verifikation fehlgeschlagen. Restore abgebrochen.", file=sys.stderr)
        return False

    # Keep the archive open for the ENTIRE restore so every zf.read() below
    # operates on an open ZipFile.
    with zipfile.ZipFile(zip_path, "r") as zf:
        manifest = json.loads(zf.read("manifest.json"))

        if target_dir is None:
            target_dir = Path(manifest.get("source_dir", str(_BASE_DIR)))
        target_dir = Path(target_dir)

        if dry_run:
            print(f"DRY-RUN: Restore von {zip_path}")
            print(f" Ziel: {target_dir}")
            print(f" Dateien: {manifest.get('total_files', '?')}")
            for entry in manifest.get("files", []):
                fpath = entry["path"]
                # Archive paths are stored under "data/"; strip that prefix once.
                dest = target_dir / fpath.replace("data/", "", 1)
                exists = dest.exists()
                print(f" {'UEBERSCHREIBEN' if exists else 'NEU'}: {dest}")
            return True

        # Safety net: copy every file we are about to overwrite into a
        # timestamped pre-restore directory.
        pre_restore_dir = _BACKUP_DIR / f".pre_restore_{datetime.now().strftime('%Y%m%d_%H%M%S')}"
        pre_restore_dir.mkdir(parents=True, exist_ok=True)

        restored = 0
        for entry in manifest.get("files", []):
            fpath = entry["path"]
            dest_rel = fpath.replace("data/", "", 1)
            dest = target_dir / dest_rel

            if dest.exists():
                pre_dest = pre_restore_dir / dest_rel
                pre_dest.parent.mkdir(parents=True, exist_ok=True)
                shutil.copy2(dest, pre_dest)

            dest.parent.mkdir(parents=True, exist_ok=True)
            data = zf.read(fpath)
            with open(dest, "wb") as f:
                f.write(data)
            restored += 1

    print(f"RESTORE ABGESCHLOSSEN: {restored} Dateien wiederhergestellt")
    print(f" Quelle: {zip_path}")
    print(f" Ziel: {target_dir}")
    print(f" Pre-Restore-Sicherung: {pre_restore_dir}")
    return True
|
||||
|
||||
|
||||
def cleanup_old_backups():
    """Remove backups older than AZA_BACKUP_KEEP_DAYS."""
    if not _BACKUP_DIR.exists():
        return

    # Anything modified before this instant is considered stale.
    oldest_allowed = time.time() - _KEEP_DAYS * 86400
    stale = [p for p in _BACKUP_DIR.glob("aza_backup_*.zip")
             if p.stat().st_mtime < oldest_allowed]

    for p in stale:
        p.unlink()
        print(f"ENTFERNT: {p.name} (aelter als {_KEEP_DAYS} Tage)")

    if stale:
        print(f"CLEANUP: {len(stale)} alte Backups entfernt")
    else:
        print(f"CLEANUP: Keine alten Backups (Aufbewahrung: {_KEEP_DAYS} Tage)")
|
||||
|
||||
|
||||
def list_backups():
    """List all existing backups, newest first; return them as a list of paths."""
    if not _BACKUP_DIR.exists():
        print("Kein Backup-Verzeichnis vorhanden.")
        return []

    found = list(_BACKUP_DIR.glob("aza_backup_*.zip"))
    found.sort(key=lambda p: p.stat().st_mtime, reverse=True)
    if not found:
        print("Keine Backups vorhanden.")
        return []

    print(f"BACKUPS IN: {_BACKUP_DIR}")
    print(f"{'Nr':>3} {'Datum':20} {'Groesse':>10} {'Datei'}")
    print("-" * 70)
    for idx, path in enumerate(found, 1):
        st = path.stat()
        stamp = datetime.fromtimestamp(st.st_mtime).strftime("%Y-%m-%d %H:%M:%S")
        mb = st.st_size / (1024 * 1024)
        print(f"{idx:3d} {stamp:20} {mb:>8.2f} MB {path.name}")
    return found
|
||||
|
||||
|
||||
def _scrub_directory(dir_path: Path, patient_name: str, dry_run: bool, result: dict) -> None:
    """Record (and, unless dry_run, delete) files whose name contains patient_name."""
    if not dir_path.exists():
        return
    needle = patient_name.lower()
    for f in dir_path.iterdir():
        if needle in f.name.lower():
            result["found_in"].append(str(f))
            if not dry_run:
                try:
                    f.unlink()
                    result["deleted_from"].append(str(f))
                except OSError as e:
                    result["errors"].append(f"Fehler beim Loeschen {f}: {e}")


def delete_patient_data(patient_name: str, dry_run: bool = True) -> dict:
    """Delete all data of one patient (right to be forgotten).

    CAUTION: only checks the local JSON files and the filing directory.
    Cloud data (Supabase) and backups must be handled separately.

    Args:
        patient_name: Name of the patient (substring match, case-insensitive).
        dry_run: If True, only report what would be deleted.

    Returns:
        A result dict with keys ``found_in``, ``deleted_from``, ``errors``
        and ``backup_warning``.
    """
    result = {
        "patient": patient_name,
        "dry_run": dry_run,
        "found_in": [],
        "deleted_from": [],
        "errors": [],
        "backup_warning": False,
    }

    # The original handled "KG" with a verbatim copy of the subfolder loop;
    # both are now one pass (same visit order as before).
    # NOTE(review): the "Transkript" subfolder is NOT scrubbed here — confirm intentional.
    for subdir in ("KG", "Briefe", "Rezepte", "Kostengutsprachen", "Diktat"):
        _scrub_directory(_BASE_DIR / _ABLAGE_DIR / subdir, patient_name, dry_run, result)

    # Filter patient-related entries out of the flat JSON list files.
    text_files = ["kg_diktat_notes.json", "kg_diktat_todos.json", "kg_diktat_todo_inbox.json"]
    for fname in text_files:
        fpath = _BASE_DIR / fname
        if not fpath.exists():
            continue
        try:
            with open(fpath, "r", encoding="utf-8") as f:
                data = json.load(f)
        except (json.JSONDecodeError, OSError):
            continue

        if isinstance(data, list):
            original_len = len(data)
            # Match anywhere in the serialized entry (keys and values alike).
            filtered = [item for item in data
                        if patient_name.lower() not in json.dumps(item, ensure_ascii=False).lower()]
            if len(filtered) < original_len:
                result["found_in"].append(f"{fname} ({original_len - len(filtered)} Eintraege)")
                if not dry_run:
                    with open(fpath, "w", encoding="utf-8") as f:
                        json.dump(filtered, f, ensure_ascii=False, indent=2)
                    result["deleted_from"].append(fname)

    # Existing backups may still contain the patient's data.
    if _BACKUP_DIR.exists() and any(_BACKUP_DIR.glob("aza_backup_*.zip")):
        result["backup_warning"] = True

    if dry_run:
        print(f"\nDRY-RUN: Loeschung fuer Patient '{patient_name}'")
    else:
        print(f"\nLOESCHUNG DURCHGEFUEHRT: Patient '{patient_name}'")

    if result["found_in"]:
        print(" Gefunden in:")
        for loc in result["found_in"]:
            print(f" - {loc}")
    else:
        print(" Keine Daten gefunden.")

    if result["backup_warning"]:
        print("\n WARNUNG: Bestehende Backups enthalten moeglicherweise noch Daten")
        print(" dieses Patienten. Diese muessen separat behandelt werden.")
        print(" Cloud-Daten (Supabase) muessen manuell geloescht werden.")

    return result
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Command-line entry point: backup | list | cleanup | verify | restore | delete-patient
    import argparse

    parser = argparse.ArgumentParser(description="AZA MedWork Backup & Restore")
    sub = parser.add_subparsers(dest="command")

    sub.add_parser("backup", help="Backup erstellen")
    sub.add_parser("list", help="Backups auflisten")
    sub.add_parser("cleanup", help="Alte Backups entfernen")

    p_verify = sub.add_parser("verify", help="Backup verifizieren")
    p_verify.add_argument("file", help="Pfad zum Backup-Archiv")

    p_restore = sub.add_parser("restore", help="Backup wiederherstellen")
    p_restore.add_argument("file", help="Pfad zum Backup-Archiv")
    p_restore.add_argument("--target", help="Zielverzeichnis", default=None)
    p_restore.add_argument("--dry-run", action="store_true", help="Nur pruefen")

    p_delete = sub.add_parser("delete-patient", help="Patientendaten loeschen")
    p_delete.add_argument("name", help="Patientenname")
    p_delete.add_argument("--execute", action="store_true", help="Tatsaechlich loeschen (ohne: dry-run)")

    args = parser.parse_args()

    if args.command == "backup":
        # Return value (archive path) is already printed by create_backup;
        # the original bound it to an unused local.
        create_backup()
        cleanup_old_backups()
    elif args.command == "list":
        list_backups()
    elif args.command == "cleanup":
        cleanup_old_backups()
    elif args.command == "verify":
        ok = verify_backup(Path(args.file))
        sys.exit(0 if ok else 1)
    elif args.command == "restore":
        target = Path(args.target) if args.target else None
        ok = restore_backup(Path(args.file), target, dry_run=args.dry_run)
        sys.exit(0 if ok else 1)
    elif args.command == "delete-patient":
        # Deletion is dry-run by default; --execute makes it real.
        delete_patient_data(args.name, dry_run=not args.execute)
    else:
        parser.print_help()
|
||||
59
AzA march 2026 - Kopie (18)/aza_build_stamp.py
Normal file
@@ -0,0 +1,59 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Erzeugt _build_info.py mit Build-Zeitstempel, Git-Commit, Branch und Dirty-Status.
|
||||
|
||||
Aufruf: python aza_build_stamp.py
|
||||
Ergebnis: _build_info.py im selben Verzeichnis
|
||||
"""
|
||||
|
||||
import datetime
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
|
||||
def _run_git(*args: str) -> str:
|
||||
try:
|
||||
r = subprocess.run(
|
||||
["git"] + list(args),
|
||||
capture_output=True, text=True, timeout=5,
|
||||
cwd=os.path.dirname(os.path.abspath(__file__)),
|
||||
)
|
||||
return r.stdout.strip() if r.returncode == 0 else ""
|
||||
except Exception:
|
||||
return ""
|
||||
|
||||
|
||||
def generate() -> dict:
    """Collect build metadata (timestamp plus git state) as a plain dict."""
    stamp = datetime.datetime.now()
    return {
        "BUILD_TIME": stamp.strftime("%Y-%m-%d %H:%M:%S"),
        "BUILD_TIMESTAMP": stamp.strftime("%Y%m%d_%H%M%S"),
        "GIT_COMMIT": _run_git("rev-parse", "--short=8", "HEAD"),
        "GIT_BRANCH": _run_git("rev-parse", "--abbrev-ref", "HEAD"),
        # Any porcelain output at all means the working tree is dirty.
        "GIT_DIRTY": bool(_run_git("status", "--porcelain")),
    }
|
||||
|
||||
|
||||
def write_module(info: dict, path: str | None = None):
|
||||
if path is None:
|
||||
path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "_build_info.py")
|
||||
|
||||
lines = [
|
||||
'# Auto-generated by aza_build_stamp.py – DO NOT EDIT',
|
||||
f'BUILD_TIME = "{info["BUILD_TIME"]}"',
|
||||
f'BUILD_TIMESTAMP = "{info["BUILD_TIMESTAMP"]}"',
|
||||
f'GIT_COMMIT = "{info["GIT_COMMIT"]}"',
|
||||
f'GIT_BRANCH = "{info["GIT_BRANCH"]}"',
|
||||
f'GIT_DIRTY = {info["GIT_DIRTY"]}',
|
||||
]
|
||||
with open(path, "w", encoding="utf-8") as f:
|
||||
f.write("\n".join(lines) + "\n")
|
||||
print(f"[BUILD-STAMP] {path} geschrieben ({info['BUILD_TIME']}, {info['GIT_COMMIT']}, dirty={info['GIT_DIRTY']})")
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Generate the metadata and immediately persist it as _build_info.py.
    write_module(generate())
|
||||
246
AzA march 2026 - Kopie (18)/aza_config.py
Normal file
@@ -0,0 +1,246 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Konfigurationskonstanten und Prompt-Strings für KG-Diktat Desktop.
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
_APPDATA_FOLDER_NAME = "AZA Desktop"
|
||||
|
||||
_writable_data_dir_cache = None
|
||||
|
||||
|
||||
def get_writable_data_dir() -> str:
    """Writable directory for ALL user config/data files.

    Frozen build (installed to Program Files): %APPDATA%/AZA Desktop
    Dev (running from source): the directory containing this file
    """
    global _writable_data_dir_cache
    if _writable_data_dir_cache is not None:
        return _writable_data_dir_cache

    if not getattr(sys, "frozen", False):
        # Dev mode: keep everything next to the source files.
        _writable_data_dir_cache = os.path.dirname(os.path.abspath(__file__))
        return _writable_data_dir_cache

    appdata = os.environ.get("APPDATA")
    if appdata:
        target = os.path.join(appdata, _APPDATA_FOLDER_NAME)
    else:
        # APPDATA unset: construct the conventional Roaming path by hand.
        target = os.path.join(str(Path.home()), "AppData", "Roaming", _APPDATA_FOLDER_NAME)
    os.makedirs(target, exist_ok=True)
    _writable_data_dir_cache = target
    return _writable_data_dir_cache
|
||||
|
||||
|
||||
# ─── Models ───
# Model used for speech-to-text transcription.
TRANSCRIBE_MODEL = "gpt-4o-mini-transcribe"
DEFAULT_SUMMARY_MODEL = "gpt-5.2"
ALLOWED_SUMMARY_MODELS = ["gpt-5.2", "gpt-5-mini", "gpt-5-nano"]

# German UI labels shown next to each selectable summary model.
MODEL_LABELS = {
    "gpt-5.2": 'schnell ("teures" KI)',
    "gpt-5-mini": "mittleres KI",
    "gpt-5-nano": "langsameres (günstigeres KI)",
}

# ─── Config file names ───
# All files live in the writable data dir (see get_writable_data_dir above).
CONFIG_FILENAME = "kg_diktat_config.txt"
WINDOW_CONFIG_FILENAME = "kg_diktat_window.txt"
SIGNATURE_CONFIG_FILENAME = "kg_diktat_signature.txt"
KORREKTUREN_CONFIG_FILENAME = "kg_diktat_korrekturen.json"
ABLAGE_BASE_DIR = "kg_diktat_ablage"
# Subfolders of the filing directory and their UI labels.
ABLAGE_SUBFOLDERS = ("KG", "Briefe", "Rezepte", "Kostengutsprachen", "Diktat", "Transkript")
ABLAGE_LABELS = {
    "KG": "KG",
    "Briefe": "Brief",
    "Rezepte": "Rezept",
    "Kostengutsprachen": "KOGU",
    "Diktat": "Diktat",
    "Transkript": "Transkript",
}
PRUEFEN_WINDOW_CONFIG_FILENAME = "kg_diktat_pruefen_window.txt"
ORDNER_WINDOW_CONFIG_FILENAME = "kg_diktat_ordner_window.txt"
TEXT_WINDOW_CONFIG_FILENAME = "kg_diktat_text_window.txt"
DIKTAT_WINDOW_CONFIG_FILENAME = "kg_diktat_diktat_window.txt"
DISKUSSION_WINDOW_CONFIG_FILENAME = "kg_diktat_diskussion_window.txt"
SETTINGS_WINDOW_CONFIG_FILENAME = "kg_diktat_settings_window.txt"
TEXTBLOECKE_CONFIG_FILENAME = "kg_diktat_textbloecke.json"
TEMPLATES_CONFIG_FILENAME = "kg_diktat_templates.txt"
OP_BERICHT_TEMPLATE_CONFIG_FILENAME = "kg_diktat_op_bericht_template.txt"
ARZTBRIEF_VORLAGE_CONFIG_FILENAME = "kg_diktat_arztbrief_vorlage.txt"
TODO_CONFIG_FILENAME = "kg_diktat_todos.json"
TODO_WINDOW_CONFIG_FILENAME = "kg_diktat_todo_window.txt"
TODO_INBOX_CONFIG_FILENAME = "kg_diktat_todo_inbox.json"
TODO_SETTINGS_CONFIG_FILENAME = "kg_diktat_todo_settings.json"
NOTES_CONFIG_FILENAME = "kg_diktat_notes.json"
CHECKLIST_CONFIG_FILENAME = "kg_diktat_checklists.json"
USER_PROFILE_CONFIG_FILENAME = "kg_diktat_user_profile.json"
OPACITY_CONFIG_FILENAME = "kg_diktat_opacity.txt"
AUTOTEXT_CONFIG_FILENAME = "kg_diktat_autotext.json"
FONT_SCALE_CONFIG_FILENAME = "kg_diktat_font_scale.txt"
BUTTON_SCALE_CONFIG_FILENAME = "kg_diktat_button_scale.txt"
TOKEN_USAGE_CONFIG_FILENAME = "kg_diktat_token_usage.txt"
KG_DETAIL_LEVEL_CONFIG_FILENAME = "kg_diktat_detail_level.txt"
SOAP_SECTION_LEVELS_CONFIG_FILENAME = "kg_diktat_soap_section_levels.json"
FONT_SIZES_CONFIG_FILENAME = "text_font_sizes.json"
PANED_POSITIONS_CONFIG_FILENAME = "paned_positions.json"
KOGU_GRUSS_CONFIG_FILENAME = "kg_diktat_kogu_gruss.txt"
KOGU_TEMPLATES_CONFIG_FILENAME = "kg_diktat_kogu_templates.txt"
DISKUSSION_VORLAGE_CONFIG_FILENAME = "kg_diktat_diskussion_vorlage.txt"
BRIEF_STYLE_PROFILES_FILENAME = "kg_diktat_brief_style_profiles.json"

# ─── Opacity / Font / Button scale defaults ───
DEFAULT_OPACITY = 1.0
MIN_OPACITY = 0.4
# Scaling is currently pinned: DEFAULT/MIN/MAX/OPTIMAL all equal the FIXED value.
FIXED_FONT_SCALE = 0.6
FIXED_BUTTON_SCALE = 1.4
DEFAULT_FONT_SCALE = FIXED_FONT_SCALE
MIN_FONT_SCALE = FIXED_FONT_SCALE
MAX_FONT_SCALE = FIXED_FONT_SCALE
DEFAULT_BUTTON_SCALE = FIXED_BUTTON_SCALE
MIN_BUTTON_SCALE = FIXED_BUTTON_SCALE
MAX_BUTTON_SCALE = FIXED_BUTTON_SCALE
OPTIMAL_FONT_SCALE = FIXED_FONT_SCALE
OPTIMAL_BUTTON_SCALE = FIXED_BUTTON_SCALE

# ─── Window defaults (pixels) ───
DEFAULT_WINDOW_WIDTH = 850
DEFAULT_WINDOW_HEIGHT = 920

# ─── SOAP ───
# Section codes and their German display labels.
_SOAP_SECTIONS = ("A", "S", "O", "B", "D", "T", "P")
_SOAP_LABELS = {"A": "Anamnese", "S": "Subjektiv", "O": "Objektiv", "B": "Beurteilung", "D": "Diagnose", "T": "Therapie", "P": "Procedere"}
SOAP_ORDER_CONFIG_FILENAME = "kg_diktat_soap_order.json"
SOAP_VISIBILITY_CONFIG_FILENAME = "kg_diktat_soap_visibility.json"
SOAP_PRESETS_CONFIG_FILENAME = "kg_diktat_soap_presets.json"
DEFAULT_SOAP_ORDER = ["A", "S", "O", "B", "D", "T", "P"]
NUM_SOAP_PRESETS = 3

# ─── Letter section profiles ───
BRIEF_PRESETS_CONFIG_FILENAME = "kg_diktat_brief_presets.json"
NUM_BRIEF_PRESETS = 3

# Built-in letter layouts; each profile defines section order, labels and
# per-section visibility.
BRIEF_PROFILE_DEFAULTS = [
    {
        "name": "Arztbrief",
        "order": ["DI", "AN", "BF", "BE", "TP"],
        "labels": {
            "DI": "Diagnosen",
            "AN": "Anamnese",
            "BF": "Befunde",
            "BE": "Beurteilung",
            "TP": "Therapie / Procedere",
        },
        "visibility": {"DI": True, "AN": True, "BF": True, "BE": True, "TP": True},
    },
    {
        "name": "Spitalarztbrief",
        "order": ["DI", "ZF", "EP", "AE", "BF", "VL", "TA", "MA", "PE"],
        "labels": {
            "DI": "Diagnosen",
            "ZF": "Zusammenfassung",
            "EP": "Epikrise",
            "AE": "Anamnese bei Eintritt",
            "BF": "Befunde",
            "VL": "Verlauf",
            "TA": "Therapie im Aufenthalt",
            "MA": "Medikation bei Austritt",
            "PE": "Procedere / Empfehlungen",
        },
        "visibility": {k: True for k in ["DI", "ZF", "EP", "AE", "BF", "VL", "TA", "MA", "PE"]},
    },
    {
        "name": "Einfach",
        "order": ["DI", "BF", "BE", "TH"],
        "labels": {
            "DI": "Diagnose",
            "BF": "Befund",
            "BE": "Beurteilung",
            "TH": "Therapie",
        },
        "visibility": {"DI": True, "BF": True, "BE": True, "TH": True},
    },
]

# ─── KOGU closing greetings (selectable options) ───
KOGU_GRUSS_OPTIONS = [
    "Mit freundlichen Grüssen",
    "Freundliche Grüsse",
    "Mit freundlichen Grüssen und besten Wünschen",
    "Hochachtungsvoll",
]

# ─── Correction defaults ───
# Transcription mis-hearings -> corrected medical terms.
_DEFAULT_KORREKTUREN = {
    "diagnosen": {
        "gesamte lassen": "Xanthelasma",
        "Gesamtlassen": "Xanthelasma",
        "Xanthel Asma": "Xanthelasma",
        "Xantel Asma": "Xanthelasma",
        "gesamte Lasma": "Xanthelasma",
        "Aktin ische": "Aktinische",
        "Kerato Akanthom": "Keratoakanthom",
        "Basalzell Karzinom": "Basalzellkarzinom",
        "Platte Nepithel": "Plattenepithel",
    },
    "medikamente": {},
}

# ─── Default doctor's-letter template (German prompt text) ───
ARZTBRIEF_VORLAGE_DEFAULT = """Reihenfolge: 1. Diagnose, 2. Anlass, 3. Befunde, 4. Empfehlung, Therapie

Ordne den Arztbrief zwingend nach obiger Reihenfolge. Jeder Abschnitt mit eigener Überschrift (z. B. Diagnose:, Anlass:, Befunde:, Therapie:). Fehlende Abschnitte weglassen. Keine Sternchen (*). Diagnosen mit ICD-10-Code beibehalten."""

# ─── KG comment keywords (lowercase phrases scanned in generated text) ───
COMMENT_KEYWORDS = (
    "ärztlich zu prüfen",
    "wirkstoff nicht genannt",
    "kontrolle",
    "vorsicht",
    "warnung",
    "weiterverordnung",
    "überprüfung",
    "blutkontrolle",
    "leberwerte",
    "red flag",
    "interaktion",
    "nebenwirkung",
    "kontraindikation",
    "aufklärung",
)

# ─── Cloud sync ───
# NOTE(review): the anon key is a publishable key, but confirm it is safe to ship in source.
_SUPABASE_URL = "https://cqbqkejdlxesxjdtkpfr.supabase.co"
_SUPABASE_ANON_KEY = "sb_publishable_r8YMw01FF-fYD9vHLnz7FQ_d8mhpVVQ"

# ─── Activation / expiry ───
APP_HARD_EXPIRY = "2027-03-31"
APP_TRIAL_DAYS = 21
ACTIVATION_CONFIG_FILENAME = "kg_diktat_activation.json"
# NOTE(review): hardcoded HMAC secret in source — anyone with the binary can forge
# activation data; consider moving to the server/vault.
ACTIVATION_HMAC_SECRET = "AZA-Praxis-2026-xK9mQ7"

# ─── Token quota / capacity ───
DEFAULT_TOKEN_QUOTA = 500_000
SOFT_LOCK_THRESHOLD = 5_000
AVG_TOKENS_PER_REPORT = 3_000

# ─── Launcher ───
LAUNCHER_CONFIG_FILENAME = "kg_diktat_launcher.json"
LAUNCHER_MODULES = ["kg", "praxis_chat", "empfang", "notizen", "translator", "medwork_chat"]
LAUNCHER_MODULE_LABELS = {
    "ki": "KI-Assistent",
    "kg": "AzA Office",
    "empfang": "Empfang",
    "notizen": "Diktieren",
    "translator": "\u00dcbersetzer",
    "medwork_chat": "\u00c4rzte-Netzwerk",
    "praxis_chat": "Praxis-Chat",
}
# Modules shown in the launcher but not yet enabled.
LAUNCHER_DISABLED_MODULES = {"medwork_chat"}

# ─── Global list of all open windows, used for scaling ───
_ALL_WINDOWS = []
|
||||
211
AzA march 2026 - Kopie (18)/aza_consent.py
Normal file
@@ -0,0 +1,211 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
AZA MedWork – Consent Management (Einwilligungs-Protokollierung).
|
||||
|
||||
Append-only Log mit SHA-256-Integritaetskette (jeder Eintrag
|
||||
referenziert den Hash des vorherigen Eintrags).
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import json
|
||||
import hashlib
|
||||
import time
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
from aza_config import get_writable_data_dir
|
||||
|
||||
# Directory this module (or the frozen bundle copy) lives in.
_BUNDLE_DIR = Path(__file__).resolve().parent
# The consent text whose "Stand:" line defines the current consent version.
_CONSENT_VERSION_FILE = _BUNDLE_DIR / "legal" / "ai_consent.md"
# Append-only consent log in the writable user data directory.
_CONSENT_FILE = Path(get_writable_data_dir()) / "aza_consent_log.json"

# The single consent type this module manages.
_CONSENT_TYPE_AI = "ai_processing"
|
||||
|
||||
|
||||
def _get_consent_version() -> str:
    """Extract the version (the 'Stand:' date) from ai_consent.md; 'unknown' on failure."""
    try:
        with open(_CONSENT_VERSION_FILE, "r", encoding="utf-8") as fh:
            for raw in fh:
                stripped = raw.strip()
                if stripped.lower().startswith("stand:"):
                    return stripped.split(":", 1)[1].strip()
    except (FileNotFoundError, OSError):
        pass
    return "unknown"
|
||||
|
||||
|
||||
def _load_log() -> list:
    """Read the consent log; return [] when missing, unreadable, or not a list."""
    if not _CONSENT_FILE.exists():
        return []
    try:
        data = json.loads(_CONSENT_FILE.read_text(encoding="utf-8"))
    except (json.JSONDecodeError, OSError):
        return []
    return data if isinstance(data, list) else []
|
||||
|
||||
|
||||
def _save_log(entries: list) -> None:
    """Overwrite the consent log with *entries* as pretty-printed JSON."""
    with open(_CONSENT_FILE, "w", encoding="utf-8") as fh:
        json.dump(entries, fh, ensure_ascii=False, indent=2)
|
||||
|
||||
|
||||
def _compute_hash(entry: dict, prev_hash: str) -> str:
|
||||
"""SHA-256 ueber den Eintrag + vorherigen Hash (Integritaetskette)."""
|
||||
canonical = json.dumps({
|
||||
"user_id": entry.get("user_id", ""),
|
||||
"consent_type": entry.get("consent_type", ""),
|
||||
"consent_version": entry.get("consent_version", ""),
|
||||
"timestamp": entry.get("timestamp", ""),
|
||||
"source": entry.get("source", ""),
|
||||
"action": entry.get("action", ""),
|
||||
"prev_hash": prev_hash,
|
||||
}, sort_keys=True, ensure_ascii=False)
|
||||
return hashlib.sha256(canonical.encode("utf-8")).hexdigest()
|
||||
|
||||
|
||||
def _get_last_hash(entries: list) -> str:
|
||||
if not entries:
|
||||
return "0" * 64
|
||||
return entries[-1].get("hash", "0" * 64)
|
||||
|
||||
|
||||
def _append_action(user_id: str, source: str, action: str) -> dict:
    """Append one chained entry (action 'grant' or 'revoke') to the consent log.

    Shared implementation for record_consent / record_revoke, which previously
    duplicated this body verbatim except for the action string.
    """
    entries = _load_log()
    prev_hash = _get_last_hash(entries)

    entry = {
        "user_id": user_id,
        "consent_type": _CONSENT_TYPE_AI,
        "consent_version": _get_consent_version(),
        "timestamp": datetime.now(timezone.utc).isoformat(),
        "source": source,
        "action": action,
    }
    # Chain: each entry stores the previous hash and its own hash over both.
    entry["prev_hash"] = prev_hash
    entry["hash"] = _compute_hash(entry, prev_hash)

    entries.append(entry)
    _save_log(entries)
    return entry


def record_consent(user_id: str, source: str = "ui") -> dict:
    """Record a new consent grant (append-only); returns the new log entry."""
    return _append_action(user_id, source, "grant")


def record_revoke(user_id: str, source: str = "ui") -> dict:
    """Record a consent revocation (append-only); returns the new log entry."""
    return _append_action(user_id, source, "revoke")
|
||||
|
||||
|
||||
def has_valid_consent(user_id: str) -> bool:
    """True iff the user's most recent AI-consent action is a grant for the
    current consent version (i.e. not revoked and not outdated)."""
    current_version = _get_consent_version()

    newest = None
    for e in _load_log():
        if e.get("user_id") == user_id and e.get("consent_type") == _CONSENT_TYPE_AI:
            newest = e

    return (
        newest is not None
        and newest.get("action") == "grant"
        and newest.get("consent_version") == current_version
    )
|
||||
|
||||
|
||||
def get_consent_status(user_id: str) -> dict:
    """Return a snapshot of the user's current consent state."""
    current_version = _get_consent_version()

    last_grant = None
    last_revoke = None
    for e in _load_log():
        if e.get("user_id") != user_id or e.get("consent_type") != _CONSENT_TYPE_AI:
            continue
        action = e.get("action")
        if action == "grant":
            last_grant = e
        elif action == "revoke":
            last_revoke = e

    return {
        "user_id": user_id,
        "has_consent": has_valid_consent(user_id),
        "current_version": current_version,
        "last_grant": last_grant,
        "last_revoke": last_revoke,
        "version_match": (last_grant or {}).get("consent_version") == current_version,
    }
|
||||
|
||||
|
||||
def get_user_history(user_id: str) -> list:
    """All consent-log entries belonging to *user_id*, in log order."""
    history = []
    for e in _load_log():
        if e.get("user_id") == user_id:
            history.append(e)
    return history
|
||||
|
||||
|
||||
def export_consent_log(output_path: Optional[str] = None) -> str:
    """Export the full consent log as JSON.

    Returns the path of the exported file (auto-named in the writable data
    directory when *output_path* is None).
    """
    entries = _load_log()
    if output_path is None:
        stamp = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
        output_path = str(Path(get_writable_data_dir()) / f"consent_export_{stamp}.json")

    payload = {
        "export_timestamp": datetime.now(timezone.utc).isoformat(),
        "total_entries": len(entries),
        "current_consent_version": _get_consent_version(),
        "entries": entries,
    }

    with open(output_path, "w", encoding="utf-8") as fh:
        json.dump(payload, fh, ensure_ascii=False, indent=2)

    return output_path
|
||||
|
||||
|
||||
def verify_chain_integrity() -> tuple[bool, list]:
    """Re-walk the hash chain and report inconsistencies.

    Returns:
        (ok, errors) — ok is True iff no entry's prev_hash or hash deviates.
    """
    errors = []
    prev_hash = "0" * 64

    for i, entry in enumerate(_load_log()):
        stored_prev = entry.get("prev_hash", "")
        actual = entry.get("hash", "")
        expected = _compute_hash(entry, prev_hash)

        if stored_prev != prev_hash:
            errors.append(f"Eintrag {i}: prev_hash stimmt nicht (erwartet {prev_hash[:16]}..., gefunden {stored_prev[:16]}...)")
        if actual != expected:
            errors.append(f"Eintrag {i}: Hash stimmt nicht (erwartet {expected[:16]}..., gefunden {actual[:16]}...)")

        # Continue the walk from the stored hash, as the original did.
        prev_hash = actual

    return len(errors) == 0, errors
|
||||
156
AzA march 2026 - Kopie (18)/aza_desktop.spec
Normal file
@@ -0,0 +1,156 @@
|
||||
# -*- mode: python ; coding: utf-8 -*-
|
||||
|
||||
from pathlib import Path
|
||||
from PyInstaller.utils.hooks import collect_submodules
|
||||
|
||||
# PyInstaller build script for the AZA desktop app.
# SPECPATH, Analysis, PYZ, EXE and COLLECT are injected by PyInstaller at build time.
project_root = Path(SPECPATH)
data_dir = project_root / "data"

# Modules PyInstaller's static analysis misses (dynamic imports in the web stack).
hiddenimports = []
hiddenimports += collect_submodules("uvicorn")
hiddenimports += collect_submodules("fastapi")
hiddenimports += collect_submodules("starlette")
hiddenimports += collect_submodules("anyio")
hiddenimports += collect_submodules("pydantic")

hiddenimports += [
    # --- local modules used by basis14.py (desktop) ---
    "aza_config",
    "aza_prompts",
    "aza_persistence",
    "aza_ui_helpers",
    "aza_audio",
    "aza_todo_mixin",
    "aza_text_windows_mixin",
    "aza_diktat_mixin",
    "aza_settings_mixin",
    "aza_ordner_mixin",
    "aza_arbeitsplan_mixin",
    "aza_notizen_mixin",
    "aza_totp",
    "aza_consent",
    "aza_audit_log",
    "desktop_backend_autostart",
    "desktop_update_check",
    "aza_version",
    "_build_info",
    "openai_runtime_config",
    "aza_launcher",
    "aza_activation",
    "security_vault",
    "aza_med_validator",
    "aza_style",
    "aza_admin",
    "aza_systemstatus",
    "aza_global_paste",
    "aza_firewall",
    "aza_docapp",
    "translate",
    "apps.diktat.audio_notiz_app",
    "apps.diktat.diktat_app",
    # --- local modules used by backend_main.py ---
    "backend_main",
    "aza_tls",
    "aza_rate_limit",
    "aza_security",
    "aza_license_logic",
    "aza_device_enforcement",
    "aza_news_backend",
    "aza_monitoring",
    "aza_stripe_idempotency",
    "aza_backup",
    "aza_macro",
    "stripe_routes",
    "admin_routes",
    "project_status_routes",
    "services",
    "services.live_event_search",
    "services.event_llm_direct",
    "services.news_llm_search",
    "services.event_extract_llm",
    "services.link_verify",
    # --- third-party that PyInstaller may miss ---
    "bcrypt",
    "pyotp",
    "qrcode",
    "qrcode.image.pure",
    "PIL",
    "PIL.Image",
    "PIL.ImageTk",
    "pynput",
    "pynput.keyboard",
    "pynput.keyboard._win32",
    "pynput.mouse",
    "pynput.mouse._win32",
    "pynput._util",
    "pynput._util.win32",
    "dotenv",
    "openai",
    "requests",
    "stripe",
    "httpx",
    "sounddevice",
    "_sounddevice_data",
    "numpy",
    "docx",
    "docx.opc",
    "docx.opc.constants",
    "docx.opc.part",
    "docx.opc.pkgreader",
    "lxml",
    "lxml.etree",
]

# Non-code files bundled next to the executable (source path, bundle destination).
datas = [
    (str(project_root / "logo.png"), "."),
    (str(project_root / "logo.ico"), "."),
    (str(project_root / "apps"), "apps"),
    (str(project_root / "backend_url.txt"), "."),
    (str(project_root / "backend_token.txt"), "."),
    (str(project_root / "project_status.json"), "."),
    (str(project_root / "project_plan.json"), "."),
    (str(project_root / "project_todos.json"), "."),
    (str(project_root / "project_roadmap.json"), "."),
    (str(project_root / "assets"), "assets"),
    (str(project_root / "legal"), "legal"),
]

a = Analysis(
    [str(project_root / "basis14.py")],
    pathex=[str(project_root)],
    binaries=[],
    datas=datas,
    hiddenimports=hiddenimports,
    hookspath=[],
    hooksconfig={},
    runtime_hooks=[],
    excludes=[],
    noarchive=False,
)

pyz = PYZ(a.pure)

# One-folder build: EXE with exclude_binaries, then COLLECT gathers everything.
exe = EXE(
    pyz,
    a.scripts,
    [],
    exclude_binaries=True,
    name="aza_desktop",
    debug=False,
    bootloader_ignore_signals=False,
    strip=False,
    upx=True,
    # console=True keeps a visible console window (useful for backend logs).
    console=True,
    disable_windowed_traceback=False,
    icon=str(project_root / "logo.ico"),
)

coll = COLLECT(
    exe,
    a.binaries,
    a.datas,
    strip=False,
    upx=True,
    upx_exclude=[],
    name="aza_desktop",
)
|
||||
339
AzA march 2026 - Kopie (18)/aza_device_enforcement.py
Normal file
@@ -0,0 +1,339 @@
|
||||
import hashlib
|
||||
import sqlite3
|
||||
import sys
|
||||
import time
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional, Tuple
|
||||
|
||||
# Minimum device slots granted per active license; multiple licenses for the
# same email stack additively (see _count_active_licenses).
DEVICES_PER_LICENSE_FLOOR = 2
|
||||
|
||||
|
||||
def _runtime_base_dir() -> Path:
|
||||
if getattr(sys, "frozen", False):
|
||||
return Path(getattr(sys, "_MEIPASS", Path(sys.executable).resolve().parent))
|
||||
return Path(__file__).resolve().parent
|
||||
|
||||
|
||||
def _resolve_db_path() -> str:
    """Pick the SQLite database file location.

    Source checkout: ``<module dir>/data/stripe_webhook.sqlite``.
    Frozen build: a per-user writable ``data`` directory (from ``aza_config``
    when importable, else ``%APPDATA%/AZA Desktop/data``), created on demand.
    """
    if not getattr(sys, "frozen", False):
        return str(_runtime_base_dir() / "data" / "stripe_webhook.sqlite")

    import os as _os
    try:
        from aza_config import get_writable_data_dir
        data_dir = Path(get_writable_data_dir()) / "data"
    except Exception:
        # aza_config unavailable -> fall back to the Windows roaming profile.
        data_dir = Path(_os.environ.get("APPDATA", "")) / "AZA Desktop" / "data"
    data_dir.mkdir(parents=True, exist_ok=True)
    return str(data_dir / "stripe_webhook.sqlite")
|
||||
|
||||
|
||||
# Module-wide default database location, resolved once at import time.
DB_PATH = _resolve_db_path()
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class DeviceDecision:
    """Immutable outcome of a device-enforcement check for one license email."""

    allowed: bool
    # One of: "ok" | "missing_device_id" | "license_not_found"
    # | "device_limit_reached" | "user_limit_reached"
    reason: str
    devices_used: int
    devices_allowed: int
    users_used: int
    users_allowed: int
    license_active: bool = False
    license_count: int = 0
|
||||
|
||||
|
||||
def _hash_device_id(device_id: str) -> str:
|
||||
return hashlib.sha256(device_id.encode("utf-8")).hexdigest()
|
||||
|
||||
|
||||
def ensure_device_table(conn: sqlite3.Connection) -> None:
    """Create the ``device_bindings`` table if missing and apply additive
    column migrations for rows created by older app versions.

    Safe to call repeatedly; commits on every call.
    """
    conn.execute(
        """
        CREATE TABLE IF NOT EXISTS device_bindings (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            customer_email TEXT NOT NULL,
            user_key TEXT NOT NULL,
            device_hash TEXT NOT NULL,
            first_seen_at INTEGER NOT NULL,
            last_seen_at INTEGER NOT NULL,
            UNIQUE(customer_email, user_key, device_hash)
        );
        """
    )
    # Columns added after the initial schema; apply only the missing ones.
    present = {row[1] for row in conn.execute("PRAGMA table_info(device_bindings)").fetchall()}
    migrations = (
        ("device_name", "ALTER TABLE device_bindings ADD COLUMN device_name TEXT DEFAULT ''"),
        ("is_active", "ALTER TABLE device_bindings ADD COLUMN is_active INTEGER DEFAULT 1"),
        ("app_version", "ALTER TABLE device_bindings ADD COLUMN app_version TEXT DEFAULT ''"),
        ("device_fingerprint", "ALTER TABLE device_bindings ADD COLUMN device_fingerprint TEXT DEFAULT ''"),
    )
    for column, ddl in migrations:
        if column not in present:
            conn.execute(ddl)
    conn.commit()
|
||||
|
||||
|
||||
def _count_active_licenses(conn: sqlite3.Connection, customer_email: str) -> Tuple[int, int]:
    """Returns (n_active_licenses, total_allowed_devices).

    Multiple active licenses for the same email stack:
    1 license = 2 devices, 2 licenses = 4 devices, etc.
    Each license contributes at least DEVICES_PER_LICENSE_FLOOR slots.
    """
    floor = DEVICES_PER_LICENSE_FLOOR
    row = conn.execute(
        """SELECT COUNT(*),
                  COALESCE(SUM(
                     CASE WHEN devices_per_user IS NULL OR devices_per_user < ?
                     THEN ? ELSE devices_per_user END
                  ), 0)
           FROM licenses
           WHERE lower(customer_email) = lower(?)
             AND status = 'active'""",
        (floor, floor, customer_email),
    ).fetchone()
    if row and int(row[0]) > 0:
        return (int(row[0]), int(row[1]))
    return (0, 0)
|
||||
|
||||
|
||||
def _get_license_row(conn: sqlite3.Connection, customer_email: str) -> Optional[Tuple[int, int]]:
    """Backward-compat wrapper. Returns (allowed_users, total_devices)."""
    active_count, device_total = _count_active_licenses(conn, customer_email)
    return (1, device_total) if active_count else None
|
||||
|
||||
|
||||
def enforce_and_touch_device(
    customer_email: str,
    user_key: str,
    device_id: Optional[str],
    db_path: Optional[str] = None,
    device_name: str = "",
    app_version: str = "",
    device_fingerprint: str = "",
) -> DeviceDecision:
    """Enforce the per-email device limit and record/refresh the binding.

    Rules (in order):
    1. device_hash match -> allow, update last_seen + store fingerprint
    2. device_fingerprint match -> rebind hash (same HW, new install)
    3. Legacy hostname match -> rebind hash + store fingerprint (migration)
    4. Under limit -> register new device
    5. At/over limit -> deny

    Args:
        customer_email: License-holder email (matched case-insensitively).
        user_key: Sub-user key within the license.
        device_id: Raw client device id; only its SHA-256 hash is stored.
        db_path: Optional SQLite path override (defaults to module DB_PATH).
        device_name: Human-readable device/host name reported by the client.
        app_version: Client app version string.
        device_fingerprint: Hardware fingerprint used for rebinding.

    Returns:
        DeviceDecision describing whether the device is allowed plus the
        current slot usage.
    """
    if not device_id:
        print("[DEVICE-ENFORCE] REJECT: missing device_id")
        return DeviceDecision(
            allowed=False, reason="missing_device_id",
            devices_used=0, devices_allowed=0,
            users_used=0, users_allowed=0,
        )

    device_hash = _hash_device_id(device_id)
    now = int(time.time())
    print(f"[DEVICE-ENFORCE] enforce email={customer_email} "
          f"hash={device_hash[:12]}... name={device_name} "
          f"fp={device_fingerprint[:12] + '...' if device_fingerprint else 'none'}")

    conn = sqlite3.connect(db_path or DB_PATH)
    try:
        ensure_device_table(conn)

        n_licenses, total_devices = _count_active_licenses(conn, customer_email)
        if n_licenses == 0:
            print(f"[DEVICE-ENFORCE] no active license for {customer_email}")
            return DeviceDecision(
                allowed=False, reason="license_not_found",
                devices_used=0, devices_allowed=0,
                users_used=0, users_allowed=0,
                license_active=False, license_count=0,
            )

        # Active device slots currently consumed by this (email, user_key).
        cur_devices = conn.execute(
            """SELECT COUNT(*) FROM device_bindings
               WHERE lower(customer_email) = lower(?) AND user_key = ?
               AND COALESCE(is_active, 1) = 1""",
            (customer_email, user_key),
        )
        used_devices = int(cur_devices.fetchone()[0])

        # Distinct user keys seen for this email (active or not).
        cur_users = conn.execute(
            "SELECT COUNT(DISTINCT user_key) FROM device_bindings WHERE lower(customer_email) = lower(?)",
            (customer_email,),
        )
        users_used = int(cur_users.fetchone()[0])

        # Pre-built "allowed" decision reused by steps 1 and 2.
        _ok = DeviceDecision(
            allowed=True, reason="ok",
            devices_used=used_devices, devices_allowed=total_devices,
            users_used=users_used, users_allowed=1,
            license_active=True, license_count=n_licenses,
        )

        # --- Step 1: exact device_hash match (normal case) ---
        cur = conn.execute(
            """SELECT id FROM device_bindings
               WHERE lower(customer_email) = lower(?) AND user_key = ? AND device_hash = ?
               LIMIT 1""",
            (customer_email, user_key, device_hash),
        )
        existing = cur.fetchone()
        if existing:
            # Touch last_seen; NULLIF/COALESCE keeps stored values when the
            # client sent empty strings.
            conn.execute(
                """UPDATE device_bindings
                   SET last_seen_at = ?,
                       device_name = COALESCE(NULLIF(?, ''), device_name),
                       app_version = COALESCE(NULLIF(?, ''), app_version),
                       device_fingerprint = COALESCE(NULLIF(?, ''), device_fingerprint),
                       is_active = 1
                   WHERE id = ?""",
                (now, device_name, app_version, device_fingerprint, int(existing[0])),
            )
            conn.commit()
            print(f"[DEVICE-ENFORCE] step1 hash-match id={existing[0]} -> allowed")
            return _ok

        # --- Step 2: fingerprint match (reinstall, new device_id, same HW) ---
        if device_fingerprint:
            fp_row = conn.execute(
                """SELECT id FROM device_bindings
                   WHERE lower(customer_email) = lower(?) AND user_key = ?
                   AND device_fingerprint = ? AND device_fingerprint != ''
                   AND COALESCE(is_active, 1) = 1
                   LIMIT 1""",
                (customer_email, user_key, device_fingerprint),
            ).fetchone()
            if fp_row:
                # Same hardware, new install: move the binding to the new hash.
                conn.execute(
                    """UPDATE device_bindings
                       SET device_hash = ?, last_seen_at = ?,
                           device_name = COALESCE(NULLIF(?, ''), device_name),
                           app_version = COALESCE(NULLIF(?, ''), app_version),
                           is_active = 1
                       WHERE id = ?""",
                    (device_hash, now, device_name, app_version, int(fp_row[0])),
                )
                conn.commit()
                print(f"[DEVICE-ENFORCE] step2 fingerprint-rebind id={fp_row[0]} -> allowed")
                return _ok

        # --- Step 3: legacy hostname match (old entry without fingerprint) ---
        if device_fingerprint and device_name:
            legacy_row = conn.execute(
                """SELECT id, device_hash FROM device_bindings
                   WHERE lower(customer_email) = lower(?) AND user_key = ?
                   AND (device_fingerprint IS NULL OR device_fingerprint = '')
                   AND device_name = ?
                   AND COALESCE(is_active, 1) = 1
                   LIMIT 1""",
                (customer_email, user_key, device_name),
            ).fetchone()
            if legacy_row:
                rebind_id = int(legacy_row[0])
                # Migrate the legacy row: attach the new hash and fingerprint.
                conn.execute(
                    """UPDATE device_bindings
                       SET device_hash = ?, device_fingerprint = ?,
                           last_seen_at = ?,
                           app_version = COALESCE(NULLIF(?, ''), app_version),
                           is_active = 1
                       WHERE id = ?""",
                    (device_hash, device_fingerprint, now, app_version, rebind_id),
                )
                # Deactivate any other fingerprint-less rows with the same
                # hostname so they stop consuming slots.
                stale = conn.execute(
                    """UPDATE device_bindings SET is_active = 0
                       WHERE lower(customer_email) = lower(?) AND user_key = ?
                       AND device_name = ?
                       AND (device_fingerprint IS NULL OR device_fingerprint = '')
                       AND id != ?
                       AND COALESCE(is_active, 1) = 1""",
                    (customer_email, user_key, device_name, rebind_id),
                )
                stale_count = stale.rowcount
                conn.commit()
                used_after = used_devices - stale_count
                print(f"[DEVICE-ENFORCE] step3 legacy-hostname-rebind id={rebind_id} "
                      f"old_hash={legacy_row[1][:12]}... stale_deactivated={stale_count} "
                      f"slots={used_after}/{total_devices} -> allowed")
                return DeviceDecision(
                    allowed=True, reason="ok",
                    devices_used=max(1, used_after),
                    devices_allowed=total_devices,
                    users_used=users_used, users_allowed=1,
                    license_active=True, license_count=n_licenses,
                )

        # --- Step 4: new device -- check limit ---
        if used_devices >= total_devices:
            print(f"[DEVICE-ENFORCE] step4 limit-reached {used_devices}/{total_devices} "
                  f"name={device_name} fp={device_fingerprint[:12] if device_fingerprint else 'none'}")
            return DeviceDecision(
                allowed=False, reason="device_limit_reached",
                devices_used=used_devices, devices_allowed=total_devices,
                users_used=users_used, users_allowed=1,
                license_active=True, license_count=n_licenses,
            )

        # --- Step 5: register new device ---
        conn.execute(
            """INSERT INTO device_bindings
               (customer_email, user_key, device_hash, first_seen_at, last_seen_at,
                device_name, is_active, app_version, device_fingerprint)
               VALUES (?, ?, ?, ?, ?, ?, 1, ?, ?)""",
            (customer_email, user_key, device_hash, now, now,
             device_name, app_version, device_fingerprint),
        )
        conn.commit()
        print(f"[DEVICE-ENFORCE] step5 new-device registered name={device_name} "
              f"{used_devices + 1}/{total_devices}")

        return DeviceDecision(
            allowed=True, reason="ok",
            devices_used=used_devices + 1, devices_allowed=total_devices,
            # First device for this user_key also raises the user count.
            users_used=users_used if used_devices > 0 else users_used + 1,
            users_allowed=1,
            license_active=True, license_count=n_licenses,
        )
    finally:
        conn.close()
|
||||
|
||||
|
||||
def list_devices_for_email(
    customer_email: str, db_path: Optional[str] = None,
) -> Dict[str, Any]:
    """Admin/debug: list all registered devices and license info for an email."""
    conn = sqlite3.connect(db_path or DB_PATH)
    try:
        ensure_device_table(conn)
        n_licenses, total_devices = _count_active_licenses(conn, customer_email)

        rows = conn.execute(
            """SELECT device_hash, device_name, COALESCE(is_active, 1),
                      COALESCE(app_version, ''), first_seen_at, last_seen_at
               FROM device_bindings
               WHERE lower(customer_email) = lower(?)
               ORDER BY last_seen_at DESC""",
            (customer_email,),
        ).fetchall()

        # Shape each row into a display-friendly dict; hashes are truncated.
        devices: List[Dict[str, Any]] = [
            {
                "device_hash_short": (hash_val or "")[:12] + "…",
                "device_name": name or "",
                "is_active": bool(active_flag),
                "app_version": version or "",
                "first_seen": first_seen,
                "last_seen": last_seen,
            }
            for hash_val, name, active_flag, version, first_seen, last_seen in rows
        ]

        active_total = sum(1 for d in devices if d["is_active"])
        return {
            "email": customer_email,
            "active_licenses": n_licenses,
            "allowed_devices": total_devices,
            "registered_devices": len(devices),
            "active_devices": active_total,
            "devices": devices,
        }
    finally:
        conn.close()
|
||||
BIN
AzA march 2026 - Kopie (18)/aza_diff_backend_main.txt
Normal file
BIN
AzA march 2026 - Kopie (18)/aza_diff_basis14.txt
Normal file
BIN
AzA march 2026 - Kopie (18)/aza_diff_version.txt
Normal file
474
AzA march 2026 - Kopie (18)/aza_diktat_mixin.py
Normal file
@@ -0,0 +1,474 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
AzaDiktatMixin – Diktat-Fenster (nur Transkription, keine KG).
|
||||
"""
|
||||
|
||||
import os
|
||||
import threading
|
||||
import wave
|
||||
import tkinter as tk
|
||||
from tkinter import ttk, messagebox
|
||||
from tkinter.scrolledtext import ScrolledText
|
||||
|
||||
from aza_persistence import (
|
||||
load_diktat_geometry,
|
||||
save_diktat_geometry,
|
||||
save_to_ablage,
|
||||
_win_clipboard_set,
|
||||
sanitize_markdown_for_plain_text,
|
||||
is_autocopy_after_diktat_enabled,
|
||||
is_global_right_click_paste_enabled,
|
||||
save_autocopy_prefs,
|
||||
)
|
||||
from aza_ui_helpers import (
|
||||
center_window,
|
||||
add_resize_grip,
|
||||
add_font_scale_control,
|
||||
add_text_font_size_control,
|
||||
RoundedButton,
|
||||
)
|
||||
from aza_audio import AudioRecorder
|
||||
|
||||
|
||||
class AzaDiktatMixin:
    """Mixin for the dictation window (recording + transcription only, no KG)."""

    def open_diktat_window(self):
        """Independent window: record and transcribe dictation only (no KG).

        The transcribed text is automatically copied to the clipboard when
        the auto-copy preference is enabled. The window supports a compact
        "minimized" header mode with its own record/new buttons.
        """
        if not self.ensure_ready():
            return
        DIKTAT_MIN_W, DIKTAT_MIN_H = 420, 380
        win = tk.Toplevel(self)
        win.title("Diktat – nur Transkription")
        win.minsize(DIKTAT_MIN_W, DIKTAT_MIN_H)
        win.configure(bg="#B9ECFA")
        win.attributes("-topmost", True)
        self._diktat_window = win
        self._register_window(win)

        # Window position: load a saved geometry or center the window.
        saved_geom = load_diktat_geometry()
        if saved_geom:
            # Reuse the stored position (position is preserved across runs).
            win.geometry(saved_geom)
        else:
            # No saved position -> center.
            win.geometry("300x290")
            center_window(win, 300, 290)

        def on_diktat_close():
            # Persist geometry, unregister the window, and close; if the
            # main window is hidden, closing this window ends the app.
            try:
                geom = win.geometry()
                save_diktat_geometry(geom)
            except Exception:
                pass
            self._diktat_window = None
            if hasattr(self, "_aza_windows"):
                self._aza_windows.discard(win)
            win.destroy()
            if getattr(self, "_main_hidden", False):
                try:
                    self.destroy()
                except Exception:
                    pass
        win.protocol("WM_DELETE_WINDOW", on_diktat_close)

        # Also save the position while moving/resizing (debounced 400 ms).
        _diktat_geom_after_id = [None]
        def on_diktat_configure(e):
            if e.widget is win and _diktat_geom_after_id[0]:
                win.after_cancel(_diktat_geom_after_id[0])
            if e.widget is win:
                _diktat_geom_after_id[0] = win.after(400, lambda: save_diktat_geometry(win.geometry()))
        win.bind("<Configure>", on_diktat_configure)

        add_resize_grip(win, DIKTAT_MIN_W, DIKTAT_MIN_H)
        add_font_scale_control(win)

        # --- Header with minimize button ---
        diktat_header = tk.Frame(win, bg="#B9ECFA")
        diktat_header.pack(fill="x")
        tk.Label(diktat_header, text="🎙 Diktat", font=("Segoe UI", 12, "bold"),
                 bg="#B9ECFA", fg="#1a4d6d").pack(side="left", padx=10, pady=6)

        # Single-element lists are used as mutable closure cells.
        _dik_minimized = [False]
        _dik_geom_before = [None]
        _dik_restoring = [False]

        # Mini buttons + status (shown when minimized)
        _mini_rec_btn = [None]
        _mini_neu_btn = [None]
        _mini_status_lbl = [None]

        def _mini_toggle_record():
            """Start/stop recording while the window is minimized."""
            toggle_diktat()
            if _mini_rec_btn[0]:
                if is_recording[0]:
                    _mini_rec_btn[0].configure(text="⏹", fg="#D04040")
                else:
                    _mini_rec_btn[0].configure(text="⏺", fg="#5A90B0")

        def _mini_new_diktat():
            """New: clear the text, discard a running recording, start fresh."""
            do_neu()
            if _mini_rec_btn[0]:
                if is_recording[0]:
                    _mini_rec_btn[0].configure(text="⏹", fg="#D04040")
                else:
                    _mini_rec_btn[0].configure(text="⏺", fg="#5A90B0")

        def _restore_diktat_content():
            # Leave minimized mode: hide mini widgets, repack the main frame.
            if not _dik_minimized[0]:
                return
            _dik_restoring[0] = True
            if _mini_rec_btn[0]:
                _mini_rec_btn[0].pack_forget()
            if _mini_neu_btn[0]:
                _mini_neu_btn[0].pack_forget()
            if _mini_status_lbl[0]:
                _mini_status_lbl[0]._parent_bar.pack_forget()
            main_f.pack(fill="both", expand=True)
            btn_minimize_dik.configure(text="—")
            _dik_minimized[0] = False
            win.minsize(DIKTAT_MIN_W, DIKTAT_MIN_H)
            # Guard flag prevents the <Configure> handler from re-restoring.
            win.after(200, lambda: _dik_restoring.__setitem__(0, False))

        def _toggle_minimize_diktat():
            if _dik_minimized[0]:
                _restore_diktat_content()
                if _dik_geom_before[0]:
                    try:
                        win.geometry(_dik_geom_before[0])
                    except Exception:
                        pass
            else:
                _dik_geom_before[0] = win.geometry()
                main_f.pack_forget()
                btn_minimize_dik.configure(text="□")
                _dik_minimized[0] = True
                win.minsize(200, 74)
                w_cur = win.winfo_width()
                win.geometry(f"{w_cur}x74")
                # Show mini record button
                rec_text = "⏹" if is_recording[0] else "⏺"
                rec_fg = "#D04040" if is_recording[0] else "#5A90B0"
                if not _mini_rec_btn[0]:
                    _mini_rec_btn[0] = tk.Label(diktat_header, text=rec_text, font=("Segoe UI", 14, "bold"),
                                                bg="#B9ECFA", fg=rec_fg, cursor="hand2", padx=4)
                    _mini_rec_btn[0].bind("<Button-1>", lambda e: _mini_toggle_record())
                    _mini_rec_btn[0].bind("<Enter>", lambda e: _mini_rec_btn[0].configure(fg="#1a4d6d"))
                    _mini_rec_btn[0].bind("<Leave>", lambda e: _mini_rec_btn[0].configure(
                        fg="#D04040" if is_recording[0] else "#5A90B0"))
                else:
                    _mini_rec_btn[0].configure(text=rec_text, fg=rec_fg)
                _mini_rec_btn[0].pack(side="left", padx=(0, 4))
                if not _mini_neu_btn[0]:
                    _mini_neu_btn[0] = tk.Label(diktat_header, text="Neu", font=("Segoe UI", 9),
                                                bg="#B9ECFA", fg="#5A90B0", cursor="hand2", padx=4)
                    _mini_neu_btn[0].bind("<Button-1>", lambda e: _mini_new_diktat())
                    _mini_neu_btn[0].bind("<Enter>", lambda e: _mini_neu_btn[0].configure(fg="#1a4d6d"))
                    _mini_neu_btn[0].bind("<Leave>", lambda e: _mini_neu_btn[0].configure(fg="#5A90B0"))
                _mini_neu_btn[0].pack(side="left", padx=(0, 4))
                # Show mini status bar below header
                if not _mini_status_lbl[0]:
                    _mini_status_bar = tk.Frame(win, bg="#FFE4CC", height=22, padx=6, pady=2)
                    _mini_status_lbl[0] = tk.Label(
                        _mini_status_bar, textvariable=status_var,
                        fg="#BD4500", bg="#FFE4CC",
                        font=("Segoe UI", 8), anchor="w",
                    )
                    _mini_status_lbl[0].pack(side="left", fill="x", expand=True)
                    _mini_status_lbl[0]._parent_bar = _mini_status_bar
                _mini_status_lbl[0]._parent_bar.pack(fill="x")
                _mini_status_lbl[0].pack(side="left", fill="x", expand=True)

        def _on_dik_configure(e):
            # Dragging the window taller than the mini height restores it.
            if e.widget is not win:
                return
            if _dik_minimized[0] and not _dik_restoring[0] and e.height > 95:
                _restore_diktat_content()

        win.bind("<Configure>", _on_dik_configure, add="+")

        btn_minimize_dik = tk.Label(diktat_header, text="—", font=("Segoe UI", 12, "bold"),
                                    bg="#B9ECFA", fg="#5A90B0", cursor="hand2", padx=6)
        btn_minimize_dik.pack(side="right", padx=(0, 8))
        btn_minimize_dik.bind("<Button-1>", lambda e: _toggle_minimize_diktat())
        btn_minimize_dik.bind("<Enter>", lambda e: btn_minimize_dik.configure(fg="#1a4d6d"))
        btn_minimize_dik.bind("<Leave>", lambda e: btn_minimize_dik.configure(fg="#5A90B0"))

        # Expose minimize hooks for the window manager helpers.
        win._aza_minimize = _toggle_minimize_diktat
        win._aza_is_minimized = lambda: _dik_minimized[0]
        if hasattr(self, "_aza_windows"):
            self._aza_windows.add(win)

        main_f = ttk.Frame(win, padding=12)
        main_f.pack(fill="both", expand=True)

        # Label + font-size controls
        label_frame = ttk.Frame(main_f)
        label_frame.pack(fill="x", anchor="w")
        ttk.Label(label_frame, text="Diktat (nur Transkription):").pack(side="left")

        # Transcription mode directly in the dictation window:
        # "Medizin" vs. "Allgemein" (mutually exclusive checkboxes).
        if not hasattr(self, "_transcribe_medical_var"):
            self._transcribe_medical_var = tk.BooleanVar(value=True)
        if not hasattr(self, "_transcribe_general_var"):
            self._transcribe_general_var = tk.BooleanVar(value=False)
        if not hasattr(self, "_transcribe_toggle_guard"):
            self._transcribe_toggle_guard = False

        def _set_mode(domain_value: str):
            # Prefer the main app's setter when available.
            if hasattr(self, "_set_transcribe_domain"):
                self._set_transcribe_domain(domain_value)
                return
            # Fallback without the main-app method.
            if domain_value == "general":
                self._transcribe_medical_var.set(False)
                self._transcribe_general_var.set(True)
            else:
                self._transcribe_medical_var.set(True)
                self._transcribe_general_var.set(False)

        def _on_med_toggle():
            # Keep exactly one of the two mode checkboxes selected.
            if self._transcribe_medical_var.get():
                _set_mode("medical")
            elif not self._transcribe_general_var.get():
                _set_mode("medical")

        def _on_gen_toggle():
            if self._transcribe_general_var.get():
                _set_mode("general")
            elif not self._transcribe_medical_var.get():
                _set_mode("medical")

        tk.Checkbutton(
            label_frame, text="Medizin", variable=self._transcribe_medical_var,
            command=_on_med_toggle, bg="#B9ECFA", fg="#1a4d6d",
            activebackground="#B9ECFA", selectcolor="#E8F4FA",
        ).pack(side="left", padx=(10, 4))
        tk.Checkbutton(
            label_frame, text="Allgemein", variable=self._transcribe_general_var,
            command=_on_gen_toggle, bg="#B9ECFA", fg="#1a4d6d",
            activebackground="#B9ECFA", selectcolor="#E8F4FA",
        ).pack(side="left")

        # Smaller font for better readability (8pt instead of 10pt).
        diktat_font = ("Segoe UI", 8)
        txt = ScrolledText(main_f, wrap="word", font=diktat_font, bg="#F5FCFF", height=8)
        txt.pack(fill="both", expand=True, pady=(4, 4))

        # Font-size spinbox styled with the window background color.
        add_text_font_size_control(label_frame, txt, initial_size=8, bg_color="#B9ECFA", save_key="diktat_window")

        self._bind_textblock_pending(txt)
        status_var = tk.StringVar(value="Bereit.")
        status_bar = tk.Frame(main_f, bg="#FFE4CC", height=24, padx=8, pady=4)
        status_bar.pack(fill="x", pady=(4, 0))
        status_bar.pack_propagate(False)
        lbl_status = tk.Label(
            status_bar, textvariable=status_var, fg="#BD4500", bg="#FFE4CC",
            font=("Segoe UI", 8), anchor="w",
        )
        lbl_status.pack(side="left", fill="x", expand=True)
        cb_row = tk.Frame(main_f, bg="#B9ECFA")
        cb_row.pack(fill="x", pady=(2, 0))
        _diktat_autocopy_var = tk.BooleanVar(value=is_autocopy_after_diktat_enabled())
        ttk.Checkbutton(
            cb_row, text="Autokopie nach Transkription",
            variable=_diktat_autocopy_var,
            command=lambda: save_autocopy_prefs(autocopy=_diktat_autocopy_var.get()),
        ).pack(side="left")
        _diktat_rclick_var = tk.BooleanVar(value=is_global_right_click_paste_enabled())
        ttk.Checkbutton(
            cb_row, text="Rechtsklick = Einfügen",
            variable=_diktat_rclick_var,
            command=lambda: save_autocopy_prefs(global_right_click=_diktat_rclick_var.get()),
        ).pack(side="left", padx=(12, 0))

        btn_row = ttk.Frame(main_f, padding=(0, 4, 0, 0))
        btn_row.pack(fill="x")
        btn_row2 = ttk.Frame(main_f, padding=(0, 2, 0, 0))
        btn_row2.pack(fill="x")
        # Mutable closure cells: current recorder instance and recording flag.
        diktat_recorder = [None]
        is_recording = [False]

        def toggle_diktat():
            # Start recording, or stop it and transcribe in a worker thread.
            if not diktat_recorder[0]:
                diktat_recorder[0] = AudioRecorder()
            rec = diktat_recorder[0]
            if not is_recording[0]:
                try:
                    rec.start()
                    is_recording[0] = True
                    btn_diktat_record.configure(text="⏹ Aufnahme stoppen")
                    status_var.set("Aufnahme läuft…")
                except Exception as e:
                    messagebox.showerror("Aufnahme-Fehler", str(e))
                    status_var.set("Bereit.")
            else:
                is_recording[0] = False
                btn_diktat_record.configure(text="⏺ Aufnahme starten")
                status_var.set("Transkribiere…")

                def worker():
                    # Runs off the Tk thread; all UI updates go through
                    # _safe_after so they land back on the main loop.
                    def _safe_after(fn):
                        try:
                            if self.winfo_exists():
                                self.after(0, fn)
                        except Exception:
                            pass

                    try:
                        wav_path = rec.stop_and_save_wav()

                        try:
                            with wave.open(wav_path, 'rb') as wf:
                                frames = wf.getnframes()
                                framerate = wf.getframerate()
                                duration = frames / float(framerate)

                                # Discard recordings shorter than 300 ms.
                                if duration < 0.3:
                                    try:
                                        if os.path.exists(wav_path):
                                            os.remove(wav_path)
                                    except Exception:
                                        pass
                                    diktat_recorder[0] = None
                                    _safe_after(lambda: status_var.set("Kein Audio erkannt."))
                                    return
                        except Exception:
                            pass

                        transcript_result = self.transcribe_wav(wav_path)

                        # transcribe_wav may return an object with .text or a str.
                        if hasattr(transcript_result, 'text'):
                            transcript_text = transcript_result.text
                        elif isinstance(transcript_result, str):
                            transcript_text = transcript_result
                        else:
                            transcript_text = ""

                        # Best-effort cleanup of the temporary WAV file.
                        try:
                            if os.path.exists(wav_path):
                                os.remove(wav_path)
                        except Exception:
                            pass

                        if not transcript_text or not transcript_text.strip():
                            diktat_recorder[0] = None
                            _safe_after(lambda: status_var.set("Kein Text erkannt."))
                            return

                        transcript_text = self._diktat_apply_punctuation(transcript_text)

                        if not transcript_text or not transcript_text.strip():
                            diktat_recorder[0] = None
                            _safe_after(lambda: status_var.set("Kein Text erkannt."))
                            return

                        _safe_after(lambda: _done(transcript_text))
                    except Exception as e:
                        def _show_err(err=e):
                            try:
                                if win.winfo_exists():
                                    messagebox.showerror("Fehler", str(err), parent=win)
                                    status_var.set("Fehler.")
                            except Exception:
                                pass
                        _safe_after(_show_err)

                def _done(text):
                    # Back on the Tk thread: insert text, auto-copy, archive.
                    diktat_recorder[0] = None
                    try:
                        if not win.winfo_exists():
                            return
                        txt.configure(state="normal")
                        idx = txt.index(tk.INSERT)
                        txt.insert(idx, text)
                        full = txt.get("1.0", "end").strip()
                        copied = False
                        if _diktat_autocopy_var.get() and full:
                            # Prefer the Windows clipboard helper; fall back
                            # to Tk's clipboard with markdown stripped.
                            if not _win_clipboard_set(full):
                                try:
                                    self.clipboard_clear()
                                    self.clipboard_append(sanitize_markdown_for_plain_text(full))
                                except Exception:
                                    pass
                            copied = True
                        if full:
                            try:
                                save_to_ablage("Diktat", full)
                            except Exception:
                                pass
                        if copied:
                            self.set_status("Diktat transkribiert und kopiert.")
                            status_var.set("Fertig. Kopiert.")
                        else:
                            self.set_status("Diktat transkribiert.")
                            status_var.set("Fertig.")
                    except Exception:
                        pass

                threading.Thread(target=worker, daemon=True).start()

        def do_neu():
            # "New": discard any running recording (after confirmation),
            # clear the text field and immediately start a fresh recording.
            if is_recording[0]:
                if not messagebox.askyesno(
                    "Aufnahme läuft",
                    "Es läuft gerade eine Aufnahme.\n"
                    "Möchtest du die aktuelle Aufnahme wirklich verwerfen und neu starten?",
                    parent=win
                ):
                    return
            rec = diktat_recorder[0]
            is_recording[0] = False
            btn_diktat_record.configure(text="⏺ Aufnahme starten")
            try:
                wav_path = rec.stop_and_save_wav()
                if os.path.exists(wav_path):
                    os.remove(wav_path)
            except Exception:
                pass
            diktat_recorder[0] = None
            txt.configure(state="normal")
            txt.delete("1.0", "end")
            status_var.set("Bereit.")
            toggle_diktat()

        def do_kopieren():
            # Copy the whole text field to the clipboard.
            t = txt.get("1.0", "end").strip()
            if t:
                if not _win_clipboard_set(t):
                    self.clipboard_clear()
                    self.clipboard_append(sanitize_markdown_for_plain_text(t))
                self.set_status("Diktat kopiert.")
            else:
                self.set_status("Nichts zum Kopieren.")

        def do_clear_text():
            txt.configure(state="normal")
            txt.delete("1.0", "end")
            status_var.set("Diktatfeld geleert.")
            self.set_status("Diktatfeld geleert.")

        btn_diktat_record = RoundedButton(
            btn_row, "⏺ Aufnahme starten", command=toggle_diktat,
            width=160, height=26, canvas_bg="#B9ECFA",
        )
        btn_diktat_record.pack(side="left")
        RoundedButton(
            btn_row, "Neu", command=do_neu,
            width=70, height=26, canvas_bg="#B9ECFA",
        ).pack(side="left", padx=(6, 0))
        RoundedButton(
            btn_row2, "Kopieren", command=do_kopieren,
            width=100, height=26, canvas_bg="#B9ECFA",
        ).pack(side="left")
        RoundedButton(
            btn_row2, "X", command=do_clear_text,
            width=34, height=26, canvas_bg="#B9ECFA",
        ).pack(side="left", padx=(6, 0))
        # Optionally start recording right away when the window opens.
        if self._autotext_data.get("diktat_auto_start", True):
            win.after(350, toggle_diktat)
|
||||
1438
AzA march 2026 - Kopie (18)/aza_docapp.py
Normal file
4380
AzA march 2026 - Kopie (18)/aza_email.py
Normal file
535
AzA march 2026 - Kopie (18)/aza_empfang_app.py
Normal file
@@ -0,0 +1,535 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
AZA Empfang - Schlanke Desktop-Huelle fuer die Empfangs-Weboberflaeche.
|
||||
|
||||
Unabhaengig von AzA Office / Hauptfenster: Standard-URL ist die oeffentliche
|
||||
Empfang-Instanz (empfang.aza-medwork.ch). Kein lokales Backend noetig.
|
||||
|
||||
URL-Reihenfolge:
|
||||
1) Umgebung AZA_EMPFANG_URL oder EMPFANG_URL (volle Basis, siehe unten)
|
||||
2) backend_url.txt neben der EXE (optional, erste nicht-leere Zeile)
|
||||
3) Im EXE-Bundle nur mit AZA_EMPFANG_USE_BUNDLED_URL=1
|
||||
4) Fallback: https://empfang.aza-medwork.ch/empfang/
|
||||
|
||||
Lokale URLs (localhost / 127.0.0.1 / 192.168...): bei gebundelter EXE nur mit
|
||||
AZA_EMPFANG_ALLOW_LOCAL=1, sonst Fallback auf (4) — damit die Huelle ohne
|
||||
Haupt-App und ohne laufenden lokalen Server funktioniert.
|
||||
|
||||
Standard: Laedt die URL direkt im Fenster (kein iframe).
|
||||
|
||||
Optional: AZA_EMPFANG_IFRAME=1 = alte iframe-Huelle mit HTML-Toolbar.
|
||||
"""
|
||||
|
||||
import json
|
||||
import os
|
||||
import ssl
|
||||
import sys
|
||||
import threading
|
||||
import webbrowser
|
||||
|
||||
_APP_TITLE = "AZA Empfang"
|
||||
_MIN_SIZE = (380, 500)
|
||||
_DEFAULT_W, _DEFAULT_H = 480, 820
|
||||
|
||||
_PUBLIC_EMPFANG_URL = "https://empfang.aza-medwork.ch"
|
||||
|
||||
|
||||
def _data_dir():
|
||||
if getattr(sys, "frozen", False):
|
||||
return sys._MEIPASS
|
||||
return os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
|
||||
def _user_dir():
|
||||
if getattr(sys, "frozen", False):
|
||||
return os.path.dirname(sys.executable)
|
||||
return os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
|
||||
def _normalize_to_empfang_url(base: str) -> str:
|
||||
"""Aus Basis-URL (Host oder .../empfang) wird .../empfang/ mit Slash am Ende."""
|
||||
b = (base or "").strip().rstrip("/")
|
||||
if not b:
|
||||
b = _PUBLIC_EMPFANG_URL.rstrip("/")
|
||||
if b.endswith("/empfang"):
|
||||
return b + "/"
|
||||
return b + "/empfang/"
|
||||
|
||||
|
||||
def _is_local_or_lan_url(url: str) -> bool:
|
||||
u = (url or "").lower()
|
||||
if "localhost" in u or "127.0.0.1" in u:
|
||||
return True
|
||||
if u.startswith("http://192.168.") or u.startswith("http://10."):
|
||||
return True
|
||||
if u.startswith("https://192.168.") or u.startswith("https://10."):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def _read_backend_url_file_in_dir(dir_path: str) -> str | None:
|
||||
try:
|
||||
p = os.path.join(dir_path, "backend_url.txt")
|
||||
if not os.path.isfile(p):
|
||||
return None
|
||||
with open(p, "r", encoding="utf-8") as f:
|
||||
for ln in f:
|
||||
s = ln.strip()
|
||||
if s and not s.startswith("#"):
|
||||
return s.rstrip("/")
|
||||
except Exception:
|
||||
return None
|
||||
return None
|
||||
|
||||
|
||||
def _empfang_url() -> str:
|
||||
env = (
|
||||
(os.environ.get("AZA_EMPFANG_URL") or os.environ.get("EMPFANG_URL") or "")
|
||||
.strip()
|
||||
)
|
||||
if env:
|
||||
return _normalize_to_empfang_url(env)
|
||||
|
||||
search_dirs: list[str] = [_user_dir()]
|
||||
if getattr(sys, "frozen", False):
|
||||
if os.environ.get("AZA_EMPFANG_USE_BUNDLED_URL", "").strip() == "1":
|
||||
search_dirs.append(_data_dir())
|
||||
else:
|
||||
# Entwicklung: Skript-Verzeichnis (identisch zu _user_dir bei direktem Start)
|
||||
dd = _data_dir()
|
||||
if dd not in search_dirs:
|
||||
search_dirs.append(dd)
|
||||
|
||||
raw: str | None = None
|
||||
for d in search_dirs:
|
||||
raw = _read_backend_url_file_in_dir(d)
|
||||
if raw:
|
||||
break
|
||||
|
||||
if raw:
|
||||
if getattr(sys, "frozen", False) and _is_local_or_lan_url(raw):
|
||||
if os.environ.get("AZA_EMPFANG_ALLOW_LOCAL", "").strip() != "1":
|
||||
raw = None
|
||||
if raw:
|
||||
return _normalize_to_empfang_url(raw)
|
||||
|
||||
return _normalize_to_empfang_url(_PUBLIC_EMPFANG_URL)
|
||||
|
||||
|
||||
_SETTINGS_FILE = os.path.join(_user_dir(), "empfang_app_settings.json")
|
||||
|
||||
|
||||
def _load_settings() -> dict:
|
||||
defaults = {"width": _DEFAULT_W, "height": _DEFAULT_H,
|
||||
"x": None, "y": None, "on_top": False}
|
||||
try:
|
||||
with open(_SETTINGS_FILE, "r", encoding="utf-8") as f:
|
||||
return {**defaults, **json.load(f)}
|
||||
except Exception:
|
||||
return defaults
|
||||
|
||||
|
||||
def _save_settings(s: dict):
|
||||
try:
|
||||
with open(_SETTINGS_FILE, "w", encoding="utf-8") as f:
|
||||
json.dump(s, f, indent=2)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
class _Api:
|
||||
def __init__(self, window, on_top: bool, url: str):
|
||||
self._w = window
|
||||
self._on_top = on_top
|
||||
self._url = url
|
||||
self._pin_lock = threading.Lock()
|
||||
|
||||
def check_url(self):
|
||||
"""Diagnostic connectivity check (GET with SSL fallback)."""
|
||||
from urllib.request import urlopen, Request
|
||||
for verify in (True, False):
|
||||
try:
|
||||
ctx = ssl.create_default_context() if verify else ssl._create_unverified_context()
|
||||
req = Request(self._url)
|
||||
with urlopen(req, timeout=10, context=ctx) as r:
|
||||
r.read(512)
|
||||
return {"ok": True, "url": self._url, "error": ""}
|
||||
except ssl.SSLError:
|
||||
if verify:
|
||||
continue
|
||||
return {"ok": False, "url": self._url,
|
||||
"error": "SSL/TLS-Zertifikatsfehler. Bitte IT kontaktieren."}
|
||||
except Exception as e:
|
||||
if verify:
|
||||
continue
|
||||
return {"ok": False, "url": self._url, "error": str(e)}
|
||||
return {"ok": False, "url": self._url,
|
||||
"error": "Server nicht erreichbar. Bitte Netzwerk pruefen."}
|
||||
|
||||
def open_in_browser(self):
|
||||
"""Öffnet dieselbe Ziel-URL wie im Fenster (lokal oder Produktion)."""
|
||||
webbrowser.open(self._url)
|
||||
|
||||
def toggle_on_top(self):
|
||||
if not self._pin_lock.acquire(blocking=False):
|
||||
return self._on_top
|
||||
try:
|
||||
self._on_top = not self._on_top
|
||||
new_val = self._on_top
|
||||
finally:
|
||||
self._pin_lock.release()
|
||||
|
||||
def _apply():
|
||||
try:
|
||||
self._w.on_top = new_val
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
threading.Thread(target=_apply, daemon=True).start()
|
||||
return new_val
|
||||
|
||||
def get_on_top(self):
|
||||
return self._on_top
|
||||
|
||||
def get_version(self):
|
||||
try:
|
||||
sys.path.insert(0, _data_dir())
|
||||
from _build_info import BUILD_TIME, GIT_COMMIT
|
||||
return f"Build: {BUILD_TIME} ({GIT_COMMIT})"
|
||||
except Exception:
|
||||
return "Entwicklungsversion"
|
||||
|
||||
def get_url(self):
|
||||
return self._url
|
||||
|
||||
def get_public_url(self):
|
||||
return _PUBLIC_EMPFANG_URL
|
||||
|
||||
|
||||
_SHELL_HTML = r'''<!DOCTYPE html>
|
||||
<html lang="de">
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<style>
|
||||
*{margin:0;padding:0;box-sizing:border-box}
|
||||
html,body{height:100%;overflow:hidden}
|
||||
body{font-family:'Segoe UI',system-ui,sans-serif;background:#f0f4f8}
|
||||
#toolbar{
|
||||
height:34px;
|
||||
background:linear-gradient(135deg,#5B8DB3,#3a6d93);
|
||||
display:flex;align-items:center;justify-content:space-between;
|
||||
padding:0 10px;box-shadow:0 1px 4px rgba(0,0,0,.15);
|
||||
position:relative;z-index:20;
|
||||
}
|
||||
.tb-l,.tb-r{display:flex;align-items:center;gap:6px}
|
||||
.tb-title{color:#fff;font-size:12px;font-weight:600;letter-spacing:.3px}
|
||||
.tb-btn{
|
||||
background:rgba(255,255,255,.08);border:1px solid rgba(255,255,255,.18);
|
||||
color:#fff;border-radius:4px;padding:3px 9px;font-size:11px;
|
||||
cursor:pointer;transition:all .15s;font-family:inherit;white-space:nowrap;
|
||||
}
|
||||
.tb-btn:hover{background:rgba(255,255,255,.2)}
|
||||
.tb-btn.on{background:rgba(255,255,255,.35);border-color:rgba(255,255,255,.6);font-weight:600}
|
||||
.tb-btn.busy{opacity:.5;pointer-events:none}
|
||||
#content{position:absolute;top:34px;left:0;right:0;bottom:0}
|
||||
#frame{width:100%;height:100%;border:none;display:none}
|
||||
#loading{
|
||||
position:absolute;top:0;left:0;right:0;bottom:0;
|
||||
display:flex;flex-direction:column;align-items:center;justify-content:center;
|
||||
background:#f0f4f8;z-index:10;
|
||||
}
|
||||
.spin{width:34px;height:34px;border:3px solid #dde8f0;border-top-color:#5B8DB3;
|
||||
border-radius:50%;animation:sp .8s linear infinite;margin-bottom:16px}
|
||||
@keyframes sp{to{transform:rotate(360deg)}}
|
||||
.ld-t{color:#5B8DB3;font-size:13px;font-weight:500}
|
||||
.ld-s{color:#8aa8bc;font-size:11px;margin-top:5px}
|
||||
#error{
|
||||
position:absolute;top:0;left:0;right:0;bottom:0;
|
||||
display:none;flex-direction:column;align-items:center;justify-content:center;
|
||||
background:#f0f4f8;padding:30px;text-align:center;z-index:10;
|
||||
}
|
||||
.er-i{font-size:44px;margin-bottom:14px;opacity:.65}
|
||||
.er-h{font-size:15px;font-weight:600;color:#1a4d6d;margin-bottom:6px}
|
||||
.er-m{font-size:12px;color:#6a8a9a;margin-bottom:20px;line-height:1.6}
|
||||
.er-b{display:flex;gap:8px}
|
||||
.er-btn{background:#5B8DB3;color:#fff;border:none;border-radius:6px;
|
||||
padding:8px 18px;font-size:12px;cursor:pointer;font-weight:500;font-family:inherit}
|
||||
.er-btn:hover{background:#4A7A9E}
|
||||
.er-btn.s{background:#dde8f0;color:#1a4d6d}
|
||||
.er-btn.s:hover{background:#ccd8e0}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div id="toolbar">
|
||||
<div class="tb-l"><span class="tb-title">AZA Empfang</span></div>
|
||||
<div class="tb-r">
|
||||
<button class="tb-btn" onclick="doReload()" title="Seite neu laden">↻ Laden</button>
|
||||
<button class="tb-btn" onclick="doOpen()" title="Im Browser oeffnen">↗ Browser</button>
|
||||
<button class="tb-btn" id="bp" onclick="doPin()" title="Immer im Vordergrund">📌</button>
|
||||
<button class="tb-btn" onclick="doInfo()" title="Version / Info">ℹ</button>
|
||||
</div>
|
||||
</div>
|
||||
<div id="content">
|
||||
<div id="loading">
|
||||
<div class="spin"></div>
|
||||
<div class="ld-t">Empfang wird geladen</div>
|
||||
<div class="ld-s">Verbindung wird hergestellt…</div>
|
||||
</div>
|
||||
<div id="error">
|
||||
<div class="er-i">⚠️</div>
|
||||
<div class="er-h">Empfang nicht erreichbar</div>
|
||||
<div class="er-m" id="erMsg">Die Empfangsseite konnte nicht geladen werden.</div>
|
||||
<div class="er-b">
|
||||
<button class="er-btn" onclick="doReload()">↻ Neu laden</button>
|
||||
<button class="er-btn s" onclick="doOpen()">↗ Im Browser oeffnen</button>
|
||||
</div>
|
||||
</div>
|
||||
<iframe id="frame" src="about:blank"></iframe>
|
||||
</div>
|
||||
<script>
|
||||
var RDY=false, URL='';
|
||||
window.addEventListener('pywebviewready',function(){
|
||||
RDY=true;
|
||||
pywebview.api.get_url().then(function(u){URL=u;boot()});
|
||||
pinInit();
|
||||
});
|
||||
|
||||
function view(id){
|
||||
['loading','error','frame'].forEach(function(k){
|
||||
var e=document.getElementById(k);
|
||||
e.style.display=(k===id)?(k==='frame'?'block':'flex'):'none';
|
||||
});
|
||||
}
|
||||
|
||||
function boot(){
|
||||
view('loading');
|
||||
var f=document.getElementById('frame');
|
||||
var done=false;
|
||||
f.onload=function(){
|
||||
if(!done&&f.src!=='about:blank'){done=true;view('frame')}
|
||||
};
|
||||
var cacheBust = '?_t=' + Date.now();
|
||||
f.src=URL + (URL.indexOf('?')>=0 ? '&_t=' : '?_t=') + Date.now();
|
||||
setTimeout(function(){
|
||||
if(!done){diagnose()}
|
||||
},12000);
|
||||
}
|
||||
|
||||
async function diagnose(){
|
||||
try{
|
||||
var r=await pywebview.api.check_url();
|
||||
if(r&&r.ok){
|
||||
view('frame');
|
||||
}else{
|
||||
document.getElementById('erMsg').innerHTML=
|
||||
(r&&r.error?r.error:'Server nicht erreichbar.')+'<br><br><small>URL: '+(r&&r.url||'')+'</small>';
|
||||
view('error');
|
||||
}
|
||||
}catch(e){view('error')}
|
||||
}
|
||||
|
||||
async function doReload(){
|
||||
view('loading');
|
||||
var f=document.getElementById('frame');
|
||||
var done=false;
|
||||
f.onload=function(){
|
||||
if(!done&&f.src!=='about:blank'){done=true;view('frame')}
|
||||
};
|
||||
f.src='about:blank';
|
||||
setTimeout(function(){f.src=URL + (URL.indexOf('?')>=0 ? '&_t=' : '?_t=') + Date.now()},100);
|
||||
setTimeout(function(){if(!done)diagnose()},12000);
|
||||
}
|
||||
|
||||
function doOpen(){if(RDY)pywebview.api.open_in_browser()}
|
||||
|
||||
var pinBusy=false;
|
||||
async function doPin(){
|
||||
if(!RDY||pinBusy)return;
|
||||
var b=document.getElementById('bp');
|
||||
pinBusy=true;
|
||||
b.classList.add('busy');
|
||||
try{
|
||||
var v=await pywebview.api.toggle_on_top();
|
||||
pinUI(v);
|
||||
}catch(e){}
|
||||
setTimeout(function(){pinBusy=false;b.classList.remove('busy')},300);
|
||||
}
|
||||
async function pinInit(){
|
||||
try{var v=await pywebview.api.get_on_top();pinUI(v)}catch(e){}
|
||||
}
|
||||
function pinUI(on){
|
||||
var b=document.getElementById('bp');
|
||||
if(on){b.classList.add('on');b.textContent='\uD83D\uDCCC An';b.title='Immer im Vordergrund (aktiv)'}
|
||||
else {b.classList.remove('on');b.textContent='\uD83D\uDCCC';b.title='Immer im Vordergrund'}
|
||||
}
|
||||
|
||||
async function doInfo(){
|
||||
if(!RDY)return;
|
||||
var v=await pywebview.api.get_version();
|
||||
alert('AZA Empfang\n\n'+v+'\n'+URL);
|
||||
}
|
||||
</script>
|
||||
</body>
|
||||
</html>'''
|
||||
|
||||
|
||||
def main():
|
||||
try:
|
||||
import webview
|
||||
except ImportError:
|
||||
print("FEHLER: pywebview ist nicht installiert.")
|
||||
print("Bitte ausfuehren: pip install pywebview")
|
||||
sys.exit(1)
|
||||
try:
|
||||
from webview.menu import Menu, MenuAction, MenuSeparator
|
||||
except ImportError:
|
||||
Menu = None # type: ignore
|
||||
MenuAction = None # type: ignore
|
||||
MenuSeparator = None # type: ignore
|
||||
|
||||
settings = _load_settings()
|
||||
url = _empfang_url()
|
||||
# iframe-Huelle (alt): nur setzen wenn noetig: AZA_EMPFANG_IFRAME=1
|
||||
use_iframe_shell = os.environ.get("AZA_EMPFANG_IFRAME", "").strip() == "1"
|
||||
|
||||
w = max(_MIN_SIZE[0], min(1920, settings.get("width") or _DEFAULT_W))
|
||||
h = max(_MIN_SIZE[1], min(1200, settings.get("height") or _DEFAULT_H))
|
||||
x = settings.get("x")
|
||||
y = settings.get("y")
|
||||
if isinstance(x, (int, float)) and isinstance(y, (int, float)):
|
||||
if x < -200 or y < -200 or x > 3800 or y > 2200:
|
||||
x, y = None, None
|
||||
else:
|
||||
x, y = None, None
|
||||
|
||||
on_top = bool(settings.get("on_top", False))
|
||||
|
||||
if use_iframe_shell:
|
||||
window = webview.create_window(
|
||||
_APP_TITLE,
|
||||
html=_SHELL_HTML,
|
||||
width=w,
|
||||
height=h,
|
||||
x=x,
|
||||
y=y,
|
||||
min_size=_MIN_SIZE,
|
||||
on_top=on_top,
|
||||
text_select=True,
|
||||
background_color="#f0f4f8",
|
||||
)
|
||||
api = _Api(window, on_top, url)
|
||||
window.expose(
|
||||
api.check_url,
|
||||
api.open_in_browser,
|
||||
api.toggle_on_top,
|
||||
api.get_on_top,
|
||||
api.get_version,
|
||||
api.get_url,
|
||||
api.get_public_url,
|
||||
)
|
||||
else:
|
||||
window = webview.create_window(
|
||||
_APP_TITLE,
|
||||
url=url,
|
||||
width=w,
|
||||
height=h,
|
||||
x=x,
|
||||
y=y,
|
||||
min_size=_MIN_SIZE,
|
||||
on_top=on_top,
|
||||
text_select=True,
|
||||
background_color="#f0f4f8",
|
||||
)
|
||||
api = _Api(window, on_top, url)
|
||||
window.expose(
|
||||
api.toggle_on_top,
|
||||
api.get_on_top,
|
||||
api.open_in_browser,
|
||||
api.get_version,
|
||||
api.get_url,
|
||||
api.get_public_url,
|
||||
api.check_url,
|
||||
)
|
||||
|
||||
def _reload():
|
||||
try:
|
||||
window.evaluate_js("window.location.reload()")
|
||||
except Exception as exc:
|
||||
print(f"[AZA Empfang] Neu laden: {exc}")
|
||||
|
||||
def _info_box():
|
||||
try:
|
||||
msg = f"{api.get_version()}\n\n{api._url}"
|
||||
try:
|
||||
import ctypes
|
||||
|
||||
ctypes.windll.user32.MessageBoxW(0, msg, _APP_TITLE, 0)
|
||||
except Exception:
|
||||
print(msg)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
def _toggle_pin_menu():
|
||||
api.toggle_on_top()
|
||||
|
||||
menu = None
|
||||
if Menu is not None:
|
||||
menu = [
|
||||
Menu(
|
||||
"Empfang",
|
||||
[
|
||||
MenuAction("Neu laden", _reload),
|
||||
MenuAction("Im Browser \u00f6ffnen", api.open_in_browser),
|
||||
MenuAction("Immer im Vordergrund", _toggle_pin_menu),
|
||||
MenuSeparator(),
|
||||
MenuAction("Version / Info", _info_box),
|
||||
],
|
||||
),
|
||||
]
|
||||
|
||||
def _on_closing():
|
||||
try:
|
||||
_save_settings({
|
||||
"x": window.x,
|
||||
"y": window.y,
|
||||
"width": window.width,
|
||||
"height": window.height,
|
||||
"on_top": api._on_top,
|
||||
})
|
||||
except Exception:
|
||||
pass
|
||||
return True
|
||||
|
||||
try:
|
||||
window.events.closing += _on_closing
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
try:
|
||||
if use_iframe_shell:
|
||||
webview.start()
|
||||
elif menu:
|
||||
try:
|
||||
webview.start(menu=menu)
|
||||
except TypeError:
|
||||
webview.start()
|
||||
else:
|
||||
webview.start()
|
||||
except Exception as e:
|
||||
print(f"[AZA Empfang] Fehler: {e}")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
try:
|
||||
import ctypes
|
||||
ctypes.windll.shell32.SetCurrentProcessExplicitAppUserModelID(
|
||||
"ch.aza-medwork.empfang.v2")
|
||||
except Exception:
|
||||
pass
|
||||
try:
|
||||
main()
|
||||
except Exception as e:
|
||||
print(f"[AZA Empfang] Kritischer Fehler: {e}")
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
178
AzA march 2026 - Kopie (18)/aza_firewall.py
Normal file
@@ -0,0 +1,178 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
AZA Desktop – Windows-Firewall-Handling.
|
||||
Minimale lokale Regel fuer Backend auf 127.0.0.1:8000.
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import subprocess
|
||||
|
||||
_RULE_NAME = "AZA Desktop - Lokale Kommunikation"
|
||||
_BACKEND_PORT = 8000
|
||||
|
||||
|
||||
def add_firewall_rule_if_needed() -> tuple[bool, str]:
|
||||
"""
|
||||
Legt eine minimale Windows-Firewall-Regel fuer das lokale AZA-Backend an.
|
||||
Nur fuer aza_desktop.exe, TCP 8000, Remote 127.0.0.1 (localhost).
|
||||
Gibt (erfolg, meldung) zurueck.
|
||||
"""
|
||||
if sys.platform != "win32":
|
||||
return True, ""
|
||||
|
||||
exe_path = ""
|
||||
if getattr(sys, "frozen", False):
|
||||
exe_path = sys.executable
|
||||
else:
|
||||
# Dev: Basis-Pfad ermitteln
|
||||
base = os.path.dirname(os.path.abspath(__file__))
|
||||
for _ in range(5):
|
||||
candidate = os.path.join(base, "aza_desktop.exe")
|
||||
if os.path.isfile(candidate):
|
||||
exe_path = candidate
|
||||
break
|
||||
parent = os.path.dirname(base)
|
||||
if parent == base:
|
||||
break
|
||||
base = parent
|
||||
|
||||
if not exe_path or not os.path.isfile(exe_path):
|
||||
return False, "aza_desktop.exe nicht gefunden"
|
||||
|
||||
exe_path = os.path.normpath(exe_path)
|
||||
|
||||
# Regel loeschen falls vorhanden (idempotent)
|
||||
try:
|
||||
subprocess.run(
|
||||
[
|
||||
"netsh", "advfirewall", "firewall", "delete", "rule",
|
||||
f"name={_RULE_NAME}",
|
||||
],
|
||||
capture_output=True,
|
||||
timeout=10,
|
||||
creationflags=subprocess.CREATE_NO_WINDOW if hasattr(subprocess, "CREATE_NO_WINDOW") else 0,
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Regel anlegen
|
||||
try:
|
||||
result = subprocess.run(
|
||||
[
|
||||
"netsh", "advfirewall", "firewall", "add", "rule",
|
||||
f"name={_RULE_NAME}",
|
||||
"dir=in",
|
||||
"action=allow",
|
||||
f"program={exe_path}",
|
||||
f"localport={_BACKEND_PORT}",
|
||||
"protocol=tcp",
|
||||
"remoteip=127.0.0.1",
|
||||
],
|
||||
capture_output=True,
|
||||
text=True,
|
||||
timeout=10,
|
||||
creationflags=subprocess.CREATE_NO_WINDOW if hasattr(subprocess, "CREATE_NO_WINDOW") else 0,
|
||||
)
|
||||
if result.returncode == 0:
|
||||
return True, "Firewall-Regel angelegt"
|
||||
err = (result.stderr or result.stdout or "").strip()
|
||||
return False, err or f"Rueckcode {result.returncode}"
|
||||
except subprocess.TimeoutExpired:
|
||||
return False, "Zeitueberschreitung"
|
||||
except Exception as e:
|
||||
return False, str(e)
|
||||
|
||||
|
||||
def check_firewall_rule() -> dict:
|
||||
"""
|
||||
Prueft ohne Adminrechte, ob die erwartete Firewall-Regel existiert.
|
||||
Gibt dict mit keys: exists, program_ok, port_ok, protocol_ok, remote_ok, detail
|
||||
"""
|
||||
result = {
|
||||
"exists": False,
|
||||
"program_ok": None,
|
||||
"port_ok": None,
|
||||
"protocol_ok": None,
|
||||
"remote_ok": None,
|
||||
"detail": "",
|
||||
}
|
||||
if sys.platform != "win32":
|
||||
result["detail"] = "Nur unter Windows relevant"
|
||||
return result
|
||||
|
||||
try:
|
||||
proc = subprocess.run(
|
||||
[
|
||||
"netsh", "advfirewall", "firewall", "show", "rule",
|
||||
f"name={_RULE_NAME}", "verbose",
|
||||
],
|
||||
capture_output=True,
|
||||
text=True,
|
||||
timeout=10,
|
||||
creationflags=(
|
||||
subprocess.CREATE_NO_WINDOW
|
||||
if hasattr(subprocess, "CREATE_NO_WINDOW")
|
||||
else 0
|
||||
),
|
||||
)
|
||||
except Exception as e:
|
||||
result["detail"] = f"Pruefung nicht moeglich: {e}"
|
||||
return result
|
||||
|
||||
stdout = proc.stdout or ""
|
||||
if proc.returncode != 0 or _RULE_NAME not in stdout:
|
||||
result["detail"] = "Regel nicht vorhanden"
|
||||
return result
|
||||
|
||||
result["exists"] = True
|
||||
|
||||
lower = stdout.lower()
|
||||
|
||||
if "tcp" in lower:
|
||||
result["protocol_ok"] = True
|
||||
else:
|
||||
result["protocol_ok"] = False
|
||||
|
||||
if str(_BACKEND_PORT) in stdout:
|
||||
result["port_ok"] = True
|
||||
else:
|
||||
result["port_ok"] = False
|
||||
|
||||
if "127.0.0.1" in stdout or "localsubnet" in lower:
|
||||
result["remote_ok"] = True
|
||||
else:
|
||||
result["remote_ok"] = False
|
||||
|
||||
if "aza_desktop" in lower:
|
||||
result["program_ok"] = True
|
||||
else:
|
||||
result["program_ok"] = False
|
||||
|
||||
problems = []
|
||||
if not result["protocol_ok"]:
|
||||
problems.append("Protokoll")
|
||||
if not result["port_ok"]:
|
||||
problems.append("Port")
|
||||
if not result["remote_ok"]:
|
||||
problems.append("Remote-IP")
|
||||
if not result["program_ok"]:
|
||||
problems.append("Programm")
|
||||
|
||||
if problems:
|
||||
result["detail"] = "Regel vorhanden, Details abweichend: " + ", ".join(problems)
|
||||
else:
|
||||
result["detail"] = "Regel korrekt (TCP 8000, localhost, programmgebunden)"
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def get_firewall_hint_text() -> str:
|
||||
"""Ruiger Hinweistext fuer den Nutzer bei Firewall-Popup oder Backend-Problem."""
|
||||
return (
|
||||
"Windows hat eine Rueckfrage zur lokalen Netzwerkkommunikation von AZA angezeigt.\n\n"
|
||||
"AZA verwendet lokal laufende Komponenten auf diesem Computer. "
|
||||
"Die Rueckfrage betrifft die interne Kommunikation der App.\n\n"
|
||||
"Fuer den lokalen Betrieb von AZA koennen Sie fortfahren bzw. "
|
||||
"den Zugriff fuer private Netzwerke erlauben."
|
||||
)
|
||||
BIN
AzA march 2026 - Kopie (18)/aza_full_bundle.tar.gz
Normal file
125
AzA march 2026 - Kopie (18)/aza_global_paste.py
Normal file
@@ -0,0 +1,125 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Globaler Rechtsklick-Paste: Bei Rechtsklick in externen Apps wird
|
||||
der Zwischenablage-Inhalt eingefügt (Strg+V).
|
||||
Wird von Hauptfenster, Audio-Notiz und Übersetzer genutzt.
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import threading
|
||||
import ctypes
|
||||
from ctypes import wintypes
|
||||
|
||||
_user32 = None
|
||||
if sys.platform == "win32":
|
||||
try:
|
||||
_user32 = ctypes.windll.user32
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
_HAS_PYNPUT_MOUSE = False
|
||||
try:
|
||||
from pynput.mouse import Button as MouseButton, Listener as MouseListener
|
||||
_HAS_PYNPUT_MOUSE = True
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
|
||||
def _is_own_process_window(hwnd: int) -> bool:
|
||||
if _user32 is None:
|
||||
return False
|
||||
try:
|
||||
pid = wintypes.DWORD(0)
|
||||
_user32.GetWindowThreadProcessId(hwnd, ctypes.byref(pid))
|
||||
return int(pid.value) == int(os.getpid())
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
def _send_ctrl_v():
|
||||
if _user32 is None:
|
||||
return
|
||||
try:
|
||||
VK_CONTROL = 0x11
|
||||
VK_V = 0x56
|
||||
_user32.keybd_event(VK_CONTROL, 0, 0, 0)
|
||||
time.sleep(0.02)
|
||||
_user32.keybd_event(VK_V, 0, 0, 0)
|
||||
_user32.keybd_event(VK_V, 0, 2, 0)
|
||||
_user32.keybd_event(VK_CONTROL, 0, 2, 0)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
def _paste_to_hwnd(hwnd: int) -> bool:
|
||||
if _user32 is None or not hwnd:
|
||||
return False
|
||||
try:
|
||||
_user32.SetForegroundWindow(hwnd)
|
||||
time.sleep(0.04)
|
||||
VK_ESCAPE = 0x1B
|
||||
_user32.keybd_event(VK_ESCAPE, 0, 0, 0)
|
||||
_user32.keybd_event(VK_ESCAPE, 0, 2, 0)
|
||||
time.sleep(0.03)
|
||||
_send_ctrl_v()
|
||||
return True
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
def _run_listener():
|
||||
"""Startet den globalen Rechtsklick-Paste-Listener (Windows)."""
|
||||
if sys.platform != "win32" or _user32 is None:
|
||||
return
|
||||
|
||||
def on_click(x, y, button, pressed):
|
||||
if pressed:
|
||||
return
|
||||
if button != MouseButton.right:
|
||||
return
|
||||
try:
|
||||
from aza_persistence import is_global_right_click_paste_enabled
|
||||
if not is_global_right_click_paste_enabled():
|
||||
return
|
||||
hwnd = _user32.GetForegroundWindow()
|
||||
if _is_own_process_window(hwnd):
|
||||
return
|
||||
_paste_to_hwnd(hwnd)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
def _polling_fallback():
|
||||
VK_RBUTTON = 0x02
|
||||
was_down = False
|
||||
while True:
|
||||
try:
|
||||
from aza_persistence import is_global_right_click_paste_enabled
|
||||
if not is_global_right_click_paste_enabled():
|
||||
time.sleep(0.5)
|
||||
continue
|
||||
is_down = bool(_user32.GetAsyncKeyState(VK_RBUTTON) & 0x8000)
|
||||
if was_down and not is_down:
|
||||
hwnd = _user32.GetForegroundWindow()
|
||||
if not _is_own_process_window(hwnd):
|
||||
_paste_to_hwnd(hwnd)
|
||||
was_down = is_down
|
||||
except Exception:
|
||||
pass
|
||||
time.sleep(0.01)
|
||||
|
||||
if _HAS_PYNPUT_MOUSE:
|
||||
try:
|
||||
with MouseListener(on_click=on_click) as listener:
|
||||
listener.join()
|
||||
return
|
||||
except Exception:
|
||||
pass
|
||||
_polling_fallback()
|
||||
|
||||
|
||||
def start_global_right_click_paste_listener():
|
||||
"""Startet den Listener in einem Daemon-Thread."""
|
||||
t = threading.Thread(target=_run_listener, daemon=True)
|
||||
t.start()
|
||||
644
AzA march 2026 - Kopie (18)/aza_launcher.py
Normal file
@@ -0,0 +1,644 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
AZA Desktop – Launcher / Startseite.
|
||||
Premium-Medizinprodukt-Design mit 6 Modulkacheln, KI-Kapazitätsanzeige
|
||||
und verstecktem Admin-Zugang (Doppelklick auf Logo).
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import tkinter as tk
|
||||
|
||||
from aza_config import (
|
||||
LAUNCHER_MODULES,
|
||||
LAUNCHER_MODULE_LABELS,
|
||||
LAUNCHER_DISABLED_MODULES,
|
||||
)
|
||||
from aza_persistence import (
|
||||
load_launcher_prefs,
|
||||
save_launcher_prefs,
|
||||
get_remaining_tokens,
|
||||
get_capacity_fraction,
|
||||
estimated_reports_remaining,
|
||||
is_capacity_low,
|
||||
)
|
||||
from aza_ui_helpers import save_toplevel_geometry, load_toplevel_geometry
|
||||
from aza_style import (
|
||||
BG, CARD_BG, CARD_HOVER_BG, CARD_BORDER, CARD_HOVER_BORDER,
|
||||
ACCENT, TEXT, SUBTLE,
|
||||
CAPACITY_BLUE, TURQUOISE, WARNING_AMBER, DANGER,
|
||||
FONT_FAMILY, SPACING,
|
||||
format_number_de,
|
||||
)
|
||||
|
||||
|
||||
_MODULE_DESCRIPTIONS = {
|
||||
"ki": "Medizinische Fragen stellen,\nBefunde besprechen, Zweitmeinung einholen",
|
||||
"kg": "Diktat aufnehmen und in Krankengeschichte umwandeln",
|
||||
"empfang": "Empfangs-Chat, Aufgaben\nund Praxis-Kommunikation",
|
||||
"notizen": "Sprachaufnahmen und Notizen\nfuer den Praxisalltag",
|
||||
"translator": "Medizinische Fachtexte uebersetzen\nund Begriffe nachschlagen",
|
||||
"medwork_chat": "Kollegialer Austausch mit\nAerzten und Fachpersonal",
|
||||
"praxis_chat": "Nachrichten und Aufgaben\nim eigenen Praxisteam",
|
||||
}
|
||||
|
||||
_ICON_BLUE = "#5B8DB3"
|
||||
_MODULE_ICON_COLORS = {
|
||||
"ki": _ICON_BLUE,
|
||||
"kg": _ICON_BLUE,
|
||||
"empfang": _ICON_BLUE,
|
||||
"notizen": _ICON_BLUE,
|
||||
"translator": _ICON_BLUE,
|
||||
"medwork_chat": _ICON_BLUE,
|
||||
"praxis_chat": _ICON_BLUE,
|
||||
}
|
||||
|
||||
_ICON_SZ = 38
|
||||
|
||||
|
||||
def _draw_module_icon(c: tk.Canvas, key: str):
|
||||
"""Draw a minimal white line-art icon (38×38) for the given module."""
|
||||
s = _ICON_SZ
|
||||
m = s // 2
|
||||
fg = "#FFFFFF"
|
||||
|
||||
if key == "ki":
|
||||
c.create_polygon(m, 7, m + 5, m, m, s - 7, m - 5, m,
|
||||
fill=fg, outline="")
|
||||
c.create_polygon(7, m, m, m - 5, s - 7, m, m, m + 5,
|
||||
fill=fg, outline="")
|
||||
|
||||
elif key == "kg":
|
||||
# Nur Fallback wenn logo.png fehlt (Kachel nutzt sonst echtes Logo als PhotoImage)
|
||||
c.create_text(m, m, text="AzA", font=("Segoe UI", 11, "bold"), fill=fg)
|
||||
|
||||
elif key == "notizen":
|
||||
c.create_oval(m - 5, 7, m + 5, 19, outline=fg, width=2)
|
||||
c.create_arc(m - 9, 13, m + 9, 27, start=180, extent=180,
|
||||
outline=fg, width=2, style="arc")
|
||||
c.create_line(m, 27, m, s - 9, fill=fg, width=2)
|
||||
c.create_line(m - 5, s - 9, m + 5, s - 9, fill=fg, width=2)
|
||||
|
||||
elif key == "translator":
|
||||
c.create_rectangle(7, 8, m + 1, s - 10, outline=fg, width=1.5)
|
||||
c.create_rectangle(m - 1, 10, s - 7, s - 8, outline=fg, width=1.5)
|
||||
c.create_text(14, m, text="A",
|
||||
font=("Segoe UI", 9, "bold"), fill=fg)
|
||||
c.create_text(s - 14, m + 1, text="\u6587",
|
||||
font=("Segoe UI", 8), fill=fg)
|
||||
|
||||
elif key == "medwork_chat":
|
||||
nodes = [(m, 9), (9, s - 11), (s - 9, s - 11)]
|
||||
for i in range(3):
|
||||
for j in range(i + 1, 3):
|
||||
c.create_line(*nodes[i], *nodes[j], fill=fg, width=1.5)
|
||||
for x, y in nodes:
|
||||
c.create_oval(x - 4, y - 4, x + 4, y + 4, fill=fg, outline="")
|
||||
|
||||
elif key == "empfang":
|
||||
c.create_rectangle(8, 10, s - 8, s - 8, outline=fg, width=2)
|
||||
c.create_line(8, 10, m, m + 2, fill=fg, width=2)
|
||||
c.create_line(s - 8, 10, m, m + 2, fill=fg, width=2)
|
||||
|
||||
elif key == "praxis_chat":
|
||||
c.create_oval(8, 7, s - 8, s - 13, outline=fg, width=2)
|
||||
bg = c.cget("bg")
|
||||
c.create_polygon(12, s - 14, 10, s - 7, 18, s - 14,
|
||||
fill=bg, outline=bg)
|
||||
c.create_line(12, s - 14, 10, s - 7, fill=fg, width=2)
|
||||
c.create_line(10, s - 7, 17, s - 14, fill=fg, width=2)
|
||||
|
||||
_GRID_COLS = 2
|
||||
_BAR_H = 6
|
||||
_WIN_W = 620
|
||||
_WIN_MIN_W = 520
|
||||
_WIN_MIN_H = 500
|
||||
|
||||
|
||||
class _Tooltip:
|
||||
"""Dezentes Hover-Tooltip im Glas-Stil."""
|
||||
|
||||
def __init__(self, widget, text: str):
|
||||
self._widget = widget
|
||||
self._text = text
|
||||
self._tip = None
|
||||
widget.bind("<Enter>", self._show, add="+")
|
||||
widget.bind("<Leave>", self._hide, add="+")
|
||||
|
||||
def _show(self, event):
|
||||
if self._tip:
|
||||
return
|
||||
x = self._widget.winfo_rootx() + 20
|
||||
y = self._widget.winfo_rooty() + self._widget.winfo_height() + 4
|
||||
self._tip = tw = tk.Toplevel(self._widget)
|
||||
tw.wm_overrideredirect(True)
|
||||
tw.wm_geometry(f"+{x}+{y}")
|
||||
tw.configure(bg="#2D3436")
|
||||
tk.Label(
|
||||
tw, text=self._text,
|
||||
font=(FONT_FAMILY, 9), fg="#F0F0F0", bg="#2D3436",
|
||||
padx=12, pady=8, justify="left",
|
||||
).pack()
|
||||
|
||||
def _hide(self, event):
|
||||
if self._tip:
|
||||
self._tip.destroy()
|
||||
self._tip = None
|
||||
|
||||
def update_text(self, text: str):
|
||||
self._text = text
|
||||
|
||||
|
||||
class AzaLauncher(tk.Tk):
|
||||
"""Premium-Startseite mit Modulauswahl, KI-Kapazität und verstecktem Admin."""
|
||||
|
||||
def __init__(self):
    """Build the launcher window: icon, logo assets, saved prefs and UI."""
    super().__init__()
    self.title("AzA-Cockpit")
    self.configure(bg=BG)
    self.resizable(True, True)
    self.minsize(_WIN_MIN_W, _WIN_MIN_H)

    # AppUserModelID is already set in __main__ (before window creation).
    # Look for logo.ico next to the script, in the PyInstaller temp dir,
    # or next to the frozen executable; the first existing hit wins.
    for _d in [os.path.dirname(os.path.abspath(__file__)),
               getattr(sys, "_MEIPASS", ""),
               os.path.dirname(sys.executable) if getattr(sys, "frozen", False) else ""]:
        if _d:
            _ip = os.path.join(_d, "logo.ico")
            if os.path.isfile(_ip):
                try:
                    self.iconbitmap(_ip)
                except Exception:
                    # A broken .ico must not prevent startup.
                    pass
                break

    self.attributes("-topmost", True)

    self._logo_img = None
    # Same logo.png as the header, resized to _ICON_SZ for the AzA-Office tile.
    self._kg_tile_icon = None
    try:
        import sys as _sys
        # Frozen builds look next to the exe (and its _internal dir) first,
        # then fall back to the script directory.
        _search = []
        if getattr(_sys, "frozen", False):
            _exe = os.path.dirname(os.path.abspath(_sys.executable))
            _search.append(_exe)
            _search.append(os.path.join(_exe, "_internal"))
        _search.append(os.path.dirname(os.path.abspath(__file__)))
        logo_path = None
        for _d in _search:
            _p = os.path.join(_d, "logo.png")
            if os.path.isfile(_p):
                logo_path = _p
                break
        if logo_path:
            from PIL import Image, ImageTk
            _pil = Image.open(logo_path)
            if _pil.mode not in ("RGB", "RGBA"):
                _pil = _pil.convert("RGBA")
            self._logo_img = ImageTk.PhotoImage(
                _pil.resize((82, 82), Image.Resampling.LANCZOS),
                master=self,
            )
            self._kg_tile_icon = ImageTk.PhotoImage(
                _pil.resize((_ICON_SZ, _ICON_SZ), Image.Resampling.LANCZOS),
                master=self,
            )
    except Exception:
        # Missing PIL or logo: launcher still works, just without images.
        pass

    self._selected_module = None
    prefs = load_launcher_prefs()
    self._auto_open_var = tk.BooleanVar(value=prefs.get("auto_open", False))

    self._build_ui()
    self._apply_geometry()
    self.protocol("WM_DELETE_WINDOW", self._on_close)
|
||||
|
||||
def _apply_geometry(self):
    """Restore the saved launcher geometry, or center a size-to-fit window."""
    saved_geom = load_toplevel_geometry("launcher")
    if saved_geom:
        try:
            self.geometry(saved_geom)
            return
        except Exception:
            # Invalid saved geometry string: fall through to auto-sizing.
            pass
    # No usable saved geometry: size to the requested dimensions,
    # clamp to the screen, and center the window.
    self.update_idletasks()
    req_w = max(self.winfo_reqwidth(), _WIN_W)
    req_h = self.winfo_reqheight() + 20
    sw = self.winfo_screenwidth()
    sh = self.winfo_screenheight()
    win_w = min(req_w, sw - 40)
    win_h = min(req_h, sh - 80)
    self.minsize(_WIN_MIN_W, min(win_h, _WIN_MIN_H))
    x = (sw - win_w) // 2
    y = max(20, (sh - win_h) // 2)
    self.geometry(f"{win_w}x{win_h}+{x}+{y}")
|
||||
|
||||
# ── UI Aufbau ──────────────────────────────────────────────────────────────
|
||||
|
||||
def _build_ui(self):
    """Assemble header (logo/title/capacity bar), module card grid and footer."""
    outer = tk.Frame(self, bg=BG)
    outer.pack(fill="both", expand=True, padx=36, pady=(28, 20))

    # ── Header ──
    header = tk.Frame(outer, bg=BG)
    header.pack(fill="x")

    title_row = tk.Frame(header, bg=BG)
    title_row.pack(fill="x")

    if self._logo_img:
        try:
            logo_lbl = tk.Label(title_row, image=self._logo_img, bg=BG, cursor="hand2")
            # Keep a reference so Tk does not garbage-collect the image.
            logo_lbl.image = self._logo_img
            logo_lbl.pack(side="left", padx=(0, 14))
            # Hidden admin entry: double-click on the logo.
            logo_lbl.bind("<Double-Button-1>", self._open_admin)
        except tk.TclError:
            self._logo_img = None

    title_block = tk.Frame(title_row, bg=BG)
    title_block.pack(side="left", anchor="w")
    aza_lbl = tk.Label(title_block, text="AzA",
                       font=(FONT_FAMILY, 19, "bold"), fg="#1a4d6d", bg=BG,
                       cursor="hand2")
    aza_lbl.pack(anchor="w")
    # Hidden admin entry: double-click on the "AzA" title as well.
    aza_lbl.bind("<Double-Button-1>", self._open_admin)
    tk.Label(title_block, text="von Arzt zu Arzt",
             font=(FONT_FAMILY, 11), fg="#1a4d6d", bg=BG
             ).pack(anchor="w")

    self._build_capacity_bar(header)

    # ── Separator ──
    tk.Frame(outer, bg="#E2E8F0", height=1).pack(fill="x", pady=(14, 16))

    # ── Card Grid ──
    grid = tk.Frame(outer, bg=BG)
    grid.pack(fill="both", expand=True)

    for c in range(_GRID_COLS):
        grid.columnconfigure(c, weight=1, uniform="col")
    # Ceiling division: number of rows needed for all module tiles.
    num_rows = (len(LAUNCHER_MODULES) + _GRID_COLS - 1) // _GRID_COLS
    for r in range(num_rows):
        grid.rowconfigure(r, weight=1)

    # Slightly different gray per tile; later tiles reuse the last shade.
    _CARD_GRAYS = ["#f2f4f6", "#f5f7f9", "#f7f8fa", "#f9fafb", "#fafbfc", "#fbfcfd"]

    for i, mod_key in enumerate(LAUNCHER_MODULES):
        row, col = divmod(i, _GRID_COLS)
        card_bg = _CARD_GRAYS[min(i, len(_CARD_GRAYS) - 1)]
        card = self._create_card(grid, mod_key, card_bg=card_bg)
        card.grid(row=row, column=col,
                  padx=SPACING // 2, pady=SPACING // 2,
                  sticky="nsew")

    # Auto-open section temporarily disabled (endless-loop problem).

    # ── Footer ──
    _FOOTER_BG = "#EDF2F7"
    footer_wrap = tk.Frame(outer, bg=_FOOTER_BG, highlightthickness=0)
    footer_wrap.pack(fill="x", pady=(16, 0), ipady=8)

    footer = tk.Frame(footer_wrap, bg=_FOOTER_BG)
    footer.pack(fill="x", padx=12)

    lbl_status = tk.Label(
        footer, text="Systemstatus",
        font=(FONT_FAMILY, 9, "bold"), fg=ACCENT, bg=_FOOTER_BG,
        cursor="hand2",
    )
    lbl_status.pack(side="left")
    lbl_status.bind("<Button-1>", self._open_systemstatus)

    self._build_key_status(footer, _FOOTER_BG)

    # Version label is best-effort; a missing aza_version must not break the UI.
    try:
        from aza_version import APP_VERSION
        tk.Label(
            footer, text=f"v{APP_VERSION}",
            font=(FONT_FAMILY, 8), fg="#A0AEC0", bg=_FOOTER_BG,
        ).pack(side="right")
    except Exception:
        pass
|
||||
|
||||
# ── Auto-Open Bereich ──────────────────────────────────────────────────────
|
||||
|
||||
def _build_auto_open_section(self, parent):
    """Build the "remember default start module" controls.

    NOTE(review): _build_ui no longer calls this (auto-open disabled in
    this revision); kept for when the feature is re-enabled — confirm no
    other caller exists.
    """
    prefs = load_launcher_prefs()
    default_mod = prefs.get("default_module", "")

    section = tk.Frame(parent, bg=BG)
    section.pack(fill="x", pady=(14, 0))

    tk.Frame(section, bg="#E2E8F0", height=1).pack(fill="x", pady=(0, 10))

    row1 = tk.Frame(section, bg=BG)
    row1.pack(fill="x")

    tk.Checkbutton(
        row1,
        text="Nächste Auswahl als Standardstart merken",
        variable=self._auto_open_var,
        font=(FONT_FAMILY, 9), fg=TEXT, bg=BG,
        activebackground=BG, selectcolor=CARD_BG,
        command=self._on_auto_open_toggle,
    ).pack(side="left")

    row2 = tk.Frame(section, bg=BG)
    row2.pack(fill="x", pady=(4, 0))

    self._auto_open_status = tk.Label(row2, font=(FONT_FAMILY, 9), bg=BG)
    self._auto_open_status.pack(side="left")

    # "Reset" link; only packed while a default module is set
    # (see _update_auto_open_display).
    self._auto_open_reset = tk.Label(
        row2, text="Zurücksetzen",
        font=(FONT_FAMILY, 8, "underline"), fg=ACCENT, bg=BG,
        cursor="hand2",
    )
    self._auto_open_reset.bind("<Button-1>", self._reset_auto_open)

    self._auto_open_help = tk.Label(
        section, font=(FONT_FAMILY, 8), fg=SUBTLE, bg=BG,
    )
    self._auto_open_help.pack(anchor="w", pady=(2, 0))

    self._update_auto_open_display(default_mod)
|
||||
|
||||
def _update_auto_open_display(self, mod_key: str):
    """Sync the status/help labels and the reset link with *mod_key*."""
    has_default = bool(mod_key) and mod_key in LAUNCHER_MODULES
    if not has_default:
        self._auto_open_status.configure(
            text="Kein Standardstart festgelegt",
            fg=SUBTLE,
        )
        self._auto_open_reset.pack_forget()
        self._auto_open_help.configure(
            text="Aktivieren Sie die Option und wählen Sie ein Modul, um einen Standardstart festzulegen.",
        )
        return

    label = LAUNCHER_MODULE_LABELS.get(mod_key, mod_key)
    self._auto_open_status.configure(
        text=f"Standardstart: {label}",
        fg=ACCENT,
    )
    self._auto_open_reset.pack(side="left", padx=(10, 0))
    self._auto_open_help.configure(
        text="Dieses Modul öffnet sich beim nächsten Programmstart automatisch.",
    )
|
||||
|
||||
def _on_auto_open_toggle(self):
    """Checkbox callback: un-checking immediately clears any stored default."""
    if self._auto_open_var.get():
        return
    save_launcher_prefs("", False)
    self._update_auto_open_display("")
|
||||
|
||||
def _reset_auto_open(self, event=None):
    """Click handler for the reset link: forget the stored default module."""
    self._auto_open_var.set(False)
    save_launcher_prefs("", False)
    self._update_auto_open_display("")
|
||||
|
||||
# ── Admin-Zugang ──────────────────────────────────────────────────────────
|
||||
|
||||
def _open_systemstatus(self, event=None):
    """Open the system-status window; failures are deliberately ignored."""
    try:
        from aza_systemstatus import show_systemstatus
    except Exception:
        return
    try:
        show_systemstatus(self)
    except Exception:
        pass
|
||||
|
||||
def _open_admin(self, event=None):
    """Hidden admin entry (double-click on logo/title): login, then panel.

    Best-effort: any failure is swallowed so the launcher keeps running.
    """
    try:
        from aza_admin import show_admin_login, show_admin_panel
        if not show_admin_login(self):
            return
        show_admin_panel(self)
        self._refresh_capacity()
    except Exception:
        pass
|
||||
|
||||
def _refresh_capacity(self):
    """Refresh the capacity label and gradient bar after an admin action.

    Best-effort: any error (e.g. backend unavailable) leaves the old
    display in place. NOTE(review): the tooltip texts created in
    _build_capacity_bar are not refreshed here — confirm that is intended.
    """
    try:
        remaining = get_remaining_tokens()
        pct = get_capacity_fraction()
        low = is_capacity_low()
        # Removed dead call: estimated_reports_remaining() was fetched but
        # never used in this method (only the tooltip in
        # _build_capacity_bar shows the estimate).

        self._cap_label.configure(
            text=f"Ihre KI-Kapazität: {format_number_de(remaining)} Einheiten verbleibend",
            fg=WARNING_AMBER if low else SUBTLE,
        )
        self._cap_canvas.delete("all")
        self._cap_canvas.update_idletasks()
        self._draw_gradient_bar(self._cap_canvas, pct, _BAR_H)
    except Exception:
        pass
|
||||
|
||||
# ── KI-Kapazitätsanzeige ──────────────────────────────────────────────────
|
||||
|
||||
def _build_capacity_bar(self, parent):
    """Create the AI-capacity label, the gradient bar canvas and tooltips."""
    frame = tk.Frame(parent, bg=BG)
    frame.pack(fill="x", pady=(12, 0))

    remaining = get_remaining_tokens()
    pct = get_capacity_fraction()
    low = is_capacity_low()
    est = estimated_reports_remaining()

    label_text = f"KI-Kapazität: {format_number_de(remaining)} Einheiten"
    color = WARNING_AMBER if low else "#A0AEC0"
    self._cap_label = tk.Label(
        frame, text=label_text,
        font=(FONT_FAMILY, 8), fg=color, bg=BG, anchor="e",
    )
    self._cap_label.pack(anchor="e", pady=(0, 3))

    self._cap_canvas = canvas = tk.Canvas(
        frame, height=_BAR_H, bg=BG, highlightthickness=0,
    )
    canvas.pack(fill="x")
    # Redraw on every resize so the bar always spans the current width.
    canvas.bind("<Configure>", lambda e: self._draw_gradient_bar(canvas, pct, _BAR_H))

    tooltip_text = (
        f"Entspricht ca. {est} weiteren Berichten\n"
        "(Basierend auf Ihrem Durchschnittsverbrauch)"
    )
    if low:
        # Extra warning when the capacity is almost used up.
        tooltip_text += (
            "\n\n\u26A0 Kapazität fast aufgebraucht.\n"
            "Guthaben unter aza-medwork.ch nachfüllen."
        )
    _Tooltip(canvas, tooltip_text)
    _Tooltip(self._cap_label, tooltip_text)
|
||||
|
||||
@staticmethod
def _draw_gradient_bar(canvas, pct: float, bar_h: int):
    """Paint the capacity bar: red at <=2%, amber at <=10%, otherwise a
    horizontal blue→teal gradient over the filled portion."""
    canvas.delete("all")
    width = canvas.winfo_width()
    if width <= 1:
        # Canvas not mapped yet; the <Configure> binding will redraw later.
        return
    filled = max(0, min(width, int(width * pct)))

    # Track background.
    canvas.create_rectangle(0, 0, width, bar_h, fill="#EDF2F7", outline="")

    if filled <= 0:
        return

    if pct <= 0.02:
        canvas.create_rectangle(0, 0, filled, bar_h, fill=DANGER, outline="")
        return
    if pct <= 0.10:
        canvas.create_rectangle(0, 0, filled, bar_h, fill=WARNING_AMBER, outline="")
        return

    # Gradient #0078D7 -> #00CEC9, drawn in at most 120 vertical slices.
    start = (0x00, 0x78, 0xD7)
    end = (0x00, 0xCE, 0xC9)
    steps = min(filled, 120)
    slice_w = filled / steps
    denom = max(1, steps - 1)
    for i in range(steps):
        t = i / denom
        r = int(start[0] + (end[0] - start[0]) * t)
        g = int(start[1] + (end[1] - start[1]) * t)
        b = int(start[2] + (end[2] - start[2]) * t)
        canvas.create_rectangle(
            int(i * slice_w), 0, int((i + 1) * slice_w), bar_h,
            fill=f"#{r:02x}{g:02x}{b:02x}", outline="",
        )
|
||||
|
||||
# ── Key-Status ────────────────────────────────────────────────────────────
|
||||
|
||||
def _build_key_status(self, parent, bg_color=None):
    """Show a small "key active (masked)" footer label when a vault key exists.

    Entirely best-effort: a missing/broken security_vault is silently ignored.
    """
    bg = bg_color or BG
    try:
        from security_vault import get_masked_key, has_vault_key
        if not has_vault_key():
            return
        masked = get_masked_key()
        tk.Label(
            parent,
            text=f"Schlüssel aktiv ({masked})",
            font=(FONT_FAMILY, 8), fg="#00B894", bg=bg,
        ).pack(side="left", padx=(12, 0))
    except Exception:
        pass
|
||||
|
||||
# ── Card Rendering ────────────────────────────────────────────────────────
|
||||
|
||||
def _create_card(self, parent, mod_key: str, card_bg: str = None) -> tk.Frame:
    """Build one clickable module tile.

    Disabled modules render grayed out with a "Bald verfügbar" tag and get
    no hover/click bindings. Enabled tiles highlight on hover and select
    the module (closing the launcher) on click.

    Args:
        parent: the grid frame the card is created in.
        mod_key: launcher module key (see LAUNCHER_MODULES).
        card_bg: optional per-card background color.
    Returns:
        The outer card frame (caller grids it).
    """
    label = LAUNCHER_MODULE_LABELS.get(mod_key, mod_key)
    desc = _MODULE_DESCRIPTIONS.get(mod_key, "")
    is_disabled = mod_key in LAUNCHER_DISABLED_MODULES
    icon_color = "#B0BEC5" if is_disabled else _MODULE_ICON_COLORS.get(mod_key, ACCENT)
    _cbg = "#ECEFF1" if is_disabled else (card_bg or CARD_BG)
    _cursor = "arrow" if is_disabled else "hand2"
    _text_fg = "#90A4AE" if is_disabled else TEXT
    _desc_fg = "#B0BEC5" if is_disabled else SUBTLE

    card = tk.Frame(parent, bg=_cbg, cursor=_cursor,
                    highlightthickness=1, highlightbackground=CARD_BORDER)

    inner = tk.Frame(card, bg=_cbg, cursor=_cursor)
    inner.pack(fill="both", expand=True, padx=20, pady=18)

    top_row = tk.Frame(inner, bg=_cbg, cursor=_cursor)
    top_row.pack(fill="x", pady=(0, 10))

    icon_cv = tk.Canvas(top_row, width=_ICON_SZ, height=_ICON_SZ,
                        bg=icon_color, highlightthickness=0, cursor=_cursor)
    icon_cv.pack(side="left")
    # The AzA-Office ("kg") tile reuses the header logo when it was loaded.
    if mod_key == "kg" and getattr(self, "_kg_tile_icon", None) is not None:
        icon_cv.create_image(
            _ICON_SZ // 2, _ICON_SZ // 2,
            image=self._kg_tile_icon,
        )
    else:
        _draw_module_icon(icon_cv, mod_key)

    lbl_title = tk.Label(inner, text=label, font=(FONT_FAMILY, 12, "bold"),
                         fg=_text_fg, bg=_cbg, anchor="w", cursor=_cursor)
    lbl_title.pack(anchor="w")

    if desc:
        lbl_desc = tk.Label(inner, text=desc, font=(FONT_FAMILY, 9),
                            fg=_desc_fg, bg=_cbg, anchor="w",
                            justify="left", cursor=_cursor)
        lbl_desc.pack(anchor="w", pady=(4, 0))

    if is_disabled:
        tk.Label(inner, text="Bald verf\u00fcgbar", font=(FONT_FAMILY, 8, "italic"),
                 fg="#B0BEC5", bg=_cbg).pack(anchor="w", pady=(4, 0))
        return card  # disabled tiles get no hover/click handlers

    def on_enter(e):
        card.configure(highlightbackground=CARD_HOVER_BORDER, highlightthickness=2)
        for w in _deep_children(card):
            try:
                if isinstance(w, tk.Canvas):
                    continue  # keep the icon canvas color
                w.configure(bg=CARD_HOVER_BG)
            except tk.TclError:
                pass

    def on_leave(e):
        card.configure(highlightbackground=CARD_BORDER, highlightthickness=1)
        for w in _deep_children(card):
            try:
                if isinstance(w, tk.Canvas):
                    continue
                # BUGFIX: restore this card's own background (_cbg), not the
                # global CARD_BG — otherwise the per-card gray assigned by
                # _build_ui is lost after the first hover.
                w.configure(bg=_cbg)
            except tk.TclError:
                pass

    def on_click(e, key=mod_key):
        self._select(key)

    for w in _deep_children(card):
        w.bind("<Enter>", on_enter)
        w.bind("<Leave>", on_leave)
        w.bind("<Button-1>", on_click)

    return card
|
||||
|
||||
# ── Modul-Auswahl / Schliessen ────────────────────────────────────────────
|
||||
|
||||
def _save_geom(self):
    """Persist the current window geometry; ignore persistence errors."""
    try:
        save_toplevel_geometry("launcher", self.geometry())
    except Exception:
        pass
|
||||
|
||||
def _select(self, mod_key: str):
    """Record the chosen module, persist prefs and geometry, then close."""
    self._selected_module = mod_key
    remember = self._auto_open_var.get()
    save_launcher_prefs(mod_key if remember else "", bool(remember))
    self._save_geom()
    self.destroy()
|
||||
|
||||
def _on_close(self):
    """Window-close handler: no module selected, just remember geometry."""
    self._selected_module = None
    self._save_geom()
    self.destroy()
|
||||
|
||||
def run(self) -> str | None:
    """Run the Tk main loop and return the selected module key (or None)."""
    self.mainloop()
    return self._selected_module
|
||||
|
||||
|
||||
def _deep_children(widget):
    """Return *widget* plus all nested descendants, depth-first (pre-order)."""
    return [widget] + [w
                       for child in widget.winfo_children()
                       for w in _deep_children(child)]
|
||||
|
||||
|
||||
def should_skip_launcher() -> tuple[bool, str]:
    """Default-start is temporarily disabled (endless-loop problem).

    Any previously stored auto-open preference is cleared as a side effect;
    the launcher is therefore never skipped.
    """
    try:
        stale = load_launcher_prefs()
        if stale.get("auto_open"):
            stale["auto_open"] = False
            stale["default_module"] = ""
            save_launcher_prefs("", False)
    except Exception:
        pass
    return False, ""
|
||||
43
AzA march 2026 - Kopie (18)/aza_license_logic.py
Normal file
@@ -0,0 +1,43 @@
|
||||
import os
|
||||
import time
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class LicenseDecision:
    """Outcome of a license check."""
    valid: bool                 # may the app run?
    valid_until: Optional[int]  # epoch seconds (UTC) the decision is based on
    status: str                 # "active" | "grace" | "expired" | "none"


def compute_license_decision(current_period_end: Optional[int], status: Optional[str]) -> LicenseDecision:
    """Decide whether a license is usable right now.

    Args:
        current_period_end: End of the paid period as epoch seconds (UTC),
            or None/0 when unknown.
        status: Normalized DB status or Stripe subscription status
            (best effort); compared case-insensitively after stripping.

    Returns:
        A LicenseDecision. A period that has ended may still be valid:
        within the AZA_GRACE_DAYS grace window the status is "grace", and
        a DB status of "active" is trusted for 24 hours (the billing
        webhook may not have updated current_period_end yet).
    """
    now = int(time.time())
    normalized = (status or "").lower().strip()

    if not current_period_end:
        if normalized == "active":
            # No period known but DB says active: trust it for 24 hours.
            fallback = now + 24 * 60 * 60
            return LicenseDecision(valid=True, valid_until=fallback, status="active")
        return LicenseDecision(valid=False, valid_until=None, status="none")

    # ROBUSTNESS FIX: a malformed AZA_GRACE_DAYS value previously raised
    # ValueError and crashed the license check; fall back to "no grace".
    try:
        grace_days = int(os.getenv("AZA_GRACE_DAYS", "0"))
    except (TypeError, ValueError):
        grace_days = 0
    grace_seconds = grace_days * 24 * 60 * 60

    if now <= int(current_period_end):
        return LicenseDecision(valid=True, valid_until=int(current_period_end), status="active")

    if grace_seconds > 0 and now <= int(current_period_end) + grace_seconds:
        return LicenseDecision(valid=True, valid_until=int(current_period_end), status="grace")

    # Period expired but DB status is still "active": trust the status.
    # Stripe/WC webhook may not have updated current_period_end yet.
    if normalized == "active":
        fallback = now + 24 * 60 * 60
        return LicenseDecision(valid=True, valid_until=fallback, status="active")

    return LicenseDecision(valid=False, valid_until=int(current_period_end), status="expired")
|
||||
388
AzA march 2026 - Kopie (18)/aza_macro.py
Normal file
@@ -0,0 +1,388 @@
|
||||
import asyncio
|
||||
import json
|
||||
import sys
|
||||
from pathlib import Path
|
||||
import tkinter as tk
|
||||
from tkinter import messagebox, simpledialog
|
||||
|
||||
from aza_config import get_writable_data_dir
|
||||
|
||||
# Playwright is optional at import time: remember the import error so a
# helpful RuntimeError can be raised later, when a macro actually runs.
try:
    from playwright.async_api import TimeoutError as PlaywrightTimeoutError
    from playwright.async_api import async_playwright
except Exception as exc:  # pragma: no cover
    async_playwright = None
    PlaywrightTimeoutError = Exception  # fallback so except clauses still work
    _PLAYWRIGHT_IMPORT_ERROR = exc
else:
    _PLAYWRIGHT_IMPORT_ERROR = None
|
||||
|
||||
|
||||
# Fallback step sequence used when macro_steps.json is absent or unreadable.
# NOTE(review): "<clipboard>" looks like a placeholder, but no substitution
# for it is visible in this file — confirm whether it is replaced elsewhere
# or filled literally.
DEFAULT_STEPS = [
    {"action": "click", "selector": 'button:has-text("Einstellungen")'},
    {"action": "click", "selector": 'li:has-text("Importieren")'},
    {"action": "fill", "selector": "#textarea-input", "value": "<clipboard>"},
    {"action": "click", "selector": "#submit-button"},
]
# Profile used when no name is given on the command line.
DEFAULT_PROFILE_NAME = "macro1"
|
||||
|
||||
|
||||
def _notify(title: str, text: str, is_error: bool = False) -> None:
    """Show a standalone info/error message box using a hidden Tk root."""
    root = tk.Tk()
    root.withdraw()
    show = messagebox.showerror if is_error else messagebox.showinfo
    try:
        show(title, text)
    finally:
        root.destroy()
|
||||
|
||||
|
||||
def _ask_string(title: str, prompt: str, initial_value: str = "") -> str | None:
    """Prompt for a single string via a temporary hidden Tk root.

    Returns None when the dialog is cancelled.
    """
    root = tk.Tk()
    root.withdraw()
    try:
        return simpledialog.askstring(title, prompt, initialvalue=initial_value, parent=root)
    finally:
        root.destroy()
|
||||
|
||||
|
||||
def _macro_steps_path() -> Path:
    """Location of the user-editable macro step list."""
    return Path(get_writable_data_dir(), "macro_steps.json")
|
||||
|
||||
|
||||
def _profiles_path() -> Path:
    """Location of the recorded macro profiles."""
    return Path(get_writable_data_dir(), "macro_profiles.json")
|
||||
|
||||
|
||||
def _load_steps() -> list[dict]:
    """Load macro steps from disk; fall back to DEFAULT_STEPS on any problem."""
    path = _macro_steps_path()
    if path.exists():
        try:
            loaded = json.loads(path.read_text(encoding="utf-8"))
        except Exception:
            loaded = None
        if isinstance(loaded, list):
            return loaded
    return DEFAULT_STEPS
|
||||
|
||||
|
||||
def _load_profiles() -> dict:
    """Load recorded macro profiles; an unreadable/invalid file yields {}."""
    path = _profiles_path()
    if path.exists():
        try:
            loaded = json.loads(path.read_text(encoding="utf-8"))
        except Exception:
            loaded = None
        if isinstance(loaded, dict):
            return loaded
    return {}
|
||||
|
||||
|
||||
def _save_profiles(data: dict) -> None:
    """Write all macro profiles as pretty-printed UTF-8 JSON."""
    payload = json.dumps(data, ensure_ascii=False, indent=2)
    _profiles_path().write_text(payload, encoding="utf-8")
|
||||
|
||||
|
||||
def _normalize_click_steps(raw_clicks: list[dict]) -> list[dict]:
    """Turn raw recorder events into click steps.

    Empty selectors are dropped, and immediate repeats of the same
    selector are collapsed into a single click.
    """
    steps: list[dict] = []
    previous = None
    for event in raw_clicks:
        selector = str(event.get("selector", "")).strip()
        if not selector or selector == previous:
            continue
        steps.append({"action": "click", "selector": selector})
        previous = selector
    return steps
|
||||
|
||||
|
||||
class BrowserMacro:
    """Drive an already running Chromium browser (via CDP) to replay or
    record simple click/fill macros."""

    def __init__(self, cdp_url: str = "http://127.0.0.1:9222"):
        self.cdp_url = cdp_url
        self._pw = None       # Playwright driver handle (started lazily)
        self.browser = None
        self.context = None
        self.page = None

    async def connect_to_browser(self):
        """Connect via CDP to a running Chromium browser.

        Reuses the first existing context/page or creates new ones.
        Returns the page; raises RuntimeError when Playwright is missing.
        """
        if async_playwright is None:
            raise RuntimeError(
                f"Playwright ist nicht installiert/importierbar: {_PLAYWRIGHT_IMPORT_ERROR}"
            )

        self._pw = await async_playwright().start()
        self.browser = await self._pw.chromium.connect_over_cdp(self.cdp_url)

        if self.browser.contexts:
            self.context = self.browser.contexts[0]
        else:
            self.context = await self.browser.new_context()

        if self.context.pages:
            self.page = self.context.pages[0]
        else:
            self.page = await self.context.new_page()

        return self.page

    async def click_element(self, selector: str, timeout_ms: int = 12000):
        """Wait for the element to become visible, then click it."""
        await self.page.wait_for_selector(selector, state="visible", timeout=timeout_ms)
        await self.page.click(selector, timeout=timeout_ms)

    async def run_macro(self, steps: list[dict] | None = None):
        """Execute a step list (click/fill/press/wait/goto) on the page.

        Connects lazily; raises ValueError on malformed/unknown steps.
        """
        if self.page is None:
            await self.connect_to_browser()

        await self.page.wait_for_load_state("networkidle")
        # Reset browser zoom to 100% so recorded selectors/layout match.
        await self.page.keyboard.press("Control+0")

        sequence = steps or DEFAULT_STEPS
        for idx, step in enumerate(sequence, start=1):
            action = str(step.get("action", "")).strip().lower()
            if action == "click":
                selector = step.get("selector")
                if not selector:
                    raise ValueError(f"Schritt {idx}: selector fehlt.")
                await self.click_element(selector)
            elif action == "fill":
                selector = step.get("selector")
                value = step.get("value", "")
                if not selector:
                    raise ValueError(f"Schritt {idx}: selector fehlt.")
                await self.page.wait_for_selector(selector, state="visible", timeout=12000)
                await self.page.fill(selector, str(value))
            elif action == "press":
                key = step.get("key")
                if not key:
                    raise ValueError(f"Schritt {idx}: key fehlt.")
                await self.page.keyboard.press(str(key))
            elif action == "wait":
                ms = int(step.get("ms", 500))
                await self.page.wait_for_timeout(ms)
            elif action == "goto":
                url = step.get("url")
                if not url:
                    raise ValueError(f"Schritt {idx}: url fehlt.")
                await self.page.goto(str(url), wait_until="networkidle")
            else:
                raise ValueError(f"Schritt {idx}: Unbekannte action '{action}'.")

    async def run_saved_profile(self, profile_name: str):
        """Load a recorded profile by name, open its URL and replay its steps."""
        profiles = _load_profiles()
        profile = profiles.get(profile_name)
        if not isinstance(profile, dict):
            raise ValueError(
                f"Profil '{profile_name}' nicht gefunden. Rechtsklick auf 'Macro 1' zum Aufnehmen."
            )
        url = str(profile.get("url", "")).strip()
        steps = profile.get("steps")
        if not isinstance(steps, list) or not steps:
            raise ValueError(f"Profil '{profile_name}' enthält keine Schritte.")

        if self.page is None:
            await self.connect_to_browser()

        if url:
            await self.page.goto(url, wait_until="networkidle")
        else:
            # No URL stored: replay on whatever page is currently open.
            await self.page.wait_for_load_state("networkidle")
        await self.run_macro(steps)

    async def record_profile(self, profile_name: str):
        """Interactively record a click macro in the browser.

        Asks for a target URL, injects a JS recorder into the page,
        collects clicks until the user presses F8, then saves the
        normalized steps under *profile_name*.
        """
        profiles = _load_profiles()
        old_profile = profiles.get(profile_name, {}) if isinstance(profiles.get(profile_name), dict) else {}
        initial_url = str(old_profile.get("url", "")).strip() or "https://"
        url = _ask_string("Makro aufnehmen", "Zielseite (URL) für Macro 1:", initial_url)
        if not url:
            return

        if self.page is None:
            await self.connect_to_browser()

        await self.page.goto(url, wait_until="networkidle")
        await self.page.keyboard.press("Control+0")
        # Inject the recorder: builds robust selectors (id > data-testid >
        # name > aria-label > short text > structural path) and pushes
        # clicks into window.__macroClicks until F8 sets
        # window.__macroStopFlag. Re-injection cleans up a previous recorder.
        await self.page.evaluate(
            """
            () => {
              const cssEscape = (v) => {
                try { return CSS.escape(v); } catch (_) { return v.replace(/[^a-zA-Z0-9_-]/g, "_"); }
              };
              const txt = (v) => (v || "").replace(/\\s+/g, " ").trim();
              const escTxt = (v) => txt(v).replace(/"/g, '\\"');
              const buildPath = (el) => {
                const parts = [];
                let cur = el;
                for (let i = 0; i < 5 && cur && cur.nodeType === 1; i++) {
                  let part = cur.tagName.toLowerCase();
                  if (cur.id) {
                    part += "#" + cssEscape(cur.id);
                    parts.unshift(part);
                    return parts.join(" > ");
                  }
                  const siblings = Array.from(cur.parentElement ? cur.parentElement.children : []).filter(
                    (s) => s.tagName === cur.tagName
                  );
                  if (siblings.length > 1) {
                    const pos = siblings.indexOf(cur) + 1;
                    part += `:nth-of-type(${pos})`;
                  }
                  parts.unshift(part);
                  cur = cur.parentElement;
                }
                return parts.join(" > ");
              };

              const chooseTarget = (el) => {
                return el.closest('button,a,input,textarea,select,[role="button"],[data-testid]') || el;
              };
              const selectorFor = (el) => {
                const t = chooseTarget(el);
                if (!t) return "";
                if (t.id) return "#" + cssEscape(t.id);
                const testId = t.getAttribute("data-testid");
                if (testId) return `[data-testid="${testId.replace(/"/g, '\\"')}"]`;
                const name = t.getAttribute("name");
                if (name) return `${t.tagName.toLowerCase()}[name="${name.replace(/"/g, '\\"')}"]`;
                const aria = t.getAttribute("aria-label");
                if (aria) return `${t.tagName.toLowerCase()}[aria-label="${aria.replace(/"/g, '\\"')}"]`;
                const text = txt(t.innerText || t.textContent || "");
                if (text && text.length <= 80) return `${t.tagName.toLowerCase()}:has-text("${escTxt(text)}")`;
                return buildPath(t);
              };

              if (window.__macroRecorderCleanup) {
                window.__macroRecorderCleanup();
              }
              window.__macroClicks = [];
              window.__macroStopFlag = false;

              const onClick = (ev) => {
                const selector = selectorFor(ev.target);
                if (!selector) return;
                window.__macroClicks.push({ action: "click", selector });
              };
              const onKey = (ev) => {
                if (ev.key === "F8") {
                  window.__macroStopFlag = true;
                }
              };

              document.addEventListener("click", onClick, true);
              document.addEventListener("keydown", onKey, true);

              window.__macroRecorderCleanup = () => {
                document.removeEventListener("click", onClick, true);
                document.removeEventListener("keydown", onKey, true);
              };
            }
            """
        )

        _notify(
            "Makro-Aufnahme",
            "Aufnahme läuft.\n\n"
            "1) Klicke im Browser die gewünschten Schritte.\n"
            "2) Drücke F8 im Browser zum Beenden.\n\n"
            "Danach wird 'Macro 1' gespeichert.",
            is_error=False,
        )

        # Poll until the user presses F8 inside the page.
        while True:
            stop = await self.page.evaluate("() => !!window.__macroStopFlag")
            if stop:
                break
            await asyncio.sleep(0.25)

        raw_clicks = await self.page.evaluate(
            "() => (Array.isArray(window.__macroClicks) ? window.__macroClicks : [])"
        )
        # Detach the recorder listeners again.
        await self.page.evaluate(
            "() => { if (window.__macroRecorderCleanup) window.__macroRecorderCleanup(); }"
        )

        steps = _normalize_click_steps(raw_clicks if isinstance(raw_clicks, list) else [])
        if not steps:
            raise ValueError("Keine Klicks aufgenommen. Bitte erneut probieren.")

        profiles[profile_name] = {"url": url, "steps": steps}
        _save_profiles(profiles)
        _notify(
            "Makro gespeichert",
            f"Profil '{profile_name}' gespeichert ({len(steps)} Klick-Schritte).\n"
            "Start per Linksklick auf 'Macro 1'.",
            is_error=False,
        )

    async def close(self):
        """Stop the Playwright driver if it was started."""
        if self._pw is not None:
            await self._pw.stop()
            self._pw = None
|
||||
|
||||
|
||||
async def _run_default():
    """Replay the steps from macro_steps.json; report the outcome via dialogs."""
    macro = BrowserMacro()
    try:
        await macro.run_macro(_load_steps())
        _notify(
            "Makro",
            "Makro erfolgreich gestartet/ausgeführt.\n\n"
            "Hinweis: Schritte können in 'macro_steps.json' angepasst werden.",
            is_error=False,
        )
    except PlaywrightTimeoutError as exc:
        _notify("Makro-Timeout", f"Element nicht rechtzeitig gefunden:\n{exc}", is_error=True)
    except Exception as exc:
        hint = (
            "Makro konnte nicht ausgeführt werden.\n\n"
            "Prüfe:\n"
            "- Chrome/Edge mit --remote-debugging-port=9222 gestartet\n"
            "- korrekte Selektoren in macro_steps.json\n"
            f"- Details: {exc}"
        )
        _notify("Makro-Fehler", hint, is_error=True)
    finally:
        await macro.close()
|
||||
|
||||
|
||||
async def _run_profile(profile_name: str):
    """Replay a saved profile; any failure is shown as an error dialog."""
    macro = BrowserMacro()
    try:
        await macro.run_saved_profile(profile_name)
    except Exception as exc:
        _notify("Makro-Fehler", str(exc), is_error=True)
    finally:
        await macro.close()
|
||||
|
||||
|
||||
async def _record_profile(profile_name: str):
    """Record a new macro profile; any failure is shown as an error dialog."""
    macro = BrowserMacro()
    try:
        await macro.record_profile(profile_name)
    except Exception as exc:
        _notify("Makro-Fehler", str(exc), is_error=True)
    finally:
        await macro.close()
|
||||
|
||||
|
||||
def main():
    """CLI entry point: `record [name]`, `run [name]`, or default replay."""
    argv = sys.argv
    mode = argv[1].strip().lower() if len(argv) > 1 else ""
    profile_name = argv[2].strip() if len(argv) > 2 else DEFAULT_PROFILE_NAME

    if mode == "record":
        coro = _record_profile(profile_name)
    elif mode == "run":
        coro = _run_profile(profile_name)
    else:
        coro = _run_default()
    asyncio.run(coro)
|
||||
|
||||
|
||||
if __name__ == "__main__":  # script entry point
    main()
|
||||
384
AzA march 2026 - Kopie (18)/aza_med_validator.py
Normal file
@@ -0,0 +1,384 @@
|
||||
"""Zentrale Medikamenten-Validierung fuer AZA Desktop.
|
||||
|
||||
Stellt validate_medication_name() und suggest_medication_candidate() bereit.
|
||||
Verwendet eine kuratierte Liste bekannter Medikamente/Wirkstoffe im DACH-Raum
|
||||
plus difflib.get_close_matches() fuer Fuzzy-Matching.
|
||||
"""
|
||||
|
||||
import difflib
|
||||
|
||||
# Curated allow-list of brand names and active substances common in the DACH
# region, used for exact and fuzzy validation of user-entered medication
# names.  Umlauts are transliterated (ae/oe/ue) so plain-ASCII input matches.
# A few names appear in more than one section (e.g. "Budesonid",
# "Fluticason", "Sulfasalazin"); frozenset silently de-duplicates them.
_KNOWN_MEDICATIONS: frozenset[str] = frozenset({
    # --- Dermatology ---
    "Dermovate", "Clobetasol", "Elidel", "Pimecrolimus", "Protopic", "Tacrolimus",
    "Dupixent", "Dupilumab", "Cosentyx", "Secukinumab", "Tremfya", "Guselkumab",
    "Skyrizi", "Risankizumab", "Stelara", "Ustekinumab", "Humira", "Adalimumab",
    "Enbrel", "Etanercept", "Taltz", "Ixekizumab", "Bimzelx", "Bimekizumab",
    "Otezla", "Apremilast", "Fumaderm", "Dimethylfumarat", "Skilarence",
    "Methotrexat", "MTX", "Ciclosporin", "Cyclosporin", "Sandimmun",
    "Acitretin", "Neotigason", "Isotretinoin", "Roaccutan", "Accutane",
    "Daivobet", "Calcipotriol", "Daivonex", "Betamethason", "Betnovate",
    "Hydrocortison", "Advantan", "Methylprednisolon", "Prednisolon", "Prednison",
    "Dexamethason", "Triamcinolon", "Kenacort", "Mometason", "Elocom",
    "Fluticason", "Cutivate", "Fluocinolon", "Synalar", "Halcinonid",
    "Aldara", "Imiquimod", "Efudix", "Fluorouracil", "5-FU",
    "Picato", "Ingenolmebutat", "Solaraze", "Diclofenac",
    "Lamisil", "Terbinafin", "Itraconazol", "Sporanox", "Fluconazol", "Diflucan",
    "Ketoconazol", "Nizoral", "Clotrimazol", "Canesten", "Miconazol", "Daktarin",
    "Nystatin", "Permethrin", "Infectoscab", "Ivermectin", "Stromectol", "Scabioral",
    "Aciclovir", "Zovirax", "Valaciclovir", "Valtrex",
    "Mupirocin", "Bactroban", "Fusidinsaeure", "Fucidine", "Fucidin",
    "Retinol", "Tretinoin", "Adapalen", "Differin", "Epiduo",
    "Azelainsaeure", "Skinoren", "Benzoylperoxid", "BPO",
    "Doxycyclin", "Minocyclin", "Tetracyclin",
    "Bilastin", "Cetirizin", "Loratadin", "Desloratadin", "Aerius",
    "Fexofenadin", "Telfast", "Levocetirizin", "Xyzal",
    "Hydroxyzin", "Atarax", "Clemastin", "Tavegil",
    "Rituximab", "MabThera", "Omalizumab", "Xolair",
    "Baricitinib", "Olumiant", "Upadacitinib", "Rinvoq",
    "Abrocitinib", "Cibinqo", "Ruxolitinib", "Jakavi", "Opzelura",
    "Tralokinumab", "Adtralza", "Lebrikizumab", "Ebglyss",
    "Deucravacitinib", "Sotyktu", "Tildrakizumab", "Ilumetri",
    "Brodalumab", "Kyntheum", "Certolizumab", "Cimzia",
    "Infliximab", "Remicade", "Golimumab", "Simponi",

    # --- Pain / analgesics ---
    "Paracetamol", "Dafalgan", "Panadol", "Ibuprofen", "Brufen", "Algifor",
    "Naproxen", "Aspirin", "Acetylsalicylsaeure", "Novalgin", "Metamizol",
    "Tramadol", "Tramal", "Morphin", "Oxycodon", "Targin", "Fentanyl",
    "Durogesic", "Buprenorphin", "Temgesic", "Transtec",
    "Gabapentin", "Neurontin", "Pregabalin", "Lyrica",
    "Celecoxib", "Celebrex", "Etoricoxib", "Arcoxia",
    "Indometacin", "Piroxicam", "Meloxicam",

    # --- Cardio / blood pressure / anticoagulation ---
    "Amlodipin", "Norvasc", "Lisinopril", "Enalapril", "Ramipril", "Triatec",
    "Candesartan", "Atacand", "Valsartan", "Diovan", "Losartan", "Cosaar",
    "Irbesartan", "Aprovel", "Telmisartan", "Micardis",
    "Metoprolol", "Beloc", "Bisoprolol", "Concor", "Atenolol", "Tenormin",
    "Propranolol", "Inderal", "Carvedilol", "Nebivolol",
    "Hydrochlorothiazid", "HCT", "Torasemid", "Furosemid", "Lasix",
    "Spironolacton", "Aldactone", "Eplerenon",
    "Marcoumar", "Phenprocoumon", "Xarelto", "Rivaroxaban",
    "Eliquis", "Apixaban", "Pradaxa", "Dabigatran", "Lixiana", "Edoxaban",
    "Heparin", "Enoxaparin", "Clexane", "Clopidogrel", "Plavix",
    "Ticagrelor", "Brilique", "Prasugrel", "Efient",
    "Atorvastatin", "Sortis", "Rosuvastatin", "Crestor", "Simvastatin", "Zocor",
    "Pravastatin", "Ezetimib", "Ezetrol", "Evolocumab", "Repatha",
    "Alirocumab", "Praluent", "Inclisiran", "Leqvio",
    "Amiodaron", "Cordarone", "Digoxin",

    # --- Diabetes ---
    "Metformin", "Glucophage", "Jardiance", "Empagliflozin",
    "Forxiga", "Dapagliflozin", "Invokana", "Canagliflozin",
    "Ozempic", "Semaglutid", "Wegovy", "Rybelsus",
    "Trulicity", "Dulaglutid", "Victoza", "Liraglutid",
    "Januvia", "Sitagliptin", "Galvus", "Vildagliptin",
    "Insulin", "Lantus", "Levemir", "Tresiba", "NovoRapid", "Humalog",
    "Glimepirid", "Amaryl", "Gliclazid",
    "Pioglitazon", "Actos",

    # --- Antibiotics ---
    "Amoxicillin", "Augmentin", "Amoxicillin-Clavulansaeure",
    "Ciprofloxacin", "Cipro", "Levofloxacin", "Tavanic",
    "Moxifloxacin", "Avalox", "Azithromycin", "Zithromax",
    "Clarithromycin", "Klacid", "Erythromycin",
    "Cotrimoxazol", "Bactrim", "Trimethoprim",
    "Cefuroxim", "Zinnat", "Ceftriaxon", "Rocephin",
    "Cefpodoxim", "Cephalexin", "Clindamycin", "Dalacin",
    "Metronidazol", "Flagyl", "Penicillin", "Nitrofurantoin",
    "Fosfomycin", "Monuril", "Rifampicin",
    "Linezolid", "Zyvoxid", "Vancomycin", "Daptomycin",

    # --- Psychotropics ---
    "Sertralin", "Zoloft", "Escitalopram", "Cipralex", "Citalopram",
    "Fluoxetin", "Fluctine", "Paroxetin", "Deroxat",
    "Venlafaxin", "Efexor", "Duloxetin", "Cymbalta",
    "Mirtazapin", "Remeron", "Amitriptylin", "Saroten",
    "Quetiapin", "Seroquel", "Olanzapin", "Zyprexa",
    "Risperidon", "Risperdal", "Aripiprazol", "Abilify",
    "Lithium", "Quilonum", "Valproat", "Depakine",
    "Carbamazepin", "Tegretol", "Lamotrigin", "Lamictal",
    "Lorazepam", "Temesta", "Diazepam", "Valium",
    "Oxazepam", "Midazolam", "Dormicum",
    "Zolpidem", "Stilnox", "Zopiclon",
    "Methylphenidat", "Ritalin", "Concerta",
    "Atomoxetin", "Strattera",

    # --- Gastrointestinal ---
    "Omeprazol", "Antra", "Pantoprazol", "Pantozol", "Zurcal",
    "Esomeprazol", "Nexium", "Lansoprazol", "Rabeprazol",
    "Ranitidin", "Famotidin", "Antacida",
    "Ondansetron", "Zofran", "Metoclopramid", "Paspertin",
    "Domperidon", "Motilium", "Loperamid", "Imodium",
    "Mesalazin", "Pentasa", "Salofalk", "Sulfasalazin",
    "Budesonid", "Entocort", "Azathioprin", "Imurek",
    "Mercaptopurin",

    # --- Respiratory ---
    "Salbutamol", "Ventolin", "Formoterol", "Salmeterol",
    "Beclomethason", "Budesonid", "Pulmicort",
    "Fluticason", "Flutide", "Seretide", "Symbicort",
    "Tiotropium", "Spiriva", "Ipratropium", "Atrovent",
    "Montelukast", "Singulair", "Theophyllin",
    "Benralizumab", "Fasenra", "Mepolizumab", "Nucala",

    # --- Thyroid ---
    "Levothyroxin", "Euthyrox", "L-Thyroxin",
    "Carbimazol", "Neo-Mercazole", "Thiamazol", "Tapazole",

    # --- Urology ---
    "Tamsulosin", "Pradif", "Finasterid", "Proscar", "Propecia",
    "Dutasterid", "Avodart", "Sildenafil", "Viagra",
    "Tadalafil", "Cialis", "Solifenacin", "Vesicare",
    "Mirabegron", "Betmiga",

    # --- Ophthalmology ---
    "Timolol", "Latanoprost", "Xalatan", "Travoprost", "Travatan",
    "Dorzolamid", "Brinzolamid",

    # --- Other common drugs ---
    "Allopurinol", "Zyloric", "Febuxostat", "Adenuric",
    "Colchicin", "Colchysat",
    "Alendronsaeure", "Fosamax", "Risedronat", "Ibandronat",
    "Denosumab", "Prolia", "Xgeva",
    "Calcitonin", "Calcium", "Vitamin D", "Cholecalciferol",
    "Eisen", "Ferritin", "Ferinject", "Eisenfumarat",
    "Folsaeure", "Vitamin B12", "Hydroxocobalamin",
    "Magnesium", "Kalium",
    "Leflunomid", "Arava", "Mycophenolatmofetil", "CellCept",
    "Tacrolimus", "Prograf", "Everolimus", "Certican",
    "Sirolimus", "Rapamune",
    "Hydroxychloroquin", "Plaquenil", "Chloroquin",
    "Sulfasalazin", "Salazopyrin",
    "Tofacitinib", "Xeljanz",
    "Tocilizumab", "Actemra", "RoActemra",
    "Sarilumab", "Kevzara",
    "Belimumab", "Benlysta",
    "Vedolizumab", "Entyvio",
    "Natalizumab", "Tysabri",
    "Fingolimod", "Gilenya", "Dimethylfumarat", "Tecfidera",
    "Teriflunomid", "Aubagio",
    "Ocrelizumab", "Ocrevus",

    # --- Oncology (most common) ---
    "Tamoxifen", "Letrozol", "Femara", "Anastrozol", "Arimidex",
    "Imatinib", "Glivec", "Pembrolizumab", "Keytruda",
    "Nivolumab", "Opdivo", "Atezolizumab", "Tecentriq",
    "Ipilimumab", "Yervoy", "Trastuzumab", "Herceptin",
    "Bevacizumab", "Avastin",
    "Palbociclib", "Ibrance", "Ribociclib", "Kisqali",
})
|
||||
|
||||
# Case-insensitive lookup table: lowercase name -> canonical spelling.
_KNOWN_LOWER: dict[str, str] = {m.lower(): m for m in _KNOWN_MEDICATIONS}
|
||||
|
||||
|
||||
def validate_medication_name(name: str) -> tuple[bool, str | None]:
    """Check whether *name* is a known medication or active substance.

    Matching is case-insensitive and tolerates a simple plural "s" suffix.

    Args:
        name: Raw medication name as entered by the user.

    Returns:
        (True, canonical_name)   -- recognised; canonical spelling returned.
        (False, candidate_or_None) -- not recognised; candidate is the closest
            known name (fuzzy match) or None.
    """
    # Reject empty/too-short input outright (single characters match nothing
    # meaningfully).
    if not name or len(name.strip()) < 2:
        return (False, None)

    low = name.strip().lower()

    if low in _KNOWN_LOWER:
        return (True, _KNOWN_LOWER[low])

    # Plural tolerance: strip exactly one trailing "s" and retry as a direct
    # O(1) dict lookup.  (The previous implementation scanned the whole dict
    # and used rstrip("s"), which removed *all* trailing "s" characters and
    # could over-strip names.)
    singular = low.removesuffix("s")
    if singular != low and singular in _KNOWN_LOWER:
        return (True, _KNOWN_LOWER[singular])

    # Fuzzy fallback: single best close match above a conservative cutoff.
    candidates = difflib.get_close_matches(low, _KNOWN_LOWER.keys(), n=1, cutoff=0.75)
    if candidates:
        return (False, _KNOWN_LOWER[candidates[0]])

    return (False, None)
|
||||
|
||||
|
||||
def suggest_medication_candidate(name: str) -> str | None:
    """Return the best matching medication name, or None if nothing fits."""
    is_known, near_match = validate_medication_name(name)
    # Exact/plural hit: echo the user's (trimmed) input; otherwise the fuzzy
    # candidate (possibly None).
    return name.strip() if is_known else near_match
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Kuratierte Medikamenten-Fakten aus Fachinformationen (Compendium.ch / EMA)
|
||||
# ---------------------------------------------------------------------------
|
||||
# Jeder Eintrag ist MANUELL aus der jeweiligen Fachinformation extrahiert.
|
||||
# NUR Informationen, die direkt aus der Quelle ableitbar sind.
|
||||
# Felder: wirkstoff, indikation, einnahme, dosierung, wichtig, quelle
|
||||
|
||||
# Manually curated fact sheets keyed by lowercase active-substance name.
# Fields per entry: wirkstoff (active substance), indikation (indication),
# einnahme (how to take), dosierung (dosing), wichtig (key warnings),
# quelle (source reference).  German field values are user-facing text and
# must not be altered programmatically.
_MEDICATION_FACTS: dict[str, dict[str, str]] = {
    "bilastin": {
        "wirkstoff": "Bilastin",
        "indikation": "Nicht-sedierendes Antihistaminikum (H1-Blocker, 2. Generation). Symptomatische Behandlung allergische Rhinokonjunktivitis und Urtikaria.",
        "einnahme": "Nuechtern einnehmen: mindestens 1 Stunde vor oder 2 Stunden nach einer Mahlzeit. Nicht mit Fruchtsaft (insbesondere Grapefruit) einnehmen, da dies die Bioverfuegbarkeit reduziert.",
        "dosierung": "Erwachsene und Jugendliche ab 12 Jahren: 20 mg 1x taeglich.",
        "wichtig": "Keine klinisch relevante Sedierung in Studien. Kein relevanter Einfluss auf QTc in therapeutischer Dosis.",
        "quelle": "Fachinformation Bilastin / Compendium.ch, EMA EPAR Bilastin",
    },
    "cetirizin": {
        "wirkstoff": "Cetirizin",
        "indikation": "Antihistaminikum (H1-Blocker, 2. Generation). Allergische Rhinitis, chronische spontane Urtikaria.",
        "einnahme": "Unabhaengig von Mahlzeiten einnehmbar.",
        "dosierung": "Erwachsene: 10 mg 1x taeglich. Bei Niereninsuffizienz Dosisreduktion.",
        "wichtig": "Kann Muedigkeit verursachen. Dosisanpassung bei Niereninsuffizienz erforderlich.",
        "quelle": "Fachinformation Cetirizin / Compendium.ch",
    },
    "loratadin": {
        "wirkstoff": "Loratadin",
        "indikation": "Antihistaminikum (H1-Blocker, 2. Generation). Allergische Rhinitis, chronische Urtikaria.",
        "einnahme": "Unabhaengig von Mahlzeiten einnehmbar.",
        "dosierung": "Erwachsene und Kinder ab 12 Jahren: 10 mg 1x taeglich.",
        "wichtig": "Nicht sedierend in therapeutischer Dosis. Vorsicht bei schwerer Leberinsuffizienz.",
        "quelle": "Fachinformation Loratadin / Compendium.ch",
    },
    "desloratadin": {
        "wirkstoff": "Desloratadin",
        "indikation": "Antihistaminikum (H1-Blocker, 2. Generation). Aktiver Metabolit von Loratadin. Allergische Rhinitis, chronische Urtikaria.",
        "einnahme": "Unabhaengig von Mahlzeiten einnehmbar.",
        "dosierung": "Erwachsene: 5 mg 1x taeglich.",
        "wichtig": "Sehr geringe Sedierung. Dosisanpassung bei Nieren-/Leberinsuffizienz erwaegen.",
        "quelle": "Fachinformation Desloratadin (Aerius) / Compendium.ch",
    },
    "fexofenadin": {
        "wirkstoff": "Fexofenadin",
        "indikation": "Antihistaminikum (H1-Blocker, 2. Generation). Allergische Rhinitis, chronische Urtikaria.",
        "einnahme": "Nicht mit Fruchtsaft (Grapefruit, Orange, Apfel) einnehmen – reduziert Bioverfuegbarkeit. Vor einer Mahlzeit einnehmen.",
        "dosierung": "Allergische Rhinitis: 120 mg 1x taeglich. Urtikaria: 180 mg 1x taeglich.",
        "wichtig": "Nicht sedierend. Keine Dosisanpassung bei Leberinsuffizienz noetig.",
        "quelle": "Fachinformation Fexofenadin (Telfast) / Compendium.ch",
    },
    "ibuprofen": {
        "wirkstoff": "Ibuprofen",
        "indikation": "NSAR. Schmerzen, Entzuendungen, Fieber.",
        "einnahme": "Zu oder nach einer Mahlzeit einnehmen (Magenschutz). Mit ausreichend Fluessigkeit.",
        "dosierung": "200-400 mg alle 6-8 Stunden. Maximaldosis OTC: 1200 mg/Tag. Verschreibungspflichtig: bis 2400 mg/Tag.",
        "wichtig": "GI-Blutungsrisiko. KI bei schwerer Nieren-/Leberinsuffizienz, 3. Trimenon, aktiver GI-Blutung.",
        "quelle": "Fachinformation Ibuprofen / Compendium.ch",
    },
    "methotrexat": {
        "wirkstoff": "Methotrexat",
        "indikation": "Immunsuppressivum / Zytostatikum. Schwere Psoriasis, rheumatoide Arthritis, weitere Autoimmunerkrankungen.",
        "einnahme": "CAVE: 1x WOECHENTLICH, NICHT taeglich! Oral oder s.c. Folsaeure-Substitution (5 mg) 24-48 Stunden nach MTX-Gabe empfohlen.",
        "dosierung": "Dermatologie: 7.5-25 mg 1x pro Woche. Einschleichend beginnen.",
        "wichtig": "Verwechslungsgefahr taeglich/woechentlich ist lebensgefaehrlich! Regelmaessige Kontrolle von Blutbild, Leber- und Nierenwerten. Teratogen.",
        "quelle": "Fachinformation Methotrexat / Compendium.ch",
    },
    "dupilumab": {
        "wirkstoff": "Dupilumab",
        "indikation": "Monoklonaler Antikoerper (Anti-IL-4/IL-13). Mittelschwere bis schwere atopische Dermatitis, Asthma, CRSwNP.",
        "einnahme": "Subkutane Injektion. Injektionsstellen wechseln.",
        "dosierung": "Atopische Dermatitis Erwachsene: 600 mg Initialdosis (2x 300 mg), dann 300 mg alle 2 Wochen.",
        "wichtig": "Haeufigste NW: Reaktion an Injektionsstelle, Konjunktivitis. Keine Lebendimpfstoffe waehrend Therapie.",
        "quelle": "Fachinformation Dupixent (Dupilumab) / Compendium.ch, EMA EPAR",
    },
    "prednisolon": {
        "wirkstoff": "Prednisolon",
        "indikation": "Glukokortikoid. Entzuendliche und allergische Erkrankungen, Autoimmunerkrankungen.",
        "einnahme": "Morgens einnehmen (entsprechend zirkadianem Cortisol-Rhythmus). Mit oder nach einer Mahlzeit.",
        "dosierung": "Stark indikationsabhaengig. Stosstherapie, ausschleichend oder Erhaltungsdosis je nach Erkrankung.",
        "wichtig": "Ausschleichen bei Therapie ueber 2 Wochen. Osteoporose-Prophylaxe bei Langzeittherapie. Infektionsrisiko erhoht.",
        "quelle": "Fachinformation Prednisolon / Compendium.ch",
    },
    "aciclovir": {
        "wirkstoff": "Aciclovir",
        "indikation": "Virostatikum. Herpes-simplex-Infektionen, Varizella-Zoster-Infektionen.",
        "einnahme": "Mit reichlich Fluessigkeit einnehmen (Kristallurierisiko). Unabhaengig von Mahlzeiten.",
        "dosierung": "Herpes simplex: 5x 200 mg/Tag. Zoster: 5x 800 mg/Tag fuer 7 Tage.",
        "wichtig": "Ausreichende Hydratation wichtig. Dosisanpassung bei Niereninsuffizienz. Fruehzeitiger Therapiebeginn entscheidend.",
        "quelle": "Fachinformation Aciclovir (Zovirax) / Compendium.ch",
    },
    "isotretinoin": {
        "wirkstoff": "Isotretinoin",
        "indikation": "Retinoid. Schwere, therapieresistente Akne (nodulaer-zystisch).",
        "einnahme": "Mit einer fetthaltigen Mahlzeit einnehmen (verbessert Resorption erheblich).",
        "dosierung": "0.5-1.0 mg/kg/Tag, Gesamtkumulativdosis 120-150 mg/kg.",
        "wichtig": "STRENG TERATOGEN – Schwangerschaftsverhuetungsprogramm obligat. Trockene Haut/Lippen haeufig. Regelmaessige Laborkontrolle (Lipide, Leberwerte).",
        "quelle": "Fachinformation Isotretinoin (Roaccutan) / Compendium.ch",
    },
    "omeprazol": {
        "wirkstoff": "Omeprazol",
        "indikation": "Protonenpumpeninhibitor (PPI). Refluxoesophagitis, peptisches Ulkus, Magenschutz bei NSAR.",
        "einnahme": "Vor einer Mahlzeit einnehmen (idealerweise 30 Min. vor dem Fruehstueck). Kapsel ganz schlucken.",
        "dosierung": "Standarddosis: 20 mg 1x taeglich. Bei Ulkus oder Reflux: 20-40 mg/Tag.",
        "wichtig": "Langzeitanwendung: Magnesium-/B12-Mangel, Osteoporoserisiko erhoht. Interaktion mit Clopidogrel beachten.",
        "quelle": "Fachinformation Omeprazol (Antra) / Compendium.ch",
    },
    "pantoprazol": {
        "wirkstoff": "Pantoprazol",
        "indikation": "Protonenpumpeninhibitor (PPI). Refluxoesophagitis, peptisches Ulkus, Magenschutz bei NSAR.",
        "einnahme": "Vor einer Mahlzeit einnehmen (idealerweise morgens). Tablette ganz schlucken.",
        "dosierung": "Standarddosis: 20-40 mg 1x taeglich.",
        "wichtig": "Langzeitanwendung: Magnesium-/B12-Mangel moeglich. Geringeres CYP-Interaktionspotenzial als Omeprazol.",
        "quelle": "Fachinformation Pantoprazol (Pantozol) / Compendium.ch",
    },
    "amoxicillin": {
        "wirkstoff": "Amoxicillin",
        "indikation": "Aminopenicillin. Bakterielle Infektionen der Atemwege, HNO, Harnwege, Haut.",
        "einnahme": "Unabhaengig von Mahlzeiten einnehmbar.",
        "dosierung": "Erwachsene: 750-3000 mg/Tag, aufgeteilt in 2-3 Einzeldosen.",
        "wichtig": "Penicillinallergie beachten. Exanthem besonders bei EBV-Infektion.",
        "quelle": "Fachinformation Amoxicillin / Compendium.ch",
    },
    "doxycyclin": {
        "wirkstoff": "Doxycyclin",
        "indikation": "Tetracyclin-Antibiotikum. Akne, Rosacea, bakterielle Infektionen, Borreliose.",
        "einnahme": "Mit reichlich Fluessigkeit und aufrecht einnehmen (Oesophagus-Ulkusrisiko). Milchprodukte und Antazida meiden.",
        "dosierung": "200 mg am 1. Tag, dann 100 mg 1x taeglich.",
        "wichtig": "Photosensibilisierung – Sonnenschutz! Nicht bei Kindern unter 8 Jahren und in der Schwangerschaft.",
        "quelle": "Fachinformation Doxycyclin / Compendium.ch",
    },
    "secukinumab": {
        "wirkstoff": "Secukinumab",
        "indikation": "Monoklonaler Antikoerper (Anti-IL-17A). Mittelschwere bis schwere Plaque-Psoriasis, Psoriasis-Arthritis.",
        "einnahme": "Subkutane Injektion. Fertigpen oder Fertigspritze. Injektionsstellen wechseln.",
        "dosierung": "Psoriasis: 300 mg s.c. Woche 0, 1, 2, 3, 4, danach 300 mg alle 4 Wochen.",
        "wichtig": "TB-Screening vor Therapiebeginn. Haeufigste NW: Infektionen obere Atemwege. Vorsicht bei CED.",
        "quelle": "Fachinformation Cosentyx (Secukinumab) / Compendium.ch, EMA EPAR",
    },
    "adalimumab": {
        "wirkstoff": "Adalimumab",
        "indikation": "TNF-alpha-Inhibitor. Psoriasis, rheumatoide Arthritis, Morbus Crohn, Colitis ulcerosa, Hidradenitis suppurativa.",
        "einnahme": "Subkutane Injektion. Injektionsstellen wechseln.",
        "dosierung": "Psoriasis: 80 mg Initialdosis, dann 40 mg alle 2 Wochen ab Woche 1.",
        "wichtig": "TB-Screening vor Therapiebeginn obligat. Erhoehtes Infektionsrisiko. Keine Lebendimpfstoffe.",
        "quelle": "Fachinformation Humira (Adalimumab) / Compendium.ch, EMA EPAR",
    },
    "paracetamol": {
        "wirkstoff": "Paracetamol",
        "indikation": "Analgetikum / Antipyretikum. Leichte bis maessige Schmerzen, Fieber.",
        "einnahme": "Unabhaengig von Mahlzeiten einnehmbar.",
        "dosierung": "Erwachsene: 500-1000 mg alle 4-6 Stunden. Maximaldosis: 4000 mg/Tag (Lebergesunde).",
        "wichtig": "Hepatotoxisch bei Ueberdosierung! Bei Leberinsuffizienz/Alkoholabusus Maximaldosis reduzieren.",
        "quelle": "Fachinformation Paracetamol (Dafalgan) / Compendium.ch",
    },
}
|
||||
|
||||
|
||||
def get_medication_facts(name: str) -> dict | None:
    """Return the curated fact sheet for a medication, or None when unknown.

    Lookup order: exact dict key, then active-substance ("wirkstoff") field;
    if both miss, the same two lookups are retried on the canonical name
    resolved via validate_medication_name().

    Args:
        name: Medication or active-substance name (any casing).

    Returns:
        The facts dict (wirkstoff/indikation/einnahme/dosierung/wichtig/quelle)
        or None if no entry exists.
    """
    def _lookup(key: str) -> dict | None:
        # One place for the key-then-wirkstoff search (was duplicated twice).
        if key in _MEDICATION_FACTS:
            return _MEDICATION_FACTS[key]
        for facts in _MEDICATION_FACTS.values():
            if facts["wirkstoff"].lower() == key:
                return facts
        return None

    if not name:
        return None

    found = _lookup(name.strip().lower())
    if found is not None:
        return found

    # Retry with the canonical spelling (handles plural/fuzzy-normalised input).
    is_valid, canonical = validate_medication_name(name)
    if is_valid and canonical:
        return _lookup(canonical.lower())
    return None
|
||||
82
AzA march 2026 - Kopie (18)/aza_medwork_messages.json
Normal file
@@ -0,0 +1,82 @@
|
||||
[
|
||||
{
|
||||
"id": "a9b5b2de-d7cd-40a9-8cae-41413c386522",
|
||||
"contact": "Dr. med. Muster (Innere Medizin)",
|
||||
"from": "Dr. med. Muster",
|
||||
"text": "Guten Tag, können Sie mir bitte den Befund zusenden?",
|
||||
"time": "09:15",
|
||||
"date": "2026-02-16",
|
||||
"read": true,
|
||||
"delivered": true
|
||||
},
|
||||
{
|
||||
"id": "71e735d7-0938-4666-b4e1-a3f01e951651",
|
||||
"contact": "Dr. med. Muster (Innere Medizin)",
|
||||
"from": "André M. Surovy",
|
||||
"text": "Ja, schicke ich Ihnen heute noch zu.",
|
||||
"time": "09:30",
|
||||
"date": "2026-02-16",
|
||||
"read": true,
|
||||
"delivered": true
|
||||
},
|
||||
{
|
||||
"id": "291c9937-6e97-4788-8f4e-28fc7b31a96b",
|
||||
"contact": "Dr. med. Muster (Innere Medizin)",
|
||||
"from": "Dr. med. Muster",
|
||||
"text": "Perfekt, vielen Dank!",
|
||||
"time": "09:32",
|
||||
"date": "2026-02-16",
|
||||
"read": true,
|
||||
"delivered": true
|
||||
},
|
||||
{
|
||||
"id": "5d440163-a49d-4e48-bf68-f3b80be1bd34",
|
||||
"contact": "Dr. med. Beispiel (Chirurgie)",
|
||||
"from": "Dr. med. Beispiel",
|
||||
"text": "Haben Sie nächste Woche Zeit für eine Besprechung?",
|
||||
"time": "14:20",
|
||||
"date": "2026-02-17",
|
||||
"read": true,
|
||||
"delivered": true
|
||||
},
|
||||
{
|
||||
"id": "4c164331-5654-498e-9816-fb1288462e28",
|
||||
"contact": "Dr. med. Test (Kardiologie)",
|
||||
"from": "André M. Surovy",
|
||||
"text": "Das EKG sieht unauffällig aus.",
|
||||
"time": "11:00",
|
||||
"date": "2026-02-17",
|
||||
"read": true,
|
||||
"delivered": true
|
||||
},
|
||||
{
|
||||
"id": "f0f47bbb-a4cc-4507-8d90-b593db65fc68",
|
||||
"contact": "Dr. med. Test (Kardiologie)",
|
||||
"from": "Dr. med. Test",
|
||||
"text": "Danke für die schnelle Rückmeldung!",
|
||||
"time": "11:15",
|
||||
"date": "2026-02-17",
|
||||
"read": true,
|
||||
"delivered": true
|
||||
},
|
||||
{
|
||||
"id": "b806c7bf-0578-4aab-9a84-db71ca10635a",
|
||||
"contact": "Dr. med. Muster (Innere Medizin)",
|
||||
"from": "André M. Surovy",
|
||||
"text": "test",
|
||||
"time": "23:23",
|
||||
"date": "2026-02-17",
|
||||
"read": true,
|
||||
"delivered": true
|
||||
},
|
||||
{
|
||||
"id": "374b589b-1343-4af8-a39c-61351d95c978",
|
||||
"contact": "Dr. med. Muster (Innere Medizin)",
|
||||
"from": "Dr. med. Muster",
|
||||
"text": "OK, wird erledigt.",
|
||||
"time": "23:23",
|
||||
"date": "2026-02-17",
|
||||
"read": true,
|
||||
"delivered": true
|
||||
}
|
||||
]
|
||||
315
AzA march 2026 - Kopie (18)/aza_monitoring.py
Normal file
@@ -0,0 +1,315 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
AZA MedWork – Monitoring & Integritaetspruefung.
|
||||
|
||||
Sammelt Health-Status, Metriken aus Audit-/Consent-Logs
|
||||
und fuehrt Integritaetschecks durch. Keine Patientendaten.
|
||||
|
||||
Nutzung:
|
||||
python aza_monitoring.py health -> Health-Checks
|
||||
python aza_monitoring.py metrics -> Metriken aus Logs
|
||||
python aza_monitoring.py integrity -> Integritaetspruefung
|
||||
python aza_monitoring.py all -> Alles zusammen
|
||||
python aza_monitoring.py nightly -> Nightly-Check (Integrity + Alert-Metriken)
|
||||
"""
|
||||
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import urllib.request
|
||||
import ssl
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
|
||||
_BASE_DIR = Path(__file__).resolve().parent
|
||||
|
||||
|
||||
# =====================================================================
|
||||
# 1) HEALTH CHECKS
|
||||
# =====================================================================
|
||||
|
||||
# Services probed by check_health().  URLs are overridable via environment
# variables (useful for non-default ports or remote deployments).
_SERVICES = [
    {"name": "backend_main", "url": os.getenv("AZA_BACKEND_URL", "https://127.0.0.1:8000/health")},
    {"name": "transcribe_server", "url": os.getenv("AZA_TRANSCRIBE_URL", "https://127.0.0.1:8090/health")},
    {"name": "todo_server", "url": os.getenv("AZA_TODO_URL", "https://127.0.0.1:5111/health")},
]
|
||||
|
||||
|
||||
def check_health(services=None) -> list:
    """Probe the /health endpoint of every configured service.

    Args:
        services: Optional list of {"name", "url"} dicts; defaults to _SERVICES.

    Returns:
        One result dict per service: "status" is "OK" (healthy), "WARN"
        (reachable but not reporting ok) or "FAIL" (unreachable), plus
        version/uptime/TLS fields when the endpoint answered.
    """
    if services is None:
        services = _SERVICES

    # NOTE(security): certificate verification is disabled because the local
    # services use self-signed certificates.  Do NOT reuse this context for
    # non-localhost targets.
    ctx = ssl.create_default_context()
    ctx.check_hostname = False
    ctx.verify_mode = ssl.CERT_NONE

    results = []
    for svc in services:
        entry = {"name": svc["name"], "url": svc["url"], "status": "FAIL", "detail": ""}
        try:
            req = urllib.request.Request(svc["url"], method="GET")
            # Use a context manager so the connection is closed even when
            # reading/decoding fails (the original never closed the response).
            with urllib.request.urlopen(req, timeout=5, context=ctx) as resp:
                data = json.loads(resp.read().decode("utf-8"))
            entry["status"] = "OK" if data.get("status") == "ok" else "WARN"
            entry["version"] = data.get("version", "?")
            entry["uptime_s"] = data.get("uptime_s", 0)
            entry["tls"] = data.get("tls", False)
        except Exception as e:
            entry["detail"] = str(e)[:120]
        results.append(entry)
    return results
|
||||
|
||||
|
||||
# =====================================================================
|
||||
# 2) MONITORING-METRIKEN
|
||||
# =====================================================================
|
||||
|
||||
def collect_metrics() -> dict:
    """Gather monitoring metrics from audit log, consent log and backups.

    Each section degrades to an {"error": ...} entry on failure so one
    broken source never hides the others.  No patient data is collected.
    """
    metrics = {
        "timestamp": datetime.now(timezone.utc).isoformat(),
        "audit_log": {},
        "consent_log": {},
        "backup": {},
    }

    # --- Audit log statistics -------------------------------------------
    try:
        from aza_audit_log import get_log_stats
        metrics["audit_log"] = get_log_stats()
    except Exception as e:
        metrics["audit_log"] = {"error": str(e)[:100]}

    # --- Consent log: entry count + hash-chain integrity ----------------
    try:
        from aza_consent import verify_chain_integrity
        chain_ok, _errors = verify_chain_integrity()
        consent_path = _BASE_DIR / "aza_consent_log.json"
        entry_count = 0
        if consent_path.exists():
            try:
                with open(consent_path, "r", encoding="utf-8") as fh:
                    entry_count = len(json.load(fh))
            except Exception:
                pass  # unreadable file -> report 0 entries
        metrics["consent_log"] = {
            "entries": entry_count,
            "integrity": "PASS" if chain_ok else "FAIL",
        }
    except Exception as e:
        metrics["consent_log"] = {"error": str(e)[:100]}

    # --- Backup archive inventory ---------------------------------------
    try:
        backup_dir = _BASE_DIR / "backups"
        if not backup_dir.exists():
            metrics["backup"] = {"count": 0, "latest": None}
        else:
            archives = sorted(
                backup_dir.glob("aza_backup_*.zip"),
                key=lambda p: p.stat().st_mtime,
                reverse=True,
            )
            newest = archives[0] if archives else None
            metrics["backup"] = {
                "count": len(archives),
                "latest": newest.name if newest else None,
                "latest_time": (
                    datetime.fromtimestamp(newest.stat().st_mtime, tz=timezone.utc).isoformat()
                    if newest else None
                ),
            }
    except Exception as e:
        metrics["backup"] = {"error": str(e)[:100]}

    return metrics
|
||||
|
||||
|
||||
def get_alert_metrics() -> list:
    """Derive security-relevant alert counters from audit-log statistics."""
    alerts = []
    try:
        from aza_audit_log import get_log_stats
        stats = get_log_stats()
        events = stats.get("events", {})

        # Failed logins: escalate once they pile up (>= 10 -> HIGH).
        failed_logins = events.get("LOGIN_FAIL", 0)
        if failed_logins > 0:
            level = "WARN" if failed_logins < 10 else "HIGH"
            alerts.append({"metric": "login_fail_count", "value": failed_logins, "severity": level})

        # Blocked AI requests are informational only.
        blocked = events.get("AI_BLOCKED", 0)
        if blocked > 0:
            alerts.append({"metric": "ai_blocked_count", "value": blocked, "severity": "INFO"})

        # Total AI usage is always reported, even when zero.
        total_ai = events.get("AI_CHAT", 0) + events.get("AI_TRANSCRIBE", 0)
        alerts.append({"metric": "ai_calls_total", "value": total_ai, "severity": "INFO"})

        # Failed 2FA attempts: escalate at five or more.
        failed_2fa = events.get("2FA_FAIL", 0)
        if failed_2fa > 0:
            level = "WARN" if failed_2fa < 5 else "HIGH"
            alerts.append({"metric": "2fa_fail_count", "value": failed_2fa, "severity": level})

        # A broken audit-log hash chain is always critical.
        if stats.get("integrity") == "FAIL":
            alerts.append({"metric": "audit_log_integrity", "value": "FAIL", "severity": "CRITICAL"})

    except Exception as e:
        # Not being able to read the audit log is itself a HIGH alert.
        alerts.append({"metric": "audit_log_read_error", "value": str(e)[:80], "severity": "HIGH"})

    return alerts
|
||||
|
||||
|
||||
# =====================================================================
|
||||
# 3) INTEGRITAETS-CHECKS
|
||||
# =====================================================================
|
||||
|
||||
def check_integrity() -> dict:
    """Verify the tamper-evidence chains of the audit log and consent log.

    Each check reports PASS/FAIL, or ERROR when the check itself could not
    run.  A FAIL in either chain is recorded back into the audit log
    (best effort).
    """
    results = {"timestamp": datetime.now(timezone.utc).isoformat(), "audit_log": {}, "consent_log": {}}

    # --- Audit log (including rotated files) ----------------------------
    try:
        # verify_integrity is imported alongside to preserve the original
        # import surface (an ImportError on it must still yield ERROR).
        from aza_audit_log import verify_integrity, verify_all_rotations, _LOG_FILE
        if not _LOG_FILE.exists():
            results["audit_log"] = {"status": "PASS", "note": "Keine Logdatei vorhanden"}
        else:
            ok_all, per_file = verify_all_rotations()
            results["audit_log"] = {
                "status": "PASS" if ok_all else "FAIL",
                "files": {fname: {"ok": info["ok"], "errors": info["errors"]} for fname, info in per_file.items()},
            }
    except Exception as e:
        results["audit_log"] = {"status": "ERROR", "error": str(e)[:120]}

    # --- Consent log hash chain -----------------------------------------
    try:
        from aza_consent import verify_chain_integrity
        consent_path = _BASE_DIR / "aza_consent_log.json"
        if not consent_path.exists():
            results["consent_log"] = {"status": "PASS", "note": "Keine Logdatei vorhanden"}
        else:
            chain_ok, chain_errors = verify_chain_integrity()
            results["consent_log"] = {
                "status": "PASS" if chain_ok else "FAIL",
                "errors": chain_errors,
            }
    except Exception as e:
        results["consent_log"] = {"status": "ERROR", "error": str(e)[:120]}

    # Record a FAIL in the audit log itself (best effort, never raises).
    if "FAIL" in (results["audit_log"].get("status"), results["consent_log"].get("status")):
        try:
            from aza_audit_log import log_event
            log_event("INTEGRITY_FAIL", source="monitoring",
                      detail=f"audit={results['audit_log'].get('status')} consent={results['consent_log'].get('status')}")
        except Exception:
            pass

    return results
|
||||
|
||||
|
||||
# =====================================================================
|
||||
# 4) NIGHTLY CHECK (alle Pruefungen + Ausgabe)
|
||||
# =====================================================================
|
||||
|
||||
def run_nightly() -> dict:
    """Run all nightly checks and summarise them in a single report dict."""
    report = {
        "timestamp": datetime.now(timezone.utc).isoformat(),
        "integrity": check_integrity(),
        "alerts": get_alert_metrics(),
        "metrics": collect_metrics(),
    }

    # Overall verdict: both chains PASS (or not applicable) and no
    # HIGH/CRITICAL alert present.
    audit_ok = report["integrity"]["audit_log"].get("status") in ("PASS", None)
    consent_ok = report["integrity"]["consent_log"].get("status") in ("PASS", None)
    escalated = any(a.get("severity") in ("HIGH", "CRITICAL") for a in report["alerts"])
    report["overall"] = "PASS" if (audit_ok and consent_ok and not escalated) else "ATTENTION"

    return report
|
||||
|
||||
|
||||
# =====================================================================
|
||||
# CLI
|
||||
# =====================================================================
|
||||
|
||||
def _print_health(results):
    """Print health-check results as an aligned console listing.

    *results* is a list of dicts with at least ``name`` and ``status``;
    healthy entries ("OK") additionally report version, uptime and TLS state.
    """
    print(f"\n{'='*60}")
    print("HEALTH CHECKS")
    print(f"{'='*60}")
    for r in results:
        status = r["status"]
        line = f"  {r['name']:25s} {status:4s}"
        if status == "OK":
            line += f" v{r.get('version','?')} uptime={r.get('uptime_s',0)}s tls={r.get('tls','?')}"
        else:
            line += f" {r.get('detail','')}"
        print(line)
|
||||
|
||||
|
||||
def _print_metrics(m):
    """Print collected metrics (audit log, consent log, backups) to stdout.

    *m* is the dict produced by ``collect_metrics()``; missing keys render
    as ``?`` so a partial metrics dict still prints without raising.
    """
    print(f"\n{'='*60}")
    print("METRIKEN")
    print(f"{'='*60}")
    al = m.get("audit_log", {})
    print(f"  Audit-Log: {al.get('total_lines', '?')} Eintraege, "
          f"Integritaet={al.get('integrity','?')}, "
          f"Groesse={al.get('size_mb','?')} MB")
    for ev, cnt in sorted(al.get("events", {}).items()):
        print(f"    {ev}: {cnt}")

    cl = m.get("consent_log", {})
    print(f"  Consent-Log: {cl.get('entries','?')} Eintraege, Integritaet={cl.get('integrity','?')}")

    bk = m.get("backup", {})
    print(f"  Backups: {bk.get('count','?')} vorhanden, letztes={bk.get('latest','keins')}")
    if bk.get("latest_time"):
        print(f"    Zeitpunkt: {bk['latest_time']}")
|
||||
|
||||
|
||||
def _print_integrity(r):
    """Print integrity-check results for the audit and consent logs.

    *r* is the dict from ``check_integrity()``; per log it shows the status
    plus any collected error lines and an optional note.
    """
    print(f"\n{'='*60}")
    print("INTEGRITAETS-CHECKS")
    print(f"{'='*60}")
    for name in ("audit_log", "consent_log"):
        info = r.get(name, {})
        status = info.get("status", "?")
        print(f"  {name:15s} {status}")
        for e in info.get("errors", []):
            print(f"    {e}")
        if info.get("note"):
            print(f"    ({info['note']})")
|
||||
|
||||
|
||||
def _print_alerts(alerts):
    """Print security alerts (severity, metric, value) or a 'none' notice.

    *alerts* is the list from ``get_alert_metrics()``.
    """
    print(f"\n{'='*60}")
    print("SICHERHEITS-ALERTS")
    print(f"{'='*60}")
    if not alerts:
        print("  Keine Alerts.")
    for a in alerts:
        print(f"  [{a['severity']:8s}] {a['metric']}: {a['value']}")
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # CLI dispatcher: the first argument selects which check(s) to run;
    # with no argument everything ("all") is executed.
    cmd = sys.argv[1] if len(sys.argv) > 1 else "all"

    if cmd == "health":
        _print_health(check_health())
    elif cmd == "metrics":
        _print_metrics(collect_metrics())
    elif cmd == "integrity":
        r = check_integrity()
        _print_integrity(r)
        # Non-zero exit code lets schedulers (cron / Task Scheduler) alert on failure.
        ok = all(r[k].get("status") in ("PASS", None) for k in ("audit_log", "consent_log"))
        sys.exit(0 if ok else 1)
    elif cmd == "alerts":
        _print_alerts(get_alert_metrics())
    elif cmd == "nightly":
        report = run_nightly()
        _print_integrity(report["integrity"])
        _print_alerts(report["alerts"])
        print(f"\n  GESAMT: {report['overall']}")
        # Persist the nightly report as a dated JSON file next to the app data.
        out = _BASE_DIR / f"monitoring_nightly_{datetime.now().strftime('%Y-%m-%d')}.json"
        with open(out, "w", encoding="utf-8") as f:
            json.dump(report, f, ensure_ascii=False, indent=2)
        print(f"  Report: {out}")
        sys.exit(0 if report["overall"] == "PASS" else 1)
    elif cmd == "all":
        _print_health(check_health())
        m = collect_metrics()
        _print_metrics(m)
        r = check_integrity()
        _print_integrity(r)
        _print_alerts(get_alert_metrics())
    else:
        print("Nutzung: python aza_monitoring.py [health|metrics|integrity|alerts|nightly|all]")
        sys.exit(1)
|
||||
477
AzA march 2026 - Kopie (18)/aza_news_backend.py
Normal file
@@ -0,0 +1,477 @@
|
||||
"""Aggregator for open-access medical news and congress events."""
|
||||
|
||||
from __future__ import annotations

import hashlib
import json
import re
import threading
import urllib.parse
import urllib.request
import xml.etree.ElementTree as ET
from dataclasses import asdict, dataclass, field
from datetime import date, datetime, timedelta, timezone
from email.utils import parsedate_to_datetime
from pathlib import Path
from typing import Any
|
||||
|
||||
|
||||
# Cache lifetimes in seconds: fetched news items, aggregated seed events,
# and per-URL liveness probes for event links.
NEWS_CACHE_TTL_SECONDS = 30 * 60
EVENTS_CACHE_TTL_SECONDS = 45 * 60
EVENT_URL_CHECK_TTL_SECONDS = 12 * 60 * 60
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class NewsFilter:
    """Filter and sort options accepted by ``get_news``."""

    specialties: list[str]  # specialty tokens, e.g. ["dermatology"]
    regions: list[str]      # region codes, e.g. ["CH", "EU"]
    language: str = "de"    # target language for the translation stub
    sort: str = "newest"    # "newest" (default) or "oldest"
    limit: int = 30         # maximum number of items returned
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class EventFilter:
    """Filter and sort options accepted by ``get_events``."""

    specialties: list[str]  # specialty tokens
    regions: list[str]      # region codes
    from_date: date         # inclusive window start
    to_date: date           # inclusive window end
    sort: str = "soonest"   # "soonest" (default) or "latest"
    limit: int = 100        # maximum number of events returned
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class NewsItem:
    """One aggregated news entry (camelCase fields mirror the API payload)."""

    id: str
    source: str
    title: str
    url: str
    publishedAt: str  # ISO-8601 timestamp string
    tags: list[str]
    languageOriginal: str
    isOpenAccess: bool
    evidenceType: str  # e.g. "official", "review", "preprint", "peer-reviewed"
    summaryOriginal: str
    summaryTranslated: str | None = None  # set by the translation stub, if applicable
    regions: list[str] = field(default_factory=list)
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class EventItem:
    """One congress/event entry, loaded from the JSON seed file."""

    id: str
    name: str
    startDate: str  # ISO date (YYYY-MM-DD)
    endDate: str    # ISO date (YYYY-MM-DD)
    city: str
    country: str
    regions: list[str]
    tags: list[str]
    url: str
    description: str
    type: str  # e.g. "kongress"
    cmeFlag: bool = False  # whether CME credits are offered
    organizer: str = ""
    source: str = ""
    icsUrl: str | None = None  # optional calendar (ICS) link
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class FeedSource:
    """Static description of one RSS/Atom feed polled by the aggregator."""

    name: str
    url: str
    regions: list[str]
    language: str
    default_tags: list[str]
    evidence_type: str  # copied onto every item fetched from this feed
|
||||
|
||||
|
||||
# Curated open-access feeds polled on every cache refresh (best-effort:
# a failing feed contributes nothing instead of raising).
NEWS_SOURCES: tuple[FeedSource, ...] = (
    FeedSource("WHO", "https://www.who.int/feeds/entity/news-room/releases/en/rss.xml", ["WORLD"], "en", ["public-health"], "official"),
    FeedSource("ECDC", "https://www.ecdc.europa.eu/en/rss.xml", ["EU"], "en", ["public-health"], "official"),
    FeedSource("CDC", "https://tools.cdc.gov/api/v2/resources/media/132608.rss", ["WORLD"], "en", ["public-health"], "official"),
    FeedSource("EMA", "https://www.ema.europa.eu/en/news-events/news/rss.xml", ["EU"], "en", ["drug-safety"], "official"),
    FeedSource("Swissmedic", "https://www.swissmedic.ch/swissmedic/en/home/news/rss-feed/_jcr_content/contentPar/rssfeed.rss", ["CH"], "en", ["drug-safety"], "official"),
    FeedSource("Cochrane", "https://www.cochrane.org/news/rss.xml", ["WORLD"], "en", ["evidence-based-medicine"], "review"),
    FeedSource("medRxiv", "https://connect.medrxiv.org/relate/feed/medrxiv.xml", ["WORLD"], "en", ["preprint", "dermatology"], "preprint"),
    FeedSource("bioRxiv", "https://connect.biorxiv.org/relate/feed/biorxiv.xml", ["WORLD"], "en", ["preprint", "research"], "preprint"),
)
|
||||
|
||||
# Defaults applied when a caller supplies no filter values.
DEFAULT_SPECIALTY = "dermatology"
DEFAULT_NEWS_REGIONS = ["CH", "EU"]
DEFAULT_EVENT_REGIONS = ["CH", "EU"]

# Module-level caches, each guarded by its own lock (functions here may be
# called from multiple threads).  "expires_at" is a UNIX timestamp;
# "seed_mtime" additionally invalidates the events cache when the seed file
# changes on disk.
_news_cache_lock = threading.Lock()
_events_cache_lock = threading.Lock()
_event_url_cache_lock = threading.Lock()
_news_cache: dict[str, Any] = {"payload": [], "expires_at": 0.0}
_events_cache: dict[str, Any] = {"payload": [], "expires_at": 0.0, "seed_mtime": 0.0}
# URL -> (is_reachable, cache_expiry_timestamp)
_event_url_status_cache: dict[str, tuple[bool, float]] = {}
|
||||
|
||||
|
||||
def _now_ts() -> float:
|
||||
return datetime.now(timezone.utc).timestamp()
|
||||
|
||||
|
||||
def _clean_text(text: str) -> str:
|
||||
cleaned = re.sub(r"<[^>]+>", " ", text or "")
|
||||
cleaned = re.sub(r"\s+", " ", cleaned).strip()
|
||||
return cleaned
|
||||
|
||||
|
||||
def _safe_summary(text: str) -> str:
    """Clean *text* and cap it at 520 characters, ending with an ellipsis."""
    summary = _clean_text(text)
    if len(summary) <= 520:
        return summary
    # 517 chars + "..." keeps the result at exactly 520 characters max.
    return summary[:517].rstrip() + "..."
|
||||
|
||||
|
||||
def _parse_datetime(raw: str) -> datetime:
|
||||
if not raw:
|
||||
return datetime.now(timezone.utc)
|
||||
try:
|
||||
dt = parsedate_to_datetime(raw)
|
||||
return dt if dt.tzinfo else dt.replace(tzinfo=timezone.utc)
|
||||
except Exception:
|
||||
pass
|
||||
try:
|
||||
dt2 = datetime.fromisoformat(raw.replace("Z", "+00:00"))
|
||||
return dt2 if dt2.tzinfo else dt2.replace(tzinfo=timezone.utc)
|
||||
except Exception:
|
||||
return datetime.now(timezone.utc)
|
||||
|
||||
|
||||
def _read_url(url: str, timeout: int = 12) -> bytes:
    """Fetch *url* and return the raw response body.

    A custom User-Agent is sent with every request; network/HTTP errors
    propagate to the caller (all callers here catch broadly).
    """
    req = urllib.request.Request(url, headers={"User-Agent": "AZA-News-Aggregator/1.0"})
    with urllib.request.urlopen(req, timeout=timeout) as resp:
        return resp.read()
|
||||
|
||||
|
||||
def _is_live_event_url(url: str) -> bool:
    """Return True if the event URL responds with a status < 400 (cached).

    Results are cached per URL for EVENT_URL_CHECK_TTL_SECONDS so repeated
    event listings do not re-probe the same hosts.  A HEAD request is tried
    first; on any failure (e.g. hosts rejecting HEAD) a GET is used as
    fallback before the URL is declared dead.
    """
    u = (url or "").strip()
    if not u:
        return False
    now = _now_ts()
    with _event_url_cache_lock:
        cached = _event_url_status_cache.get(u)
        if cached and cached[1] > now:
            # Cache entry is (ok, expires_at) and still valid.
            return bool(cached[0])
    ok = False
    try:
        req = urllib.request.Request(
            u,
            headers={"User-Agent": "AZA-News-Aggregator/1.0"},
            method="HEAD",
        )
        with urllib.request.urlopen(req, timeout=10) as resp:
            ok = int(getattr(resp, "status", 200)) < 400
    except Exception:
        # HEAD failed (method not allowed, timeout, ...): retry once with GET.
        try:
            req = urllib.request.Request(
                u,
                headers={"User-Agent": "AZA-News-Aggregator/1.0"},
                method="GET",
            )
            with urllib.request.urlopen(req, timeout=12) as resp:
                ok = int(getattr(resp, "status", 200)) < 400
        except Exception:
            ok = False
    with _event_url_cache_lock:
        _event_url_status_cache[u] = (ok, now + EVENT_URL_CHECK_TTL_SECONDS)
    return ok
|
||||
|
||||
|
||||
def _rss_items(source: FeedSource, limit: int = 25) -> list[NewsItem]:
    """Fetch and parse one RSS/Atom feed into ``NewsItem`` objects.

    Returns at most *limit* items; any fetch or XML parse error yields an
    empty list (feeds are polled best-effort).
    """
    try:
        payload = _read_url(source.url)
        root = ET.fromstring(payload)
    except Exception:
        return []

    channel = root.find("channel")
    if channel is not None:
        entries = channel.findall("item")
    else:
        # Atom feeds have no <channel>; look for namespaced <entry> nodes.
        entries = root.findall(".//{http://www.w3.org/2005/Atom}entry")

    items: list[NewsItem] = []
    for node in entries[:limit]:
        title = _clean_text((node.findtext("title") or node.findtext("{http://www.w3.org/2005/Atom}title") or "Ohne Titel"))
        link = _clean_text(node.findtext("link") or "")
        if not link:
            # Atom puts the URL in the <link href="..."> attribute.
            atom_link = node.find("{http://www.w3.org/2005/Atom}link")
            if atom_link is not None:
                link = _clean_text(atom_link.attrib.get("href") or source.url)
        summary = node.findtext("description") or node.findtext("{http://www.w3.org/2005/Atom}summary") or ""
        pub_raw = node.findtext("pubDate") or node.findtext("{http://www.w3.org/2005/Atom}updated") or ""
        published_at = _parse_datetime(pub_raw).isoformat()
        # Deterministic ID: the built-in hash() is salted per process
        # (PYTHONHASHSEED), so IDs would change between runs and break any
        # client-side dedup/bookmarking.  Use a stable digest instead.
        digest = hashlib.sha1(
            f"{title}\n{link}\n{published_at}".encode("utf-8")
        ).hexdigest()[:16]
        items.append(
            NewsItem(
                id=f"{source.name.lower()}-{digest}",
                source=source.name,
                title=title or "Ohne Titel",
                url=link or source.url,
                publishedAt=published_at,
                tags=list(source.default_tags),
                languageOriginal=source.language,
                isOpenAccess=True,
                evidenceType=source.evidence_type,
                summaryOriginal=_safe_summary(summary) or "Kurz-Zusammenfassung in der Quelle nicht verfügbar.",
                regions=list(source.regions),
            )
        )
    return items
|
||||
|
||||
|
||||
def _pubmed_open_access_news(limit: int = 12) -> list[NewsItem]:
    """Fetch recent open-access dermatology entries via PubMed E-utilities.

    Two-step flow: ``esearch`` returns PMIDs, ``esummary`` returns their
    metadata.  Returns [] on any network/parse error (best-effort, like
    the RSS feeds).
    """
    term = '(dermatology[Title/Abstract]) AND ("open access"[Filter])'
    query = urllib.parse.urlencode(
        {
            "db": "pubmed",
            "retmode": "json",
            "retmax": str(limit),
            "sort": "pub+date",
            "term": term,
        }
    )
    search_url = f"https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?{query}"
    try:
        ids_payload = json.loads(_read_url(search_url).decode("utf-8", errors="ignore"))
        ids = ids_payload.get("esearchresult", {}).get("idlist", [])
    except Exception:
        return []
    if not ids:
        return []

    summary_url = (
        "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esummary.fcgi?"
        + urllib.parse.urlencode({"db": "pubmed", "retmode": "json", "id": ",".join(ids)})
    )
    try:
        summary_payload = json.loads(_read_url(summary_url).decode("utf-8", errors="ignore"))
    except Exception:
        return []

    out: list[NewsItem] = []
    for pmid in ids:
        rec = summary_payload.get("result", {}).get(pmid) or {}
        title = _clean_text(str(rec.get("title") or ""))
        if not title:
            # Records without a usable title are skipped.
            continue
        pubdate = _clean_text(str(rec.get("pubdate") or ""))
        dt = _parse_datetime(pubdate)
        url = f"https://pubmed.ncbi.nlm.nih.gov/{pmid}/"
        out.append(
            NewsItem(
                id=f"pubmed-{pmid}",
                source="PubMed",
                title=title,
                url=url,
                publishedAt=dt.isoformat(),
                tags=["dermatology", "open-access"],
                languageOriginal="en",
                isOpenAccess=True,
                evidenceType="peer-reviewed",
                summaryOriginal="Open-Access Eintrag aus PubMed. Volltext/Abstract je nach Journal frei verfügbar.",
                regions=["WORLD"],
            )
        )
    return out
|
||||
|
||||
|
||||
def _normalize_tokens(values: list[str] | None, default: list[str]) -> list[str]:
|
||||
out = [str(v).strip().lower() for v in (values or []) if str(v).strip()]
|
||||
return out or list(default)
|
||||
|
||||
|
||||
def _match_regions(item_regions: list[str], selected_regions: list[str]) -> bool:
|
||||
if not selected_regions:
|
||||
return True
|
||||
selected = {r.lower() for r in selected_regions}
|
||||
if "world" in selected or "worldwide" in selected:
|
||||
return True
|
||||
item_norm = {r.lower() for r in item_regions}
|
||||
# Strikter Regionsabgleich: EU zeigt nur EU/CH-Events, nicht WORLD/US/CA.
|
||||
if "eu" in selected and ("eu" in item_norm or "ch" in item_norm):
|
||||
return True
|
||||
if "ch" in selected and "ch" in item_norm:
|
||||
return True
|
||||
return bool(item_norm.intersection(selected))
|
||||
|
||||
|
||||
def _match_specialties(item_tags: list[str], selected_specialties: list[str]) -> bool:
|
||||
if "all" in selected_specialties:
|
||||
return True
|
||||
tags = {t.lower() for t in item_tags}
|
||||
selected = set(selected_specialties)
|
||||
if "dermatology" in selected:
|
||||
selected.update({"skin"})
|
||||
if "infectiology" in selected:
|
||||
selected.update({"public-health", "infectiology"})
|
||||
if "general-medicine" in selected:
|
||||
selected.update({"public-health", "evidence-based-medicine", "internal-medicine"})
|
||||
if "internal-medicine" in selected:
|
||||
selected.update({"general-medicine", "internal-medicine"})
|
||||
return bool(tags.intersection(selected))
|
||||
|
||||
|
||||
def _translate_summary_stub(summary: str, target_language: str, source_language: str) -> str | None:
|
||||
target = (target_language or "").strip().lower()
|
||||
source = (source_language or "").strip().lower()
|
||||
if not target or target in {"system", "auto", source}:
|
||||
return None
|
||||
return f"[Übersetzung nicht konfiguriert: {source}->{target}] {summary}"
|
||||
|
||||
|
||||
def get_news(filters: NewsFilter) -> list[NewsItem]:
    """Return filtered, sorted news items, refreshing the feed cache as needed.

    The raw fetch of all feeds is cached module-wide for
    NEWS_CACHE_TTL_SECONDS; filtering/sorting always runs on the cached
    superset.  The result is capped at 1..120 items.
    """
    now = _now_ts()
    with _news_cache_lock:
        if float(_news_cache["expires_at"]) > now:
            all_items = list(_news_cache["payload"])
        else:
            # Cache expired: re-fetch all sources.  Network I/O happens under
            # the lock, so concurrent callers wait instead of fetching twice.
            fetched: list[NewsItem] = []
            for src in NEWS_SOURCES:
                fetched.extend(_rss_items(src, limit=24))
            fetched.extend(_pubmed_open_access_news(limit=16))
            fetched.sort(key=lambda x: x.publishedAt, reverse=True)
            _news_cache["payload"] = fetched
            _news_cache["expires_at"] = now + NEWS_CACHE_TTL_SECONDS
            all_items = fetched

    specialties = _normalize_tokens(filters.specialties, [DEFAULT_SPECIALTY])
    regions = _normalize_tokens(filters.regions, DEFAULT_NEWS_REGIONS)
    filtered = [item for item in all_items if _match_specialties(item.tags, specialties) and _match_regions(item.regions, regions)]
    if filters.sort == "oldest":
        filtered.sort(key=lambda x: x.publishedAt)
    else:
        filtered.sort(key=lambda x: x.publishedAt, reverse=True)

    out: list[NewsItem] = []
    # Rebuild each (frozen) item so it can carry the per-request translation.
    for item in filtered[: max(1, min(filters.limit, 120))]:
        out.append(
            NewsItem(
                id=item.id,
                source=item.source,
                title=item.title,
                url=item.url,
                publishedAt=item.publishedAt,
                tags=item.tags,
                languageOriginal=item.languageOriginal,
                isOpenAccess=item.isOpenAccess,
                evidenceType=item.evidenceType,
                summaryOriginal=item.summaryOriginal,
                summaryTranslated=_translate_summary_stub(item.summaryOriginal, filters.language, item.languageOriginal),
                regions=item.regions,
            )
        )
    return out
|
||||
|
||||
|
||||
def _seed_events_path() -> Path:
|
||||
return Path(__file__).resolve().parent / "news_events_seed.json"
|
||||
|
||||
|
||||
def _load_seed_events() -> list[EventItem]:
    """Load curated events from the JSON seed file.

    Expects ``{"events": [ {...}, ... ]}``.  A missing/unreadable file
    returns []; malformed rows (non-dicts or rows missing required keys)
    are skipped silently.
    """
    try:
        with open(_seed_events_path(), "r", encoding="utf-8") as f:
            payload = json.load(f)
    except Exception:
        return []
    rows = payload.get("events") if isinstance(payload, dict) else None
    if not isinstance(rows, list):
        return []
    out: list[EventItem] = []
    for row in rows:
        if not isinstance(row, dict):
            continue
        try:
            out.append(
                EventItem(
                    id=str(row["id"]),
                    name=str(row["name"]),
                    startDate=str(row["startDate"]),
                    endDate=str(row["endDate"]),
                    city=str(row.get("city") or ""),
                    country=str(row.get("country") or ""),
                    # Regions are normalized to upper case, tags to lower case.
                    regions=[str(r).upper() for r in row.get("regions", []) if str(r).strip()],
                    tags=[str(t).lower() for t in row.get("tags", []) if str(t).strip()],
                    url=str(row.get("url") or ""),
                    description=str(row.get("description") or ""),
                    type=str(row.get("type") or "kongress"),
                    cmeFlag=bool(row.get("cmeFlag", False)),
                    organizer=str(row.get("organizer") or ""),
                    source=str(row.get("source") or ""),
                    icsUrl=(str(row.get("icsUrl")).strip() if row.get("icsUrl") else None),
                )
            )
        except Exception:
            # Row is missing a required key or otherwise malformed: skip it.
            continue
    return out
|
||||
|
||||
|
||||
def get_events(filters: EventFilter) -> list[EventItem]:
    """Return seed events filtered by date window, specialty, region and URL liveness.

    The parsed seed file is cached for EVENTS_CACHE_TTL_SECONDS and is
    additionally invalidated when the seed file's mtime changes on disk.
    The result is capped at 1..300 events.
    """
    now = _now_ts()
    try:
        seed_mtime = _seed_events_path().stat().st_mtime
    except Exception:
        seed_mtime = 0.0
    with _events_cache_lock:
        cache_mtime = float(_events_cache.get("seed_mtime", 0.0))
        if float(_events_cache["expires_at"]) > now and cache_mtime == seed_mtime:
            source_items = list(_events_cache["payload"])
        else:
            source_items = _load_seed_events()
            _events_cache["payload"] = source_items
            _events_cache["expires_at"] = now + EVENTS_CACHE_TTL_SECONDS
            _events_cache["seed_mtime"] = seed_mtime

    specialties = _normalize_tokens(filters.specialties, [DEFAULT_SPECIALTY])
    regions = _normalize_tokens(filters.regions, DEFAULT_EVENT_REGIONS)
    out: list[EventItem] = []
    for item in source_items:
        try:
            start = date.fromisoformat(item.startDate)
            end = date.fromisoformat(item.endDate)
        except Exception:
            continue
        # Keep events that overlap the [from_date, to_date] window.
        if end < filters.from_date or start > filters.to_date:
            continue
        if not _match_specialties(item.tags, specialties):
            continue
        if not _match_regions(item.regions, regions):
            continue
        # Drop events whose website is unreachable (cached liveness probe;
        # may trigger network I/O for uncached URLs).
        if not _is_live_event_url(item.url):
            continue
        out.append(item)

    if filters.sort == "latest":
        out.sort(key=lambda x: x.startDate, reverse=True)
    else:
        out.sort(key=lambda x: x.startDate)
    return out[: max(1, min(filters.limit, 300))]
|
||||
|
||||
|
||||
# Backward-compatible wrappers used by backend_main.py
|
||||
def get_news_items(specialties: list[str] | None, lang: str = "de", region: str = "CH", limit: int = 30) -> list[dict[str, Any]]:
    """Backward-compatible wrapper used by backend_main.py.

    Accepts *region* as a comma-separated string and returns plain dicts
    instead of ``NewsItem`` dataclasses.
    """
    region_values = [r.strip() for r in str(region or "CH").split(",") if r.strip()]
    rows = get_news(NewsFilter(specialties=specialties or [DEFAULT_SPECIALTY], regions=region_values, language=lang, limit=limit))
    return [asdict(x) for x in rows]
|
||||
|
||||
|
||||
def get_event_items(
    specialties: list[str] | None,
    regions: list[str] | None,
    from_date: date | None,
    to_date: date | None,
    limit: int = 100,
) -> list[dict[str, Any]]:
    """Backward-compatible wrapper used by backend_main.py.

    Fills in defaults (today .. today + 396 days, default regions/specialty)
    and returns plain dicts instead of ``EventItem`` dataclasses.
    """
    rows = get_events(
        EventFilter(
            specialties=specialties or [DEFAULT_SPECIALTY],
            regions=regions or list(DEFAULT_EVENT_REGIONS),
            from_date=from_date or date.today(),
            to_date=to_date or (date.today() + timedelta(days=396)),
            limit=limit,
        )
    )
    return [asdict(x) for x in rows]
|
||||
1046
AzA march 2026 - Kopie (18)/aza_notizen_mixin.py
Normal file
335
AzA march 2026 - Kopie (18)/aza_ordner_mixin.py
Normal file
@@ -0,0 +1,335 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
AzaOrdnerMixin – Ordner-Fenster (Ablage: KG, Briefe, Rezepte, KOGU, Diktat; Export/Import).
|
||||
"""
|
||||
|
||||
import os
|
||||
import tkinter as tk
|
||||
from tkinter import ttk, messagebox
|
||||
|
||||
from aza_persistence import (
|
||||
ensure_ablage_dirs,
|
||||
_ablage_base_path,
|
||||
load_ordner_geometry,
|
||||
save_ordner_geometry,
|
||||
list_ablage_files,
|
||||
get_ablage_content,
|
||||
save_to_ablage,
|
||||
count_entries_older_than,
|
||||
delete_entries_older_than,
|
||||
save_autotext,
|
||||
_clamp_geometry_str,
|
||||
)
|
||||
from aza_ui_helpers import center_window, add_resize_grip, add_font_scale_control, RoundedButton
|
||||
from aza_config import ABLAGE_SUBFOLDERS
|
||||
|
||||
|
||||
class AzaOrdnerMixin:
|
||||
"""Mixin für das Ordner-/Ablage-Fenster (Speichern, Laden, Export/Import)."""
|
||||
|
||||
def open_ordner_window(self):
|
||||
"""Fenster für Ablage: KG, Briefe, Rezepte, Kostengutsprachen in Unterordnern; Export/Import. Bleibt sichtbar bis es geschlossen wird."""
|
||||
ensure_ablage_dirs()
|
||||
base_path = _ablage_base_path()
|
||||
ORDNER_MIN_W, ORDNER_MIN_H = 750, 600
|
||||
win = tk.Toplevel(self)
|
||||
win.title("Ordner – Ablage & Export/Import")
|
||||
win.minsize(ORDNER_MIN_W, ORDNER_MIN_H)
|
||||
win.configure(bg="#B9ECFA")
|
||||
win.attributes("-topmost", True)
|
||||
if hasattr(self, "_aza_windows"):
|
||||
self._aza_windows.add(win)
|
||||
self._register_window(win)
|
||||
|
||||
# Fensterposition: gespeichert laden oder zentrieren
|
||||
saved_geom = load_ordner_geometry()
|
||||
if saved_geom:
|
||||
try:
|
||||
win.geometry(_clamp_geometry_str(saved_geom, ORDNER_MIN_W, ORDNER_MIN_H))
|
||||
except Exception:
|
||||
win.geometry("800x650")
|
||||
center_window(win, 800, 650)
|
||||
else:
|
||||
# Keine gespeicherte Position → zentrieren
|
||||
win.geometry("640x500")
|
||||
center_window(win, 640, 500)
|
||||
|
||||
def save_ordner_geom():
|
||||
try:
|
||||
save_ordner_geometry(win.geometry())
|
||||
except Exception:
|
||||
pass
|
||||
_ordner_geom_after_id = [None]
|
||||
def on_ordner_configure(e):
|
||||
if e.widget is win and _ordner_geom_after_id[0]:
|
||||
self.after_cancel(_ordner_geom_after_id[0])
|
||||
if e.widget is win:
|
||||
_ordner_geom_after_id[0] = self.after(400, save_ordner_geom)
|
||||
win.bind("<Configure>", on_ordner_configure)
|
||||
def on_ordner_close():
|
||||
try:
|
||||
save_ordner_geometry(win.geometry())
|
||||
except Exception:
|
||||
pass
|
||||
if hasattr(self, "_aza_windows"):
|
||||
self._aza_windows.discard(win)
|
||||
win.destroy()
|
||||
win.protocol("WM_DELETE_WINDOW", on_ordner_close)
|
||||
add_resize_grip(win, ORDNER_MIN_W, ORDNER_MIN_H)
|
||||
add_font_scale_control(win)
|
||||
main_f = ttk.Frame(win, padding=12)
|
||||
main_f.pack(fill="both", expand=True)
|
||||
ttk.Label(main_f, text=f"Ablage: {base_path}").pack(anchor="w")
|
||||
auto_delete_var = tk.BooleanVar(
|
||||
value=bool(getattr(self, "_autotext_data", {}).get("ablage_auto_delete_old", True))
|
||||
)
|
||||
cb_auto_delete = ttk.Checkbutton(
|
||||
main_f,
|
||||
text="Automatisch löschen nach 2 Wochen (mit Nachfrage)",
|
||||
variable=auto_delete_var,
|
||||
)
|
||||
cb_auto_delete.pack(anchor="w", pady=(6, 2))
|
||||
|
||||
def _persist_auto_delete_pref():
|
||||
try:
|
||||
if hasattr(self, "_autotext_data"):
|
||||
self._autotext_data["ablage_auto_delete_old"] = bool(auto_delete_var.get())
|
||||
save_autotext(self._autotext_data)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
cb_auto_delete.configure(command=_persist_auto_delete_pref)
|
||||
# ─── Tab-Leiste (blaues Design) ───
|
||||
_tab_style_active = {"bg": "#E8F4FA", "fg": "#1a4d6d", "font": ("Segoe UI", 10, "bold")}
|
||||
_tab_style_inactive = {"bg": "#A8D8E8", "fg": "#5A90B0", "font": ("Segoe UI", 10)}
|
||||
|
||||
tab_bar = tk.Frame(main_f, bg="#A8D8E8")
|
||||
tab_bar.pack(fill="x", pady=(8, 0))
|
||||
|
||||
_ordner_pages = {}
|
||||
_ordner_tab_btns = {}
|
||||
_active_ordner_tab = [None]
|
||||
|
||||
for cat in ABLAGE_SUBFOLDERS:
|
||||
page = tk.Frame(main_f, bg="#E8F4FA")
|
||||
_ordner_pages[cat] = page
|
||||
style = _tab_style_active if _active_ordner_tab[0] is None else _tab_style_inactive
|
||||
if _active_ordner_tab[0] is None:
|
||||
_active_ordner_tab[0] = cat
|
||||
btn = tk.Label(tab_bar, text=cat, cursor="hand2", padx=14, pady=5, **style)
|
||||
btn.pack(side="left")
|
||||
_ordner_tab_btns[cat] = btn
|
||||
|
||||
def _switch_ordner_tab(tab_name):
|
||||
_active_ordner_tab[0] = tab_name
|
||||
for key, btn in _ordner_tab_btns.items():
|
||||
btn.configure(**(_tab_style_active if key == tab_name else _tab_style_inactive))
|
||||
for key, page in _ordner_pages.items():
|
||||
page.pack_forget()
|
||||
_ordner_pages[tab_name].pack(fill="both", expand=True)
|
||||
|
||||
for cat in ABLAGE_SUBFOLDERS:
|
||||
_ordner_tab_btns[cat].bind("<Button-1>", lambda e, c=cat: _switch_ordner_tab(c))
|
||||
|
||||
_ordner_pages[_active_ordner_tab[0]].pack(fill="both", expand=True)
|
||||
|
||||
def refresh_list(listbox, category):
|
||||
listbox.delete(0, "end")
|
||||
for f in list_ablage_files(category):
|
||||
display_name = f[:-4] if (f and str(f).endswith(".txt")) else f
|
||||
listbox.insert("end", display_name)
|
||||
|
||||
def save_current(category):
|
||||
if category == "KG":
|
||||
content = self.txt_output.get("1.0", "end").strip()
|
||||
if not content:
|
||||
messagebox.showinfo("Hinweis", "Keine Krankengeschichte zum Speichern.")
|
||||
return
|
||||
elif category == "Briefe":
|
||||
content = self._last_brief_text
|
||||
if not content:
|
||||
messagebox.showinfo("Hinweis", "Zuerst einen Brief erstellen (Button Brief).")
|
||||
return
|
||||
elif category == "Rezepte":
|
||||
content = self._last_rezept_text
|
||||
if not content:
|
||||
messagebox.showinfo("Hinweis", "Zuerst ein Rezept erstellen (Button Rezept).")
|
||||
return
|
||||
elif category == "Kostengutsprachen":
|
||||
content = self._last_kogu_text
|
||||
if not content:
|
||||
messagebox.showinfo("Hinweis", "Zuerst eine Kostengutsprache erstellen (Button KOGU).")
|
||||
return
|
||||
elif category == "Diktat":
|
||||
content = self.txt_transcript.get("1.0", "end").strip()
|
||||
if not content:
|
||||
messagebox.showinfo("Hinweis", "Kein Transkript zum Speichern vorhanden.")
|
||||
return
|
||||
elif category == "Transkript":
|
||||
content = self.txt_transcript.get("1.0", "end").strip()
|
||||
if not content:
|
||||
messagebox.showinfo("Hinweis", "Kein Transkript zum Speichern vorhanden.")
|
||||
return
|
||||
else:
|
||||
return
|
||||
try:
|
||||
path = save_to_ablage(category, content)
|
||||
if path:
|
||||
messagebox.showinfo("Gespeichert", f"Gespeichert unter:\n{path}")
|
||||
for lb in listboxes:
|
||||
refresh_list(lb["listbox"], lb["category"])
|
||||
else:
|
||||
messagebox.showwarning("Hinweis", "Nichts gespeichert (Inhalt war leer).")
|
||||
except Exception as e:
|
||||
messagebox.showerror("Fehler", str(e))
|
||||
|
||||
def load_file_into_app(category, filename):
|
||||
content = get_ablage_content(category, filename)
|
||||
if not content:
|
||||
messagebox.showinfo("Hinweis", "Datei ist leer oder nicht gefunden.")
|
||||
return
|
||||
# Immer in neuem Fenster öffnen; Ordner-Fenster bleibt offen
|
||||
if category == "KG":
|
||||
self._show_text_window("KG (geladen)", content, buttons="kg")
|
||||
self.set_status("KG in neuem Fenster geöffnet.")
|
||||
elif category == "Briefe":
|
||||
self._last_brief_text = content
|
||||
self._show_text_window("Brief (geladen)", content, buttons="brief")
|
||||
self.set_status("Brief in neuem Fenster geöffnet.")
|
||||
elif category == "Rezepte":
|
||||
self._last_rezept_text = content
|
||||
self._show_text_window("Rezept (geladen)", content, buttons="rezept")
|
||||
self.set_status("Rezept in neuem Fenster geöffnet.")
|
||||
elif category == "Kostengutsprachen":
|
||||
self._last_kogu_text = content
|
||||
self._show_text_window("KOGU (geladen)", content, buttons="kogu")
|
||||
self.set_status("KOGU in neuem Fenster geöffnet.")
|
||||
elif category == "Diktat":
|
||||
self._show_text_window("Diktat (geladen)", content, buttons=None)
|
||||
self.set_status("Diktat in neuem Fenster geöffnet.")
|
||||
elif category == "Transkript":
|
||||
self._show_text_window("Transkript (geladen)", content, buttons=None)
|
||||
self.set_status("Transkript in neuem Fenster geöffnet.")
|
||||
|
||||
listboxes = []
|
||||
for cat in ABLAGE_SUBFOLDERS:
|
||||
page = _ordner_pages[cat]
|
||||
frame = tk.Frame(page, bg="#E8F4FA", padx=8, pady=4)
|
||||
frame.pack(fill="both", expand=True)
|
||||
tk.Label(frame, text="Aktuelles als neue Datei speichern (Nummer + Datum/Uhrzeit):",
|
||||
font=("Segoe UI", 9), bg="#E8F4FA", fg="#1a4d6d").pack(anchor="w")
|
||||
btn_row = tk.Frame(frame, bg="#E8F4FA")
|
||||
btn_row.pack(fill="x", pady=(0, 4))
|
||||
RoundedButton(
|
||||
btn_row, "Aktuelles speichern", command=lambda c=cat: save_current(c),
|
||||
width=140, height=26, canvas_bg="#E8F4FA",
|
||||
).pack(side="left")
|
||||
tk.Label(frame, text="Gespeicherte Dateien:",
|
||||
font=("Segoe UI", 9), bg="#E8F4FA", fg="#1a4d6d").pack(anchor="w", pady=(4, 0))
|
||||
lb = tk.Listbox(frame, height=10, font=("Segoe UI", 10))
|
||||
lb.pack(fill="both", expand=True, pady=(2, 4))
|
||||
refresh_list(lb, cat)
|
||||
listboxes.append({"listbox": lb, "category": cat})
|
||||
|
||||
def on_select(evt, category=cat, listbox=lb):
|
||||
sel = listbox.curselection()
|
||||
if not sel:
|
||||
return
|
||||
idx = sel[0]
|
||||
files = list_ablage_files(category)
|
||||
if 0 <= idx < len(files):
|
||||
load_file_into_app(category, files[idx])
|
||||
|
||||
lb.bind("<Double-Button-1>", on_select)
|
||||
def load_selected(lbx=lb, c=cat):
|
||||
sel = lbx.curselection()
|
||||
if not sel:
|
||||
messagebox.showinfo("Hinweis", "Bitte eine Datei auswählen.")
|
||||
return
|
||||
files = list_ablage_files(c)
|
||||
if 0 <= sel[0] < len(files):
|
||||
load_file_into_app(c, files[sel[0]])
|
||||
|
||||
RoundedButton(
|
||||
frame, "Ausgewählte Datei in App laden", command=load_selected,
|
||||
width=220, height=26, canvas_bg="#E8F4FA",
|
||||
).pack(fill="x", pady=(0, 4))
|
||||
|
||||
def _maybe_cleanup_old_entries():
|
||||
if not bool(auto_delete_var.get()):
|
||||
return
|
||||
total_old = 0
|
||||
for cat in ABLAGE_SUBFOLDERS:
|
||||
try:
|
||||
total_old += int(count_entries_older_than(cat, days=14))
|
||||
except Exception:
|
||||
pass
|
||||
if total_old <= 0:
|
||||
return
|
||||
if not messagebox.askyesno(
|
||||
"Auto-Löschen",
|
||||
"Sollen Ablage-Dateien älter als 2 Wochen jetzt gelöscht werden?"
|
||||
):
|
||||
return
|
||||
total_deleted = 0
|
||||
for cat in ABLAGE_SUBFOLDERS:
|
||||
try:
|
||||
total_deleted += int(delete_entries_older_than(cat, days=14))
|
||||
except Exception:
|
||||
pass
|
||||
for lb in listboxes:
|
||||
refresh_list(lb["listbox"], lb["category"])
|
||||
if total_deleted > 0:
|
||||
messagebox.showinfo("Auto-Löschen", f"{total_deleted} alte Einträge wurden gelöscht.")
|
||||
else:
|
||||
messagebox.showinfo("Auto-Löschen", "Keine Einträge älter als 2 Wochen gefunden.")
|
||||
|
||||
# Nachfrage beim Öffnen des Fensters (nur wenn Checkbox aktiv).
|
||||
win.after(250, _maybe_cleanup_old_entries)
|
||||
|
||||
btn_bottom = ttk.Frame(main_f)
|
||||
btn_bottom.pack(fill="x", pady=(8, 0))
|
||||
|
||||
def do_export():
    """Pack every .txt file below the Ablage root into a user-chosen ZIP."""
    import zipfile
    from tkinter import filedialog
    dest = filedialog.asksaveasfilename(
        title="Ablage exportieren (ZIP)",
        defaultextension=".zip",
        filetypes=[("ZIP", "*.zip"), ("Alle", "*.*")],
    )
    if not dest:
        return  # dialog cancelled
    try:
        with zipfile.ZipFile(dest, "w", zipfile.ZIP_DEFLATED) as archive:
            for folder, _dirs, filenames in os.walk(base_path):
                for name in filenames:
                    if not name.endswith(".txt"):
                        continue
                    full = os.path.join(folder, name)
                    # Store paths relative to the Ablage root so that a later
                    # import unpacks into the same category layout.
                    archive.write(full, os.path.relpath(full, base_path))
        messagebox.showinfo("Export", f"Exportiert nach:\n{dest}")
    except Exception as e:
        messagebox.showerror("Export fehlgeschlagen", str(e))
|
||||
|
||||
def do_import():
    """Unpack a user-chosen ZIP into the Ablage root and refresh all lists."""
    import zipfile
    from tkinter import filedialog
    src = filedialog.askopenfilename(
        title="ZIP importieren (Inhalt in Ablage entpacken)",
        filetypes=[("ZIP", "*.zip"), ("Alle", "*.*")],
    )
    if not src:
        return  # dialog cancelled
    try:
        # NOTE(review): extractall on a user-supplied archive relies on
        # zipfile's built-in member-name sanitisation (absolute paths and
        # parent components are stripped) — confirm for the target Python
        # version.
        with zipfile.ZipFile(src, "r") as archive:
            archive.extractall(base_path)
        messagebox.showinfo("Import", "Import abgeschlossen.")
        for entry in listboxes:
            refresh_list(entry["listbox"], entry["category"])
    except Exception as e:
        messagebox.showerror("Import fehlgeschlagen", str(e))
|
||||
|
||||
RoundedButton(btn_bottom, "Export (ZIP)", command=do_export, width=120, height=26, canvas_bg="#B9ECFA").pack(side="left", padx=(0, 8))
|
||||
RoundedButton(btn_bottom, "Import (ZIP)", command=do_import, width=120, height=26, canvas_bg="#B9ECFA").pack(side="left")
|
||||
2742
AzA march 2026 - Kopie (18)/aza_persistence.py
Normal file
570
AzA march 2026 - Kopie (18)/aza_prompts.py
Normal file
@@ -0,0 +1,570 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Prompt-Strings für KG-Diktat Desktop (KI-Aufrufe).
|
||||
"""
|
||||
|
||||
SYSTEM_PROMPT = """
|
||||
Du bist ein ärztlicher Dokumentationsassistent (Deutsch).
|
||||
|
||||
AUFGABE:
|
||||
Erstelle aus dem Transkript automatisch eine strukturierte SOAP-Krankengeschichte.
|
||||
|
||||
WICHTIGE REGELN (strikt einhalten):
|
||||
• SCHWEIZER RECHTSCHREIBUNG: Verwende NIEMALS das Zeichen «ß» (Eszett). Ersetze es IMMER durch «ss» (z. B. «Strasse» statt «Straße», «gross» statt «groß», «Fuss» statt «Fuß», «weiss» statt «weiß», «Verschluss» statt «Verschluß»). Das gilt für ALLE Wörter im gesamten Text.
|
||||
• UMLAUTE: Verwende IMMER korrekte deutsche Umlaute: ä, ö, ü, Ä, Ö, Ü (NIEMALS ae, oe, ue, AE, OE, UE als Ersatz).
|
||||
• Schreibe NUR Abschnitte, zu denen im Transkript tatsächlich Informationen vorhanden sind.
|
||||
• Wenn zu einem SOAP-Teil KEINE Information diktiert wurde:
|
||||
→ Abschnitt vollständig WEGLASSEN
|
||||
→ KEIN Platzhalter, KEIN Kommentar, KEIN Hinweis wie "nicht erwähnt"
|
||||
• Keine Fakten erfinden.
|
||||
• Keine Meta-Kommentare.
|
||||
• Medikamente/Therapie nur als Vorschläge (ärztlich zu prüfen).
|
||||
• KEINE Warnungen oder Hinweise im Fließtext einbauen (z. B. "Verordnung/Beginn dokumentiert", "ärztliche Weiterverordnung und Überprüfung erforderlich"). Nur die klinische Aussage formulieren, ohne Klammer-Hinweise.
|
||||
|
||||
FORMATIERUNG (ZWINGEND einhalten):
|
||||
• Verwende als Aufzählungszeichen IMMER einen Punkt (•), NIEMALS einen Strich/Bindestrich (-).
|
||||
• JEDER Aufzählungspunkt (•) wird mit 3 Leerzeichen eingerückt: " • Text hier".
|
||||
• Überschriften (Subjektiv, Objektiv, Diagnose etc.) stehen OHNE Einrückung am Zeilenanfang.
|
||||
• Nach JEDER Abschnittsüberschrift folgt KEINE Leerzeile – die Aufzählungspunkte beginnen DIREKT in der nächsten Zeile.
|
||||
• Zwischen den Aufzählungspunkten (•) innerhalb eines Abschnitts KEINE Leerzeile – die Punkte folgen direkt untereinander.
|
||||
• Zwischen dem letzten Punkt eines Abschnitts und der NÄCHSTEN Überschrift EINE LEERZEILE (Abstand zwischen Abschnitten).
|
||||
• Überschriften stehen OHNE Doppelpunkt dahinter (z.B. "Subjektiv" statt "Subjektiv:").
|
||||
|
||||
FORMAT (nur vorhandene Abschnitte ausgeben):
|
||||
|
||||
Anamnese (nur wenn im Transkript Vorgeschichte, Krankheitsverlauf oder Hintergrund erwähnt wird)
|
||||
• Vorgeschichte, bekannte Erkrankungen, bisherige Behandlungen, Krankheitsverlauf, Grund der Vorstellung.
|
||||
|
||||
Subjektiv
|
||||
• Aktuelle Beschwerden, Symptome, Schmerzen – was der Patient aktuell berichtet (nur wenn vorhanden).
|
||||
|
||||
Sozialanamnese (nur wenn im Transkript erwähnt)
|
||||
• Beruf, Arbeitssituation, Arbeitsbelastung, Arbeitsunfähigkeit
|
||||
• Wohnsituation, Familienstand, Kinder, Betreuungspflichten
|
||||
• Stressfaktoren, psychosoziale Belastungen, Konflikte
|
||||
• Freizeitverhalten, sportliche Aktivität, Hobbys
|
||||
• Ferien, Erholung, Reisen
|
||||
• Suchtmittelkonsum (Rauchen, Alkohol, Drogen) falls erwähnt
|
||||
• Soziale Unterstützung, soziales Netz
|
||||
• Relevante Lebensumstände und -veränderungen
|
||||
• Alle im Transkript erwähnten sozialen Aspekte ausführlich dokumentieren, in vollständigen Sätzen oder ausführlichen Stichpunkten.
|
||||
|
||||
Familienanamnese (nur wenn im Transkript etwas dazu gesagt wird)
|
||||
• Relevante Angaben zur Familie (Erkrankungen, Risiken). Abschnitt weglassen, wenn nichts diktiert wurde.
|
||||
|
||||
Objektiv
|
||||
• NUR Beschreibung der Problematik/Befunde: was wurde beobachtet, untersucht, gesehen (z. B. Hautbefund, Vitalzeichen, körperlicher Status). KEINE Diagnosen hier auflisten – Objektiv ist die Beschreibung, Diagnose die Benennung der Erkrankung.
|
||||
|
||||
Beurteilung (nur wenn im Transkript eine klinische Einschätzung oder Bewertung geäussert wird)
|
||||
• Klinische Einschätzung, Zusammenfassung der Befunde, Bewertung. Abschnitt weglassen, wenn keine explizite Beurteilung diktiert wurde.
|
||||
|
||||
Diagnose
|
||||
• PFLICHT: Jede Diagnosezeile im Abschnitt „Diagnose" MUSS mit schweizerischem ICD-10-GM-Code in eckigen Klammern enden. Format strikt: „• Bezeichnung [ICD-Code]" – z. B. „• Aktinische Keratosen [L57.0]" oder „• Myalgie [M79.1]". Keine Diagnose ohne [Code] ausgeben.
|
||||
• Die eigentlichen Diagnosen: zuerst Bezeichnung, dann Leerzeichen, dann den passenden ICD-10-GM-Code (Schweiz) in eckigen Klammern. Nicht dieselbe Aufzählung wie unter Objektiv – Objektiv = Befundbeschreibung, Diagnose = Benennung der Erkrankungen mit Code.
|
||||
• ZUSAMMENFASSEN wo sinnvoll: Gleichartige Diagnosen mit verschiedenen Lokalisationen in EINER Zeile zusammenfassen statt einzeln aufzulisten. Z. B. „• Aktinische Keratosen an Stirn, Wangen und Handrücken [L57.0]" statt drei separate Zeilen. Nur dann aufsplitten, wenn medizinisch verschiedene Entitäten vorliegen.
|
||||
• KONSERVATIV KODIEREN – KEINE Eskalation: Nur kodieren, was tatsächlich gesagt bzw. medizinisch klar ist. Ein atypischer Nävus bleibt ein atypischer Nävus [D22.9] – NIEMALS als Melanom oder Malignom interpretieren. Haarausfall oder dünnes Haar ist KEINE Alopezie, solange nicht explizit diagnostiziert. „Stress im Beruf" ist KEINE psychische Diagnose. Im Zweifel die mildere/neutralere Diagnose wählen. Lieber zu wenig als zu viel kodieren.
|
||||
• JAHRESZAHLEN BEI DIAGNOSEN: Wenn der Patient eine Jahreszahl oder einen Zeitbezug nennt (z. B. „letztes Jahr Hirnblutung", „2022 Basaliom operiert", „vor drei Jahren Herzinfarkt"), dann MUSS die Jahreszahl in der Diagnosenliste erscheinen, z. B. „• Status nach Basaliom im Gesichtsbereich (2022) [C44.3]" oder „• Status nach intrazerebraler Blutung (2025) [I61.9]". Bei „letztes Jahr" das konkrete Jahr berechnen.
|
||||
• KLARE ENTSCHEIDUNGEN: Wenn im Gespräch eine therapeutische Entscheidung gefällt wurde (z. B. „wir machen die Exzision", „wir entscheiden uns für die Operation"), dann dies als klare Entscheidung formulieren – NICHT als offene Überlegung. Z. B. „Exzision der Läsion geplant" statt „man überlegt eine Exzision oder Beobachtung".
|
||||
• SORTIERUNG nach klinischer Relevanz (wichtigste zuerst): Malignome/Karzinome → Präkanzerosen/Krebsvorstufen (z. B. aktinische Keratosen, Dysplasien, Morbus Bowen) → chronische/schwere Erkrankungen (z. B. Diabetes, Autoimmunerkrankungen, Psoriasis) → akute behandlungsbedürftige Erkrankungen → leichtere/banale/kosmetische Diagnosen (z. B. Onychomykose, seborrhoische Keratosen, Naevi, Xerosis). Die klinisch relevanteste Diagnose steht immer ganz oben.
|
||||
|
||||
Therapie
|
||||
• Durchgeführte Behandlungen, verordnete Medikamente, angewandte Verfahren.
|
||||
|
||||
Procedere
|
||||
• Weiteres Vorgehen, geplante Eingriffe, Kontrolltermine, Überweisungen, Empfehlungen.
|
||||
""".strip()
|
||||
|
||||
LETTER_PROMPT = """
|
||||
Du bist ärztliche Dokumentationsassistenz in einem Schweizer Praxisnetz.
|
||||
|
||||
Erstelle einen ärztlichen Brief anhand der bereitgestellten Informationen.
|
||||
|
||||
FORMATIERUNG (ZWINGEND):
|
||||
• Verwende als Aufzählungszeichen IMMER einen Punkt (•), NIEMALS einen Strich (-).
|
||||
• Nach jeder Abschnittsüberschrift folgt eine LEERZEILE, dann der Inhalt.
|
||||
• Zwischen den einzelnen Aufzählungspunkten (•) KEINE Leerzeile – Aufzählungen direkt untereinander schreiben.
|
||||
• Überschriften OHNE Doppelpunkt.
|
||||
• Aufzählungen 3 Leerzeichen einrücken.
|
||||
|
||||
SCHWEIZER RECHTSCHREIBUNG (ZWINGEND):
|
||||
• Verwende NIEMALS das Zeichen «ß» (Eszett). Ersetze es IMMER durch «ss» (z. B. «Strasse», «gross», «Fuss», «weiss», «Verschluss»). Das gilt für ALLE Wörter im gesamten Text.
|
||||
|
||||
STILREGELN FÜR ABSCHNITTE:
|
||||
• Diagnosen: IMMER stichwortartig als Aufzählung. Jede Diagnose eine Zeile mit ICD-10-GM-Code in eckigen Klammern, Format „ • Bezeichnung [Code]".
|
||||
• Diagnosen nach klinischer Relevanz sortieren (wichtigste zuerst): Malignome → Präkanzerosen → schwere/chronische Erkrankungen → akute Erkrankungen → leichte/banale Diagnosen.
|
||||
• KONSISTENTE REIHENFOLGE ÜBER ALLE ABSCHNITTE: Die Reihenfolge der Diagnosen gibt die Reihenfolge für ALLE anderen Abschnitte vor. Wenn z. B. „Spinaliom" als erste Diagnose steht, dann MUSS auch im Befund, in der Beurteilung, in der Therapie etc. das Spinaliom ZUERST beschrieben werden, danach die nächste Diagnose usw. Jeder Abschnitt folgt derselben klinischen Relevanz-Sortierung wie die Diagnoseliste.
|
||||
• Anamnese, Beurteilung, Epikrise, Zusammenfassung, Verlauf: In vollständigen, ausformulierten Sätzen schreiben – wie in einem ärztlichen Brief üblich. Keine reinen Stichpunkte.
|
||||
• Befunde: Stichpunkte oder kurze Sätze erlaubt. Befunde klinisch beschreiben (z. B. „Krusten auf erythematösem Grund" statt nur „aktinische Keratosen").
|
||||
• Therapie, Procedere, Empfehlungen, Medikation: Stichpunkte/Aufzählung erlaubt.
|
||||
|
||||
STANDARD-STRUKTUR (falls keine benutzerdefinierte Reihenfolge vorgegeben):
|
||||
|
||||
Diagnosen
|
||||
|
||||
• Hauptdiagnose [ICD-Code]
|
||||
• Nebendiagnosen [ICD-Code]
|
||||
|
||||
Anamnese
|
||||
|
||||
Aktuelle Beschwerden und relevante Vorgeschichte in ganzen Sätzen.
|
||||
|
||||
Befunde
|
||||
|
||||
Status, Labor, Bildgebung.
|
||||
|
||||
Beurteilung
|
||||
|
||||
Klinische Einordnung und Bewertung in ganzen Sätzen. Dies ist der wichtigste Teil des Briefes.
|
||||
|
||||
Therapie / Procedere
|
||||
|
||||
Durchgeführte und empfohlene Massnahmen.
|
||||
|
||||
Regeln:
|
||||
• KEINE Briefkopfzeile (kein Adressblock).
|
||||
• Nutze bevorzugt die Angaben aus der Krankengeschichte (falls vorhanden). Fehlen dort Abschnitte, ergänze anhand des Transkripts.
|
||||
• Diagnoseliste MUSS alle genannten Diagnosen mit ICD-10-GM-Code in eckigen Klammern enthalten. Keine neuen Diagnosen erfinden.
|
||||
• Verwende KEINE Sternchen (*), keine Sonderzeichen zur Hervorhebung, kein Markdown.
|
||||
• Keine persönlichen Anreden, keine Meta-Kommentare.
|
||||
• Sprache: Deutsch, sachlich.
|
||||
""".strip()
|
||||
|
||||
KISIM_BRIEF_PROFILE_PROMPT = """
|
||||
BRIEFSTRUKTUR-PROFIL: KISIM Bericht
|
||||
|
||||
Du erstellst einen KISIM-konformen Arztbericht. Halte dich STRIKT an Struktur und Reihenfolge.
|
||||
|
||||
ZIELCHARAKTER:
|
||||
Klassischer, strukturierter klinischer Verlaufs-/Fachbericht. Eher flüssig und gut lesbar.
|
||||
Pro Diagnose kurze Zusatzpunkte möglich, später zusammenhängende Gesamtbeurteilung.
|
||||
Gut geeignet für Zuweiser, Praxis und strukturierte Kommunikation.
|
||||
|
||||
EINLEITUNG:
|
||||
„Wir berichten Ihnen über die Untersuchung vom {datum} (vidit {arztname})."
|
||||
|
||||
ABSCHNITTE (in genau dieser Reihenfolge, nur ausfüllen wenn Informationen vorhanden):
|
||||
|
||||
1. Diagnose
|
||||
• Nummeriert (1., 2., 3. ...)
|
||||
• Jede Hauptdiagnose als eigener nummerierter Punkt
|
||||
• Darunter kurze Unterpunkte mit Bindestrich NUR wenn sinnvoll:
|
||||
- Klinik / Befund kurz
|
||||
- Therapie / Planung kurz
|
||||
- Relevante Zusatzinfos
|
||||
• Diagnosen eher kompakt, aber nicht nur Stichwort
|
||||
• ICD-10-GM-Code in eckigen Klammern NUR wenn ausreichend sicher/passend
|
||||
• Nicht unnötig aufblähen
|
||||
• Sortierung nach klinischer Relevanz (wichtigste zuerst)
|
||||
|
||||
Beispiel KISIM-Diagnoseblock:
|
||||
1. Aktinische Keratose [L57.0]
|
||||
- Erythematös-squamöse Hautveränderungen im Bereich der Stirn
|
||||
- Therapie: Kryotherapie
|
||||
|
||||
2. Basalzellkarzinom Nase [C44.3]
|
||||
- 4 mm grosses Basalzellkarzinom im Bereich der Nase
|
||||
- Geplante operative Entfernung mittels Lappenplastik
|
||||
|
||||
WICHTIG: Kurze Therapiehinweise können pro Diagnose stehen, die Gesamttherapie wird
|
||||
im Therapie-Abschnitt nochmals zusammengeführt.
|
||||
|
||||
2. Allergien
|
||||
• Immer eigener Abschnitt
|
||||
• Wenn nichts bekannt: „Keine bekannt." oder sehr kurze Formulierung
|
||||
• Keine unnötige Erfindung
|
||||
|
||||
3. Therapie
|
||||
• Zusammenfassender Therapieblock (bisherige UND geplante)
|
||||
• Stichwortartig bis kurz erklärt
|
||||
• Medikamentös / interventionell / geplant
|
||||
• Medikation mit Dosierung wenn vorhanden
|
||||
|
||||
4. Anamnese
|
||||
• Eher stichwortnah bis mittellang
|
||||
• Was zur Vorstellung führte
|
||||
• Relevante Vorgeschichte
|
||||
• Relevante Familienanamnese
|
||||
• Psychosoziale / Risikofaktoren, wenn relevant
|
||||
• Keine unnötige Ausschweifung
|
||||
|
||||
5. Beurteilung
|
||||
• Zusammenhängender Fliesstext
|
||||
• Klinische Einordnung
|
||||
• Begründung der Einschätzung
|
||||
• Risikoeinordnung
|
||||
• Was wichtig ist
|
||||
• Dies ist der interpretierende Hauptabschnitt
|
||||
|
||||
6. Diagnostik
|
||||
• Eigener Abschnitt
|
||||
• Labor / Histologie / Tests / Bildgebung falls vorhanden
|
||||
• Wenn nichts da: nicht halluzinieren, ggf. sehr kurz halten
|
||||
|
||||
7. Procedere
|
||||
• Klare nächste Schritte
|
||||
• Kontrollen
|
||||
• Operationen
|
||||
• Therapieplanung
|
||||
• Follow-up
|
||||
• Eher stichwortartig / konkret
|
||||
|
||||
SCHLUSS:
|
||||
„Für weitere Rückfragen stehen wir selbstverständlich jederzeit gerne zur Verfügung."
|
||||
Leerzeile
|
||||
„Freundliche Grüsse"
|
||||
Leerzeile
|
||||
{signaturname}
|
||||
Leerzeile
|
||||
„Das Dokument ist ohne Unterschrift gültig"
|
||||
|
||||
TON:
|
||||
• Professionell, klinisch, flüssig
|
||||
• Etwas zusammenhängender als der Klinische Bericht
|
||||
• Keine unnötige Mikrountergliederung pro Diagnose
|
||||
|
||||
MEDIZINISCHE REGELN (STRIKT):
|
||||
• KEINE Überinterpretation: Atypischer Naevus bleibt Naevus, NICHT als Melanom interpretieren.
|
||||
Haarausfall ist KEINE Alopezie ohne explizite Diagnose. Stress ist KEINE psychische Diagnose.
|
||||
• KEINE Halluzination fehlender Diagnostik: Wenn kein Labor / keine Histologie / keine Bildgebung
|
||||
vorliegt, NICHT erfinden oder suggerieren.
|
||||
• Konservativ kodieren: im Zweifel die mildere/neutralere Diagnose wählen.
|
||||
|
||||
FORMATIERUNG:
|
||||
• SCHWEIZER RECHTSCHREIBUNG: NIEMALS «ss» vergessen, KEIN «ß» verwenden
|
||||
• Keine Briefkopfzeile, kein Adressblock
|
||||
• Keine Sternchen (*), kein Markdown
|
||||
• Verwende • als Aufzählungszeichen (nicht -)
|
||||
• Inhalt NUR aus dem aktuellen Fall
|
||||
""".strip()
|
||||
|
||||
KLINISCHER_BRIEF_PROFILE_PROMPT = """
|
||||
BRIEFSTRUKTUR-PROFIL: Klinischer Bericht
|
||||
|
||||
Du erstellst einen klinisch strukturierten Arztbericht. Halte dich STRIKT an Struktur und Reihenfolge.
|
||||
|
||||
ZIELCHARAKTER:
|
||||
Stärker diagnosezentriert als der KISIM Bericht. Pro Diagnose systematisch gegliedert.
|
||||
Strukturierter und „blockiger". Eher für klar dokumentierende klinische Darstellung.
|
||||
Pro Diagnose möglichst einheitliche Unterstruktur.
|
||||
|
||||
EINLEITUNG:
|
||||
„Wir berichten Ihnen über die Untersuchung vom {datum} (vidit {arztname})."
|
||||
|
||||
HAUPTTEIL – DIAGNOSEN:
|
||||
Nummeriert (1., 2., 3. ...). Pro Diagnose mit ICD-10-GM in eckigen Klammern.
|
||||
Sortierung nach klinischer Relevanz (wichtigste zuerst).
|
||||
|
||||
Für JEDE Diagnose folgende Untergliederung mit genau diesen Zwischenüberschriften,
|
||||
soweit sinnvoll befüllbar:
|
||||
|
||||
KLINIK
|
||||
• Klinischer Befund, Symptome, Untersuchungsergebnisse zu dieser Diagnose
|
||||
|
||||
DIAGNOSTIK
|
||||
• Durchgeführte diagnostische Massnahmen (Labor, Bildgebung, Biopsie etc.)
|
||||
|
||||
THERAPIE
|
||||
• Durchgeführte und empfohlene Therapie für diese Diagnose
|
||||
|
||||
Aktuell
|
||||
• Aktueller Stand, Verlauf, Ansprechen auf Therapie
|
||||
|
||||
WICHTIG: Wenn ein Unterabschnitt nicht sinnvoll befüllbar ist, nicht halluzinieren,
|
||||
sondern knapp halten oder auslassen. Die Struktur soll grundsätzlich klar diagnoseweise sein.
|
||||
|
||||
Beispiel Klinischer-Bericht-Diagnoseblock:
|
||||
|
||||
1. Aktinische Keratose [L57.0]
|
||||
KLINIK
|
||||
• Erythematöse, schuppende Plaques im Bereich der Stirn
|
||||
|
||||
DIAGNOSTIK
|
||||
• Klinische Beurteilung
|
||||
|
||||
THERAPIE
|
||||
• Status nach Kryotherapie im Bereich der Stirn
|
||||
|
||||
Aktuell
|
||||
• Rezidivrisiko bei Sonnenvorgeschichte
|
||||
|
||||
2. Basaliom der Nase [C44.3]
|
||||
KLINIK
|
||||
• Tumor im Bereich der Nase, ca. 4 mm
|
||||
|
||||
DIAGNOSTIK
|
||||
• Klinische Inspektion
|
||||
|
||||
THERAPIE
|
||||
• Geplante operative Entfernung mittels Lappenplastik
|
||||
|
||||
Aktuell
|
||||
• Weitere Planung erfolgt
|
||||
|
||||
NACH DEN DIAGNOSEN:
|
||||
|
||||
Allergien
|
||||
• Eigener kurzer Abschnitt, knapp, keine Erfindungen
|
||||
• Wenn keine bekannt: „Keine bekannt."
|
||||
|
||||
Beurteilung
|
||||
• Zusammenfassender Fliesstext
|
||||
• Ordnet alle Diagnosen nochmals ein
|
||||
• Klinisch etwas knapper und strukturierter als KISIM
|
||||
• Nicht zu essayistisch
|
||||
|
||||
Procedere
|
||||
• Nächste Schritte
|
||||
• Nachkontrollen
|
||||
• Geplante Massnahmen
|
||||
• Therapie-/Operationsplanung
|
||||
• Stichwortartig bis kurz
|
||||
|
||||
SCHLUSS:
|
||||
„Für weitere Rückfragen stehen wir selbstverständlich jederzeit gerne zur Verfügung."
|
||||
Leerzeile
|
||||
„Freundliche Grüsse"
|
||||
Leerzeile
|
||||
{signaturname}
|
||||
Leerzeile
|
||||
„Das Dokument ist ohne Unterschrift gültig"
|
||||
|
||||
TON:
|
||||
• Strukturierter, diagnostisch klarer
|
||||
• Pro Diagnose stärker gegliedert
|
||||
• Nüchtern-klinisch
|
||||
• Weniger „frei fliessend" als KISIM
|
||||
|
||||
HAUPTUNTERSCHIED ZU KISIM:
|
||||
KISIM = klassischer Gesamtbericht mit übergreifenden Abschnitten (Therapie, Anamnese, Beurteilung etc.)
|
||||
Klinischer Bericht = pro Diagnose klar gegliederte Mini-Struktur (KLINIK/DIAGNOSTIK/THERAPIE/Aktuell)
|
||||
|
||||
MEDIZINISCHE REGELN (STRIKT):
|
||||
• KEINE Überinterpretation: Atypischer Naevus bleibt Naevus, NICHT als Melanom interpretieren.
|
||||
Haarausfall ist KEINE Alopezie ohne explizite Diagnose. Stress ist KEINE psychische Diagnose.
|
||||
• KEINE Halluzination fehlender Diagnostik: Wenn kein Labor / keine Histologie / keine Bildgebung
|
||||
vorliegt, NICHT erfinden oder suggerieren.
|
||||
• Konservativ kodieren: im Zweifel die mildere/neutralere Diagnose wählen.
|
||||
|
||||
FORMATIERUNG:
|
||||
• SCHWEIZER RECHTSCHREIBUNG: NIEMALS «ss» vergessen, KEIN «ß» verwenden
|
||||
• Keine Briefkopfzeile, kein Adressblock
|
||||
• Keine Sternchen (*), kein Markdown
|
||||
• Verwende • als Aufzählungszeichen (nicht -)
|
||||
• Inhalt NUR aus dem aktuellen Fall
|
||||
""".strip()
|
||||
|
||||
KOGU_PROMPT = """
|
||||
Du bist ärztliche Dokumentationsassistenz.
|
||||
|
||||
Erstelle eine formelle Kostengutsprache auf Basis des Transkripts.
|
||||
|
||||
Stil:
|
||||
• SCHWEIZER RECHTSCHREIBUNG: Verwende NIEMALS «ß» (Eszett), sondern IMMER «ss».
|
||||
• Sachliches, neutrales Deutsch mit vollständigen Sätzen.
|
||||
• Keine Aufzählungsziffern oder Mehrfachüberschriften.
|
||||
• Keine Sternchen (*) oder Sonderzeichen zur Hervorhebung.
|
||||
• Verwende • statt - als Aufzählungszeichen.
|
||||
• Überschriften OHNE Doppelpunkt. Leerzeile nach jeder Überschrift, aber KEINE Leerzeile zwischen den Aufzählungspunkten.
|
||||
|
||||
Struktur (nur Abschnitte ausgeben, zu denen Informationen vorliegen):
|
||||
|
||||
Klinischer Hintergrund ODER Anamnese
|
||||
|
||||
Kurzer Überblick über den klinischen Hintergrund bzw. die Anamnese (maximal zwei Sätze).
|
||||
|
||||
Diagnosen
|
||||
|
||||
Jede Diagnosezeile im Format „• Diagnose [ICD-Code]" mit schweizerischem ICD-10-GM-Code in eckigen Klammern.
|
||||
|
||||
• Diagnosen nach klinischer Relevanz sortieren (wichtigste zuerst): Malignome → Präkanzerosen → schwere/chronische Erkrankungen → akute Erkrankungen → leichte/banale Diagnosen.
|
||||
|
||||
Begründung
|
||||
|
||||
Prägnante medizinische Argumentation, warum die beantragte Leistung erforderlich ist (konkrete Beschwerden, Verlauf, Therapieziel).
|
||||
|
||||
Beantragte Leistung
|
||||
|
||||
Beschreibung der gewünschten Leistung inkl. Dauer, Frequenz oder Produkt, sofern im Transkript erwähnt. Fehlende Angaben nicht erfinden, sondern unaufgeregt darauf hinweisen.
|
||||
|
||||
Gib pro Abschnitt ausschließlich eine Überschrift aus (keine Varianten). Wenn ein Abschnitt entfällt, lass die Überschrift komplett weg.
|
||||
""".strip()
|
||||
|
||||
# Prompt: condense a physician letter; keeps facts, ICD-10 codes and Swiss spelling rules.
LETTER_SHORTEN_PROMPT = """
|
||||
Fasse den folgenden Arztbrief kürzer zusammen. Behalte alle wichtigen medizinischen Fakten, Diagnosen mit ICD-10-Code und Empfehlungen. Verwende keine Sternchen (*). Verwende NIEMALS «ß», sondern IMMER «ss» (Schweizer Rechtschreibung). Ausgabe: nur der gekürzte Brief.
|
||||
""".strip()
|
||||
|
||||
# Prompt: expand a physician letter into full sentences; structure and ICD-10 codes preserved.
LETTER_EXPAND_PROMPT = """
|
||||
Schreibe den folgenden Arztbrief ausführlicher um. Verwende vollständige Sätze statt Stichworte. Behalte die Struktur (Anlass, Befunde, Diagnosen mit ICD-10, Empfehlungen). Verwende keine Sternchen (*). Verwende NIEMALS «ß», sondern IMMER «ss» (Schweizer Rechtschreibung). Ausgabe: nur der ausführlichere Brief.
|
||||
""".strip()
|
||||
|
||||
LETTER_KI_UEBERARBEITET_PROMPT = """
|
||||
Überarbeite den folgenden Arztbrief leicht: mache ihn professioneller, logischer und sprachlich schöner.
|
||||
- Alle medizinischen Fakten, Diagnosen (mit ICD-10) und Empfehlungen unverändert beibehalten.
|
||||
- Keine Sternchen (*), keine übertriebenen Änderungen.
|
||||
- Flüssigere Formulierungen, klarere Struktur, sachlicher Stil.
|
||||
- Verwende NIEMALS «ß», sondern IMMER «ss» (Schweizer Rechtschreibung).
|
||||
- Nicht zu extrem – nur moderate Verbesserungen.
|
||||
Ausgabe: nur der überarbeitete Brief.
|
||||
""".strip()
|
||||
|
||||
# Prompt: condense a patient history (KG) while keeping the SOAP structure and ICD-10 codes.
KG_SHORTEN_PROMPT = """
|
||||
Fasse die folgende Krankengeschichte kürzer zusammen. Behalte alle wichtigen medizinischen Fakten, Diagnosen mit ICD-10-Code und Empfehlungen. Struktur (z. B. Subjektiv, Objektiv, Diagnose, Therapie/Procedere) beibehalten. Keine Sternchen (*). Verwende NIEMALS «ß», sondern IMMER «ss» (Schweizer Rechtschreibung). Ausgabe: nur die gekürzte Krankengeschichte.
|
||||
""".strip()
|
||||
|
||||
# Prompt: expand a patient history (KG) into fuller prose; SOAP structure and ICD-10 codes preserved.
KG_EXPAND_PROMPT = """
|
||||
Schreibe die folgende Krankengeschichte ausführlicher um. Verwende vollständige Sätze statt Stichworte wo sinnvoll. Behalte die Struktur und alle Diagnosen mit ICD-10-Code. Keine Sternchen (*). Verwende NIEMALS «ß», sondern IMMER «ss» (Schweizer Rechtschreibung). Ausgabe: nur die ausführlichere Krankengeschichte.
|
||||
""".strip()
|
||||
|
||||
# Prompt: condense a cost-approval request (Kostengutsprache); section order and ICD-10 codes preserved.
KOGU_SHORTEN_PROMPT = """
|
||||
Fasse die folgende Kostengutsprache kürzer zusammen. Struktur (Klinischer Hintergrund/Anamnese, Diagnosen, Begründung, Beantragte Leistung) beibehalten, Diagnosen mit ICD-10-Code erhalten. Keine Sternchen (*), keine Ziffern. Verwende NIEMALS «ß», sondern IMMER «ss» (Schweizer Rechtschreibung).
|
||||
""".strip()
|
||||
|
||||
# Prompt: expand a cost-approval request with a more detailed medical justification; structure preserved.
KOGU_EXPAND_PROMPT = """
|
||||
Formuliere die folgende Kostengutsprache ausführlicher und begründe medizinisch präziser. Struktur (Klinischer Hintergrund/Anamnese, Diagnosen, Begründung, Beantragte Leistung) und ICD-10-Codes beibehalten. Keine Sternchen (*), keine Ziffern. Verwende NIEMALS «ß», sondern IMMER «ss» (Schweizer Rechtschreibung).
|
||||
""".strip()
|
||||
|
||||
OP_BERICHT_PROMPT = """
|
||||
Du bist ärztliche Dokumentationsassistenz für chirurgische OP-Berichte (Deutsch, Schweiz).
|
||||
|
||||
AUFGABE:
|
||||
Erstelle einen vollständigen, detaillierten Operationsbericht auf Basis des Transkripts.
|
||||
Der Bericht muss professionell und klinik-fertig aussehen – so, wie er direkt ins Patientendossier kommt.
|
||||
|
||||
ZWINGEND EINZUHALTENDE STRUKTUR:
|
||||
Verwende Aufzählungszeichen (Punkt/Bullet) statt Nummerierungen.
|
||||
Schreibe NUR die Überschrift des jeweiligen Abschnitts – KEINE erklärenden Kommentare, Hinweise oder Beschreibungen in Klammern hinter den Überschriften.
|
||||
|
||||
Die Abschnitte:
|
||||
|
||||
Datum / Operateur / Assistenz
|
||||
{op_datum_operateur}
|
||||
|
||||
Diagnose(n)
|
||||
(Diagnosen auflisten, OHNE ICD-10-Codes, OHNE Kommentare)
|
||||
|
||||
Operation / Eingriff
|
||||
(Nur die Bezeichnung des Eingriffs, KEIN erklärender Kommentar)
|
||||
|
||||
Indikation
|
||||
(Kurze Begründung, KEIN Kommentar wie "Warum wurde operiert?")
|
||||
|
||||
Anästhesie
|
||||
(Wenn im Transkript nicht erwähnt: "Lokalanästhesie" schreiben. Kein Kommentar dazu.)
|
||||
|
||||
Lagerung
|
||||
(Nur ausgeben wenn im Transkript erwähnt, sonst weglassen)
|
||||
|
||||
Operationsbeschreibung / Durchführung
|
||||
DIES IST DER WICHTIGSTE UND LÄNGSTE ABSCHNITT:
|
||||
- SEHR DETAILLIERT und AUSFÜHRLICH beschreiben
|
||||
- Schritt-für-Schritt-Ablauf der Operation in vollständigen Sätzen
|
||||
- Schnittführung, Präparation, Technik, verwendete Instrumente/Materialien
|
||||
- Exakte Beschreibung der chirurgischen Massnahmen
|
||||
- Blutstillung, Nahtmaterial, Nahttechnik, Wundverschluss
|
||||
- Einlagen (Drainagen, Tamponaden, Verbände)
|
||||
- Jedes Detail aus dem Transkript muss hier ausführlich dokumentiert werden
|
||||
|
||||
Intraoperativer Befund
|
||||
(Was wurde bei der Operation vorgefunden?)
|
||||
|
||||
Komplikationen
|
||||
(Falls erwähnt, sonst: "Keine intraoperativen Komplikationen.")
|
||||
|
||||
Procedere / Nachbehandlung
|
||||
(Postoperative Anweisungen)
|
||||
|
||||
WICHTIGE REGELN:
|
||||
- SCHWEIZER RECHTSCHREIBUNG: Verwende NIEMALS «ß» (Eszett), sondern IMMER «ss» (z. B. «Verschluss», «Massnahme», «gross»).
|
||||
- KEINE Nummerierungen (1., 2., 3. etc.) verwenden – nur Aufzählungszeichen oder Absätze.
|
||||
- KEINE erklärenden Kommentare oder Hinweise hinter Überschriften (z.B. NICHT "Diagnose(n) – mit ICD-10-GM-Code in Klammern").
|
||||
- KEINE ICD-10-Codes bei Diagnosen.
|
||||
- KEINE Klammern mit Erklärungen wie "(falls erwähnt)", "(Lokalanästhesie, Vollnarkose etc.)".
|
||||
- Die Überschriften stehen ALLEIN, danach folgt direkt der Inhalt.
|
||||
- Die Operationsbeschreibung muss der LÄNGSTE und DETAILLIERTESTE Abschnitt sein.
|
||||
- Vollständige Sätze, sachlicher chirurgischer Stil.
|
||||
- Alle im Transkript genannten Details zur OP-Technik MÜSSEN aufgenommen werden.
|
||||
- Keine Sternchen (*), keine Sonderzeichen zur Hervorhebung, kein Markdown.
|
||||
- Keine Fakten erfinden – aber alle genannten Details ausführlich ausformulieren.
|
||||
- Abschnitte ohne Informationen im Transkript weglassen (ausser Komplikationen).
|
||||
""".strip()
|
||||
|
||||
OP_BERICHT_SHORTEN_PROMPT = """
|
||||
Fasse den folgenden OP-Bericht kürzer zusammen. Behalte alle medizinisch relevanten Fakten (Eingriff, Befund, Durchführung).
|
||||
KEINE Nummerierungen (1., 2., 3.), KEINE erklärenden Kommentare hinter Überschriften, KEINE ICD-10-Codes.
|
||||
Keine Sternchen (*). Verwende NIEMALS «ß», sondern IMMER «ss» (Schweizer Rechtschreibung). Ausgabe: nur der gekürzte OP-Bericht.
|
||||
""".strip()
|
||||
|
||||
OP_BERICHT_EXPAND_PROMPT = """
|
||||
Schreibe den folgenden OP-Bericht ausführlicher um. Insbesondere die Operationsbeschreibung/Durchführung MUSS deutlich detaillierter werden: Schritt-für-Schritt-Ablauf, Schnittführung, Präparation, Technik, Materialien, Nahtmaterial, Wundverschluss – alles in vollständigen Sätzen.
|
||||
Behalte die Struktur. KEINE Nummerierungen (1., 2., 3.), KEINE erklärenden Kommentare hinter Überschriften, KEINE ICD-10-Codes.
|
||||
Keine Sternchen (*). Verwende NIEMALS «ß», sondern IMMER «ss» (Schweizer Rechtschreibung). Ausgabe: nur der ausführlichere OP-Bericht.
|
||||
""".strip()
|
||||
|
||||
RECIPE_PROMPT = """
|
||||
Du bist ärztliche Dokumentationsassistenz.
|
||||
|
||||
Erstelle eine Medikamenten-/Rezeptliste. Nur Inhalte, die für eine Rezeptierung relevant sind: Medikamente, Dosierungen, Verordnungen. KEINE Kontrollen, KEINE allgemeinen Empfehlungen, KEINE Laborkontrollen.
|
||||
|
||||
Regeln:
|
||||
- NUR Medikamente und Therapien, die verordnet werden sollen (Rezept-Inhalt).
|
||||
- Kontrollen, Verlaufskontrollen, Labor etc. weglassen.
|
||||
- Gib NUR Bereiche aus, zu denen tatsächlich Angaben vorliegen.
|
||||
- Übernimm nur Angaben, die ausdrücklich genannt wurden – nichts ergänzen.
|
||||
- Gib Dosierungen nur wieder, wenn sie vorliegen.
|
||||
- Verwende KEINE Sternchen (*), keine Raute (#), kein Markdown zur Hervorhebung.
|
||||
- Verwende IMMER korrekte deutsche Umlaute: ä, ö, ü (NIEMALS ae, oe, ue).
|
||||
- Sprache: Deutsch, stichpunktartig.
|
||||
- Verwende NIEMALS «ß», sondern IMMER «ss» (Schweizer Rechtschreibung).
|
||||
""".strip()
|
||||
|
||||
INTERACTION_PROMPT = """
|
||||
Du bist ärztliche/pharmazeutische Assistenz.
|
||||
|
||||
Aufgabe: Prüfe die folgenden Medikamente auf mögliche Wechselwirkungen.
|
||||
|
||||
Gib eine strukturierte Übersicht auf Deutsch:
|
||||
- Relevante Wechselwirkungen zwischen den genannten Substanzen
|
||||
- Kontraindikationen oder Vorsichtshinweise
|
||||
- Falls keine bekannten Interaktionen: kurzer Hinweis
|
||||
|
||||
Nur klinisch relevante Informationen. Keine Sterne, keine Ziffern. Stichpunktartig. Verwende NIEMALS «ß», sondern IMMER «ss» (Schweizer Rechtschreibung).
|
||||
""".strip()
|
||||
|
||||
KI_PRUEFEN_PROMPT = """
|
||||
Du bist ein ärztlicher Dokumentationsassistent (Deutsch).
|
||||
|
||||
Aufgabe: Prüfe die Krankengeschichte auf Logik, Diagnose-Therapie-Passung und innere Zusammenhänge.
|
||||
|
||||
Regeln:
|
||||
- Sehr knapp und zusammengefasst antworten (max. 3–5 Stichpunkte oder ein kurzer Absatz).
|
||||
- Nur wesentliche Unstimmigkeiten oder Verbesserungsvorschläge nennen.
|
||||
- Wenn alles schlüssig ist: eine kurze Bestätigung in einem Satz.
|
||||
- Keine Sterne, keine Ziffern, keine Wiederholungen. Inhaltlich korrekt und präzise.
|
||||
- WICHTIG: Nach jedem Satz einen Absatz (Leerzeile) einfügen, zur besseren Übersicht.
|
||||
- Verwende NIEMALS «ß», sondern IMMER «ss» (Schweizer Rechtschreibung).
|
||||
""".strip()
|
||||
|
||||
MERGE_PROMPT = """
|
||||
Du bist ein ärztlicher Dokumentationsassistent (Deutsch).
|
||||
|
||||
AUFGABE:
|
||||
Es liegt eine bestehende Krankengeschichte (SOAP) und ein ergänzendes Transkript vor.
|
||||
Fasse das ergänzende Transkript zusammen und füge die neuen Informationen in die bestehende KG ein.
|
||||
|
||||
FORMATIERUNG (ZWINGEND):
|
||||
• Verwende als Aufzählungszeichen IMMER einen Punkt (•), NIEMALS einen Strich (-).
|
||||
• Nach jeder Abschnittsüberschrift eine LEERZEILE, dann die Aufzählungspunkte.
|
||||
• Zwischen den einzelnen Aufzählungspunkten (•) KEINE Leerzeile – Aufzählungen direkt untereinander schreiben.
|
||||
• Überschriften OHNE Doppelpunkt (z.B. "Subjektiv" statt "Subjektiv:").
|
||||
|
||||
REGELN:
|
||||
• SCHWEIZER RECHTSCHREIBUNG: Verwende NIEMALS das Zeichen «ß» (Eszett). Ersetze es IMMER durch «ss» (z. B. «Strasse» statt «Straße», «gross» statt «groß», «Verschluss» statt «Verschluß»). Das gilt für ALLE Wörter im gesamten Text.
|
||||
• Gleiche Überschriften beibehalten (Subjektiv, ggf. Sozialanamnese, ggf. Familienanamnese, Objektiv, Diagnose, Therapie bzw. Procedere).
|
||||
• Objektiv = Befundbeschreibung (was gesehen/untersucht). Diagnose: PFLICHT – jede Zeile mit ICD-10-GM in eckigen Klammern, Format „• Bezeichnung [ICD-Code]" z. B. „• Aktinische Keratosen [L57.0]". Keine Diagnose ohne [Code]. Nicht überinterpretieren (z. B. Stress im Beruf nicht als psychische Diagnose kodieren).
|
||||
• Diagnosen nach klinischer Relevanz sortieren (wichtigste zuerst): Malignome → Präkanzerosen → schwere/chronische Erkrankungen → akute Erkrankungen → leichte/banale Diagnosen.
|
||||
• Sozialanamnese und Familienanamnese nur, wenn im Transkript erwähnt.
|
||||
• Neue Punkte in die passenden Abschnitte einfügen (anhängen), keine Doppelungen.
|
||||
• Bestehende Formulierungen unverändert lassen; nur ergänzen.
|
||||
• Keine Meta-Kommentare, keine Warnungen im Fließtext.
|
||||
• Ausgabe: die vollständige, aktualisierte KG (bestehende + Ergänzungen).
|
||||
""".strip()
|
||||
52
AzA march 2026 - Kopie (18)/aza_rate_limit.py
Normal file
@@ -0,0 +1,52 @@
|
||||
import time
|
||||
from dataclasses import dataclass
|
||||
from typing import Dict
|
||||
|
||||
from fastapi import HTTPException, status
|
||||
|
||||
|
||||
@dataclass
class Bucket:
    """Mutable per-key token-bucket state."""
    tokens: float   # tokens currently available for this key
    last_ts: float  # epoch seconds at the last refill


class TokenBucketRateLimiter:
    """Simple in-memory token bucket limiter.

    capacity: max burst
    refill_rate: tokens per second
    """

    def __init__(self, capacity: int, refill_rate: float):
        self.capacity = float(capacity)
        self.refill_rate = float(refill_rate)
        # NOTE(review): buckets are never pruned, so distinct keys (e.g. IPs)
        # grow this dict without bound over the process lifetime — confirm acceptable.
        self._buckets: Dict[str, Bucket] = {}

    def _get_bucket(self, key: str) -> Bucket:
        """Return the bucket for *key*, refilled up to *now* (created full on first use)."""
        now = time.time()
        bucket = self._buckets.get(key)
        if bucket is None:
            # First sighting of this key: start with a full burst allowance.
            bucket = Bucket(tokens=self.capacity, last_ts=now)
            self._buckets[key] = bucket
            return bucket

        # Refill proportionally to elapsed wall time, capped at capacity.
        elapsed = now - bucket.last_ts
        bucket.tokens = min(self.capacity, bucket.tokens + elapsed * self.refill_rate)
        bucket.last_ts = now
        return bucket

    def consume(self, key: str, cost: float = 1.0) -> None:
        """Spend *cost* tokens for *key*; raise HTTP 429 when the bucket is too empty."""
        bucket = self._get_bucket(key)
        if bucket.tokens < cost:
            raise HTTPException(
                status_code=status.HTTP_429_TOO_MANY_REQUESTS,
                detail="Rate limit exceeded",
            )
        bucket.tokens -= cost


# Defaults:
# - Per token: ~30 req/min (0.5 tokens/sec), burst 10
# - Per IP: ~60 req/min (1.0 tokens/sec), burst 20
default_token_limiter = TokenBucketRateLimiter(capacity=10, refill_rate=0.5)
default_ip_limiter = TokenBucketRateLimiter(capacity=20, refill_rate=1.0)
|
||||
103
AzA march 2026 - Kopie (18)/aza_security.py
Normal file
@@ -0,0 +1,103 @@
|
||||
import hmac
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import List, Optional
|
||||
|
||||
from fastapi import Header, HTTPException, status
|
||||
|
||||
_DEV_TOKEN = "AZA_LOCAL_TOKEN_123456"


def _read_fallback_tokens(token_file: Optional[str] = None) -> List[str]:
    """Read API tokens from the fallback token file.

    Supports:
    - a single token in the file,
    - multiple tokens separated by newlines,
    - multiple tokens separated by commas.
    Relative paths are resolved against this module's directory so lookup
    works regardless of cwd. Returns [] when the file is missing or
    unreadable (deliberate best effort).
    """
    base_dir = Path(__file__).resolve().parent
    if token_file is None:
        path = base_dir / "backend_token.txt"
    else:
        path = Path(token_file)
        if not path.is_absolute():
            path = base_dir / path
    try:
        raw = path.read_text(encoding="utf-8")
    except Exception:
        # Missing/unreadable file -> no fallback tokens.
        return []

    # Normalise comma separators to newlines, then keep non-empty entries.
    return [piece.strip() for piece in raw.replace(",", "\n").splitlines() if piece.strip()]


def get_required_api_tokens() -> List[str]:
    """Collect every currently accepted API token, in priority order.

    Token rotation support:
    - MEDWORK_API_TOKENS may contain comma-separated tokens (preferred)
    - MEDWORK_API_TOKEN may contain a single token (legacy) or comma-separated tokens
    - backend_token.txt fallback supports single or multiple tokens
    """
    collected: List[str] = []

    # Environment sources, preferred (multi) first; both accept comma lists.
    for env_name in ("MEDWORK_API_TOKENS", "MEDWORK_API_TOKEN"):
        raw = os.getenv(env_name)
        if raw and raw.strip():
            collected.extend(piece.strip() for piece in raw.split(",") if piece.strip())

    # Fallback file (absolute path, works regardless of cwd).
    collected.extend(_read_fallback_tokens())

    # SECURITY NOTE(review): this hard-coded local dev token is accepted
    # unconditionally, including in production deployments — consider gating
    # it behind an explicit environment flag.
    if _DEV_TOKEN not in collected:
        collected.append(_DEV_TOKEN)

    # De-duplicate while preserving order.
    unique: List[str] = []
    for candidate in collected:
        if candidate and candidate not in unique:
            unique.append(candidate)

    # Unreachable in practice (the dev token is always appended), kept as a guard.
    if not unique:
        raise RuntimeError("No API token configured (MEDWORK_API_TOKENS / MEDWORK_API_TOKEN or backend_token.txt).")

    return unique
|
||||
|
||||
|
||||
def require_api_token(
    x_api_token: Optional[str] = Header(default=None, alias="X-API-Token"),
    authorization: Optional[str] = Header(default=None, alias="Authorization"),
) -> None:
    """FastAPI dependency: accept the API token via X-API-Token or Bearer auth.

    Raises HTTP 401 when no token is supplied or the supplied token matches
    none of the configured tokens. Comparison uses hmac.compare_digest to
    avoid timing side channels.
    """
    supplied = x_api_token
    # Fall back to "Authorization: Bearer <token>" when no X-API-Token header.
    if not supplied and authorization and authorization.lower().startswith("bearer "):
        supplied = authorization.split(" ", 1)[1].strip()
    if not supplied:
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Unauthorized")

    accepted = get_required_api_tokens()
    if not any(hmac.compare_digest(supplied, candidate) for candidate in accepted):
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Unauthorized")
|
||||
|
||||
|
||||
def get_admin_token() -> Optional[str]:
    """Return the configured admin token from AZA_ADMIN_TOKEN, or None when unset."""
    return os.environ.get("AZA_ADMIN_TOKEN")
|
||||
|
||||
|
||||
def require_admin_token(x_admin_token: Optional[str] = Header(default=None, alias="X-Admin-Token")) -> None:
    """FastAPI dependency guarding admin endpoints via the X-Admin-Token header.

    Raises HTTP 503 when no admin token is configured at all, and HTTP 401
    when the header is absent or does not match (constant-time compare).
    """
    expected = get_admin_token()
    if not expected:
        # No configured token means the admin surface is deliberately disabled.
        raise HTTPException(status_code=503, detail="Admin token not configured")
    supplied_ok = bool(x_admin_token) and hmac.compare_digest(x_admin_token, expected)
    if not supplied_ok:
        raise HTTPException(status_code=401, detail="Unauthorized")
|
||||
505
AzA march 2026 - Kopie (18)/aza_settings_mixin.py
Normal file
@@ -0,0 +1,505 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
AzaSettingsMixin – Einstellungsfenster (KG-Modell, Templates, Autotext, Add-ons, etc.).
|
||||
"""
|
||||
|
||||
import tkinter as tk
|
||||
from tkinter import ttk, messagebox
|
||||
from tkinter.scrolledtext import ScrolledText
|
||||
|
||||
from aza_audit_log import log_event as _audit_log
|
||||
from aza_persistence import (
|
||||
load_settings_geometry,
|
||||
save_settings_geometry,
|
||||
load_templates_text,
|
||||
save_templates_text,
|
||||
save_autotext,
|
||||
save_model,
|
||||
_clamp_geometry_str,
|
||||
load_signature_name,
|
||||
save_signature_name,
|
||||
load_user_profile,
|
||||
)
|
||||
from aza_ui_helpers import add_resize_grip, add_font_scale_control
|
||||
from aza_config import MODEL_LABELS, ALLOWED_SUMMARY_MODELS
|
||||
|
||||
|
||||
class AzaSettingsMixin:
    """Mixin providing the settings window (KG model, templates, autotext, add-ons, etc.)."""

    def _open_settings(self):
        """Open the "Einstellungen" settings Toplevel.

        Builds every option widget, wires "live" callbacks that apply a change
        immediately, and persists everything via save_autotext()/save_model()
        when OK is pressed or the window is closed.
        """
        # Minimum window size; also used as the default geometry.
        SETTINGS_MIN_W, SETTINGS_MIN_H = 680, 520
        win = tk.Toplevel(self)
        win.title("Einstellungen")
        win.transient(self)
        win.minsize(SETTINGS_MIN_W, SETTINGS_MIN_H)
        win.attributes("-topmost", True)
        # Register the window so it can be tracked/closed centrally.
        if hasattr(self, "_aza_windows"):
            self._aza_windows.add(win)
        self._register_window(win)
        # Restore the last saved geometry, clamped to the minimum size.
        saved_geom = load_settings_geometry()
        if saved_geom:
            try:
                win.geometry(_clamp_geometry_str(saved_geom, SETTINGS_MIN_W, SETTINGS_MIN_H))
            except Exception:
                win.geometry(f"{SETTINGS_MIN_W}x{SETTINGS_MIN_H}")
        if not saved_geom:
            # First run: default size, centered on the screen.
            win.geometry(f"{SETTINGS_MIN_W}x{SETTINGS_MIN_H}")
            win.update_idletasks()
            sw = win.winfo_screenwidth()
            sh = win.winfo_screenheight()
            w, h = SETTINGS_MIN_W, SETTINGS_MIN_H
            x = max(0, (sw - w) // 2)
            y = max(0, (sh - h) // 2)
            win.geometry(f"{w}x{h}+{x}+{y}")
        add_resize_grip(win, SETTINGS_MIN_W, SETTINGS_MIN_H)
        add_font_scale_control(win)
        f = ttk.Frame(win, padding=16)
        f.pack(fill="both", expand=True)

        # --- KG model selection (combobox shows human-readable labels) ---
        ttk.Label(f, text="KG-Modell:").grid(row=0, column=0, sticky="w", pady=(0, 8))
        display_values = [MODEL_LABELS[m] for m in ALLOWED_SUMMARY_MODELS]
        current = MODEL_LABELS.get(self.model_var.get(), display_values[0])
        model_var_dialog = tk.StringVar(value=current)
        model_box = ttk.Combobox(
            f, textvariable=model_var_dialog, values=display_values, state="readonly", width=42
        )
        model_box.grid(row=0, column=1, sticky="ew", padx=(12, 0), pady=(0, 8))
        f.columnconfigure(1, weight=1)

        def open_templates():
            # Editor window for the free-text context passed to the AI.
            tw = tk.Toplevel(win)
            tw.title("Templates")
            tw.transient(win)
            tw.geometry("620x370")
            tw.configure(bg="#B9ECFA")
            tw.minsize(450, 280)
            tw.attributes("-topmost", True)
            self._register_window(tw)
            add_resize_grip(tw, 450, 280)
            add_font_scale_control(tw)
            tf = ttk.Frame(tw, padding=12)
            tf.pack(fill="both", expand=True)
            ttk.Label(tf, text="Kontext für die KI (z. B. „Ich bin ein Dermatologe und schreibe dermatologische Berichte.“). Wird bei der KG-Erstellung berücksichtigt:").pack(anchor="w")
            ttxt = ScrolledText(tf, wrap="word", font=self._text_font, bg="#F5FCFF", height=8)
            ttxt.pack(fill="both", expand=True, pady=(4, 8))
            ttxt.insert("1.0", load_templates_text())
            self._bind_autotext(ttxt)
            btn_f = ttk.Frame(tf)
            btn_f.pack(fill="x")

            def save_and_close():
                # OK: persist the template text and close the editor.
                save_templates_text(ttxt.get("1.0", "end").strip())
                tw.destroy()
            ttk.Button(btn_f, text="OK", command=save_and_close).pack(side="left", padx=(0, 8))
            ttk.Button(btn_f, text="Abbrechen", command=tw.destroy).pack(side="left")

        def do_reset():
            # Clears the stored template text immediately (no confirmation).
            save_templates_text("")
            messagebox.showinfo("Reset", "Template-Text wurde zurückgesetzt und ist jetzt leer.")

        ttk.Button(f, text="Templates", command=open_templates).grid(row=1, column=0, pady=(8, 4), sticky="w")
        ttk.Button(f, text="Reset", command=do_reset).grid(row=1, column=1, pady=(8, 4), sticky="w", padx=(12, 0))

        # --- Startup behaviour / auto-opened windows ---
        start_frame = ttk.LabelFrame(f, text="Startverhalten / Fenster", padding=(10, 5))
        start_frame.grid(row=2, column=0, columnspan=2, sticky="ew", pady=(8, 4))

        diktat_auto_var = tk.BooleanVar(value=self._autotext_data.get("diktat_auto_start", True))
        ttk.Checkbutton(start_frame, text="Diktat startet sofort (wenn aus: Aufnahme manuell starten)",
                        variable=diktat_auto_var).pack(anchor="w", pady=2)

        notizen_open_on_start_var = tk.BooleanVar(value=self._autotext_data.get("notizen_open_on_start", True))
        ttk.Checkbutton(start_frame, text="Audionotiz beim Programmstart automatisch öffnen",
                        variable=notizen_open_on_start_var).pack(anchor="w", pady=2)

        kommentare_auto_var = tk.BooleanVar(value=self._autotext_data.get("kommentare_auto_open", False))
        ttk.Checkbutton(start_frame, text="Kommentare-Fenster beim Programmstart automatisch öffnen",
                        variable=kommentare_auto_var).pack(anchor="w", pady=2)

        empfang_auto_var = tk.BooleanVar(value=self._autotext_data.get("empfang_auto_open", False))
        ttk.Checkbutton(start_frame, text="Empfang-Fenster beim Programmstart automatisch öffnen",
                        variable=empfang_auto_var).pack(anchor="w", pady=2)

        def _live_textbloecke_visible(*_args):
            # Live callback: show/hide the text-block container immediately.
            vis = bool(textbloecke_visible_var.get())
            self._autotext_data["textbloecke_visible"] = vis
            try:
                if vis:
                    self._textbloecke_container.pack(fill="x", before=self._textbloecke_anchor)
                else:
                    self._textbloecke_container.pack_forget()
                self.update_idletasks()
            except Exception:
                pass

        textbloecke_visible_var = tk.BooleanVar(value=self._autotext_data.get("textbloecke_visible", True))
        cb_textbloecke = ttk.Checkbutton(f, text="Textblöcke anzeigen (Inhalt bleibt gespeichert, wenn ausgeblendet)",
                                         variable=textbloecke_visible_var, command=_live_textbloecke_visible)
        cb_textbloecke.grid(row=3, column=0, columnspan=2, sticky="w", pady=(4, 2))

        def _live_addon_visible(*_args):
            # Live callback: show/hide the add-on button row immediately.
            vis = bool(addon_visible_var.get())
            self._autotext_data["addon_visible"] = vis
            try:
                if vis:
                    self._addon_container.pack(fill="x", before=self._addon_anchor)
                    self._update_addon_buttons_visibility()
                else:
                    self._addon_container.pack_forget()
                self.update_idletasks()
            except Exception:
                pass

        addon_visible_var = tk.BooleanVar(value=self._autotext_data.get("addon_visible", True))
        cb_addon = ttk.Checkbutton(f, text="Add-ons anzeigen", variable=addon_visible_var,
                                   command=_live_addon_visible)
        cb_addon.grid(row=4, column=0, columnspan=2, sticky="w", pady=(4, 2))

        def _live_logo_visible(*_args):
            # Live callback: show/hide the clickable logo immediately.
            vis = bool(logo_visible_var.get())
            self._autotext_data["logo_visible"] = vis
            try:
                if vis:
                    self._logo_frame.place(relx=0.01, rely=0.97, anchor="sw")
                else:
                    self._logo_frame.place_forget()
                self.update_idletasks()
            except Exception:
                pass

        logo_visible_var = tk.BooleanVar(value=self._autotext_data.get("logo_visible", True))
        cb_logo = ttk.Checkbutton(f, text="Logo anzeigen (Klick auf Logo startet/stoppt Aufnahme)",
                                  variable=logo_visible_var, command=_live_logo_visible)
        cb_logo.grid(row=4, column=1, sticky="w", pady=(4, 2), padx=(12, 0))

        # Sub-category: which individual add-on buttons should be displayed?
        addon_buttons_frame = ttk.LabelFrame(f, text="Welche Add-on-Buttons anzeigen?", padding=(10, 5))
        addon_buttons_frame.grid(row=5, column=0, columnspan=2, sticky="ew", pady=(8, 4))

        addon_buttons = self._autotext_data.get("addon_buttons", {})
        addon_button_vars = {}

        # (button_id, display label) — button_id is the persisted settings key.
        addon_button_options = [
            ("uebersetzer", "Übersetzer (provisorisch)"),
            ("email", "E-Mail"),
            ("autotext", "Autotext"),
            ("whatsapp", "WhatsApp"),
            ("docapp", "MedWork"),
            ("todo", "To-do"),
            ("macro", "Makro starten"),
            ("kongresse", "Kongresse"),
            ("news", "News"),
            ("empfang", "An Empfang senden"),
        ]

        todo_auto_open_var = tk.BooleanVar(
            value=self._autotext_data.get("todo_auto_open", True))

        def _live_addon_toggle(*_args):
            # Live callback: persist all per-button flags and refresh visibility.
            self._autotext_data["addon_buttons"] = {
                bid: bool(v.get()) for bid, v in addon_button_vars.items()
            }
            try:
                self._update_addon_buttons_visibility()
            except Exception:
                pass

        grid_row = 0
        for button_id, label in addon_button_options:
            var = tk.BooleanVar(value=addon_buttons.get(button_id, True))
            addon_button_vars[button_id] = var
            cb = ttk.Checkbutton(addon_buttons_frame, text=label, variable=var,
                                 command=_live_addon_toggle)
            cb.grid(row=grid_row, column=0, sticky="w", padx=10, pady=2)
            grid_row += 1
            # The "todo" entry gets an extra indented auto-open sub-option.
            if button_id == "todo":
                cb_auto = ttk.Checkbutton(
                    addon_buttons_frame,
                    text=" ↳ To-do beim Start automatisch öffnen",
                    variable=todo_auto_open_var)
                cb_auto.grid(row=grid_row, column=0, sticky="w", padx=10, pady=(0, 2))
                grid_row += 1

        kg_auto_delete_var = tk.BooleanVar(value=self._autotext_data.get("kg_auto_delete_old", False))
        cb_kg_auto = ttk.Checkbutton(f, text="KG-Einträge älter als 2 Wochen automatisch löschen (Speicher schonen)", variable=kg_auto_delete_var)
        cb_kg_auto.grid(row=6, column=0, columnspan=2, sticky="w", pady=(4, 2))

        # Status indicator color (label -> stored hex value; "hidden" hides it)
        status_color_frame = ttk.LabelFrame(f, text="Statusanzeige", padding=(10, 5))
        status_color_frame.grid(row=7, column=0, columnspan=2, sticky="ew", pady=(8, 4))
        _status_color_options = {"Standard (Orange)": "#BD4500", "Blau": "#1a4d6d", "Ausblenden": "hidden"}
        _current_sc = self._autotext_data.get("status_color", "#BD4500")
        # Map the stored value back to its display label (default: orange).
        _sc_label = "Standard (Orange)"
        for _lbl, _val in _status_color_options.items():
            if _val == _current_sc:
                _sc_label = _lbl
                break
        status_color_var = tk.StringVar(value=_sc_label)

        def _live_status_color(*_args):
            # Live callback: apply the selected status color immediately.
            sc_sel = status_color_var.get()
            sc_v = _status_color_options.get(sc_sel, "#BD4500")
            self._autotext_data["status_color"] = sc_v
            try:
                self._apply_status_color()
            except Exception:
                pass

        for sc_col, (sc_label, sc_val) in enumerate(_status_color_options.items()):
            ttk.Radiobutton(status_color_frame, text=sc_label, variable=status_color_var,
                            value=sc_label, command=_live_status_color).grid(row=0, column=sc_col, padx=8, pady=2, sticky="w")

        autotext_var = tk.BooleanVar(value=self._autotext_data.get("enabled", True))
        cb_autotext = ttk.Checkbutton(f, text="Autotext (Abkürzungen z. B. „mfg“ → „mit freundlichen Grüßen“)", variable=autotext_var)
        cb_autotext.grid(row=8, column=0, columnspan=2, sticky="w", pady=(4, 2))

        def open_autotext_manage():
            # Opens the abbreviation management dialog on top of this window.
            self._open_autotext_dialog(win)
        ttk.Button(f, text="Autotext verwalten", command=open_autotext_manage).grid(row=9, column=0, pady=(2, 4), sticky="w")

        autocopy_var = tk.BooleanVar(
            value=self._autotext_data.get("autocopy_after_diktat", True)
        )
        cb_autocopy = ttk.Checkbutton(
            f,
            text="Autocopy: Nach Diktat/Transkription automatisch in Zwischenablage kopieren",
            variable=autocopy_var,
        )
        cb_autocopy.grid(row=10, column=0, columnspan=2, sticky="w", pady=(4, 2))

        # The right-click-paste variable is shared app-wide; create it once.
        if not hasattr(self, "_rclick_paste_var"):
            self._rclick_paste_var = tk.BooleanVar(
                value=bool(self._autotext_data.get("global_right_click_paste", True)))

        cb_global_right_click = ttk.Checkbutton(
            f,
            text="Global: Rechtsklick fügt direkt ein (ohne Kontextmenü, nur externe Apps)",
            variable=self._rclick_paste_var,
            command=self._toggle_rclick_paste,
        )
        cb_global_right_click.grid(row=11, column=0, columnspan=2, sticky="w", pady=(4, 2))

        # --- Signature section: either the profile name or an explicit override ---
        sig_frame = ttk.LabelFrame(f, text="Unterschrift / Signatur", padding=(10, 5))
        sig_frame.grid(row=12, column=0, columnspan=2, sticky="ew", pady=(8, 4))
        sig_frame.columnconfigure(1, weight=1)

        profile_name = self._user_profile.get("name", "")
        # An empty stored signature means "use the profile name".
        current_sig = load_signature_name(fallback_to_profile=False)
        use_profile = not bool(current_sig)

        sig_auto_var = tk.BooleanVar(value=use_profile)
        sig_name_var = tk.StringVar(value=current_sig if current_sig else profile_name)

        cb_sig_auto = ttk.Checkbutton(sig_frame,
                                      text=f"Profilname verwenden: {profile_name}" if profile_name else "Profilname verwenden",
                                      variable=sig_auto_var)
        cb_sig_auto.grid(row=0, column=0, columnspan=2, sticky="w", pady=(0, 4))

        ttk.Label(sig_frame, text="Abweichender Name:").grid(row=1, column=0, sticky="w", padx=(0, 8))
        ent_sig = ttk.Entry(sig_frame, textvariable=sig_name_var, width=36)
        ent_sig.grid(row=1, column=1, sticky="ew", pady=(0, 2))

        def _update_sig_entry(*_):
            # Disable the override entry while "use profile name" is active.
            if sig_auto_var.get():
                ent_sig.configure(state="disabled")
                sig_name_var.set(profile_name)
            else:
                ent_sig.configure(state="normal")
        sig_auto_var.trace_add("write", _update_sig_entry)
        _update_sig_entry()

        # Kept on self so on_ok() (and other code) can read the final choice.
        self._sig_auto_var = sig_auto_var
        self._sig_name_var = sig_name_var

        # --- Audio / microphone test ---
        audio_frame = ttk.LabelFrame(f, text="Audio / Mikrofon", padding=(10, 5))
        audio_frame.grid(row=13, column=0, columnspan=2, sticky="ew", pady=(8, 4))

        audio_status_var = tk.StringVar(value="")

        def _run_audio_test():
            # Runs a short synchronous recording test and reports the result inline.
            audio_status_var.set("Test läuft …")
            win.update_idletasks()
            try:
                from aza_audio import test_audio_device
                result = test_audio_device(duration_sec=1.5)
                if result["ok"]:
                    audio_status_var.set("✓ " + result["message"])
                else:
                    audio_status_var.set("✗ " + result["message"])
            except Exception as exc:
                audio_status_var.set(f"✗ Fehler: {exc}")

        ttk.Button(audio_frame, text="Audio-Test starten",
                   command=_run_audio_test).pack(side="left", padx=(0, 12))
        tk.Label(audio_frame, textvariable=audio_status_var,
                 font=("Segoe UI", 9), fg="#333", bg="#F0F0F0",
                 wraplength=400, justify="left").pack(side="left", fill="x", expand=True)

        # --- Privacy & legal: view documents, manage AI consent, export logs ---
        legal_frame = ttk.LabelFrame(f, text="Datenschutz & Recht", padding=(10, 5))
        legal_frame.grid(row=14, column=0, columnspan=2, sticky="ew", pady=(8, 4))
        ttk.Button(legal_frame, text="Datenschutzerklärung anzeigen",
                   command=lambda: self._show_legal_text(win, "Datenschutzerklärung", "privacy_policy.md")
                   ).grid(row=0, column=0, padx=(0, 8), pady=2, sticky="w")
        ttk.Button(legal_frame, text="KI-Einwilligung anzeigen",
                   command=lambda: self._show_legal_text(win, "KI-Einwilligung", "ai_consent.md")
                   ).grid(row=0, column=1, padx=0, pady=2, sticky="w")

        from aza_consent import get_consent_status, record_revoke, has_valid_consent, record_consent, export_consent_log
        uid = self._user_profile.get("name", "default")
        consent_ok = has_valid_consent(uid)
        consent_status_var = tk.StringVar(
            value=f"KI-Einwilligung: {'Erteilt' if consent_ok else 'Nicht erteilt / widerrufen'}")
        ttk.Label(legal_frame, textvariable=consent_status_var).grid(
            row=1, column=0, columnspan=2, sticky="w", pady=(6, 2))

        def toggle_consent():
            # Toggles between granting and revoking the AI consent; both paths
            # update the status label and the button text in place.
            nonlocal consent_ok
            _uid = self._user_profile.get("name", "default")
            if has_valid_consent(_uid):
                if messagebox.askyesno("Einwilligung widerrufen",
                                       "Möchten Sie Ihre KI-Einwilligung widerrufen?\n\n"
                                       "KI-Funktionen (Transkription, KG-Erstellung,\n"
                                       "Interaktionsprüfung) werden danach gesperrt.",
                                       parent=win):
                    record_revoke(_uid, source="ui")
                    _audit_log("CONSENT_REVOKE", _uid)
                    consent_ok = False
                    consent_status_var.set("KI-Einwilligung: Widerrufen")
                    btn_consent.configure(text="KI-Einwilligung erteilen")
                    messagebox.showinfo("Widerruf", "Ihre KI-Einwilligung wurde widerrufen und protokolliert.", parent=win)
            else:
                # _check_ai_consent() runs the consent dialog and records approval.
                if self._check_ai_consent():
                    consent_ok = True
                    consent_status_var.set("KI-Einwilligung: Erteilt")
                    btn_consent.configure(text="KI-Einwilligung widerrufen")

        btn_consent = ttk.Button(legal_frame,
                                 text="KI-Einwilligung widerrufen" if consent_ok else "KI-Einwilligung erteilen",
                                 command=toggle_consent)
        btn_consent.grid(row=2, column=0, padx=(0, 8), pady=2, sticky="w")

        def do_export():
            # Exports both the consent and the audit log and shows the paths.
            from aza_audit_log import export_audit_log
            try:
                path_consent = export_consent_log()
                path_audit = export_audit_log()
                _audit_log("EXPORT", uid, detail="consent+audit log")
                messagebox.showinfo("Export",
                                    f"Consent-Log exportiert:\n{path_consent}\n\n"
                                    f"Audit-Log exportiert:\n{path_audit}", parent=win)
            except Exception as e:
                messagebox.showerror("Export-Fehler", str(e), parent=win)

        ttk.Button(legal_frame, text="Logs exportieren (Audit)",
                   command=do_export).grid(row=2, column=1, padx=0, pady=2, sticky="w")

        def save_and_close():
            # Persist the window geometry (best effort) and tear the window down.
            try:
                save_settings_geometry(win.geometry())
            except Exception:
                pass
            if hasattr(self, "_aza_windows"):
                self._aza_windows.discard(win)
            win.destroy()

        def on_ok():
            """Persist all settings, close the window, then apply UI changes."""
            # Map the displayed model label back to its model id.
            selected_label = model_var_dialog.get().strip()
            for model_id, label in MODEL_LABELS.items():
                if label == selected_label:
                    self.model_var.set(model_id)
                    save_model(model_id)
                    break
            self._autotext_data["enabled"] = bool(autotext_var.get())
            self._autotext_data["diktat_auto_start"] = bool(diktat_auto_var.get())
            self._autotext_data["notizen_open_on_start"] = bool(notizen_open_on_start_var.get())
            self._autotext_data["textbloecke_visible"] = bool(textbloecke_visible_var.get())
            self._autotext_data["addon_visible"] = bool(addon_visible_var.get())

            # Persist the individual add-on button settings
            self._autotext_data["addon_buttons"] = {
                button_id: bool(var.get())
                for button_id, var in addon_button_vars.items()
            }

            self._autotext_data["kg_auto_delete_old"] = bool(kg_auto_delete_var.get())
            self._autotext_data["todo_auto_open"] = bool(todo_auto_open_var.get())
            self._autotext_data["autocopy_after_diktat"] = bool(autocopy_var.get())
            self._autotext_data["global_right_click_paste"] = bool(self._rclick_paste_var.get())
            self._autotext_data["kommentare_auto_open"] = bool(kommentare_auto_var.get())
            self._autotext_data["empfang_auto_open"] = bool(empfang_auto_var.get())
            self._autotext_data["logo_visible"] = bool(logo_visible_var.get())

            # Empty stored signature == "derive from profile name".
            if self._sig_auto_var.get():
                save_signature_name("")
            else:
                save_signature_name(self._sig_name_var.get().strip())

            # Persist the status indicator color
            sc_selected = status_color_var.get()
            sc_value = _status_color_options.get(sc_selected, "#BD4500")
            self._autotext_data["status_color"] = sc_value

            save_autotext(self._autotext_data)
            save_and_close()
            # Apply UI updates after the settings window is closed (prevents a hang)

            def _apply_ui():
                try:
                    if self._autotext_data["textbloecke_visible"]:
                        self._textbloecke_container.pack(fill="x", before=self._textbloecke_anchor)
                    else:
                        self._textbloecke_container.pack_forget()
                    if self._autotext_data["addon_visible"]:
                        self._addon_container.pack(fill="x", before=self._addon_anchor)
                        self._update_addon_buttons_visibility()
                        self.update_idletasks()
                        # Grow the main window if showing the add-ons made it too small.
                        h = self.winfo_height()
                        if h < 500:
                            self.geometry(f"{self.winfo_width()}x500")
                    else:
                        self._addon_container.pack_forget()
                except Exception:
                    pass
                try:
                    self._apply_status_color()
                except Exception:
                    pass
                self.update_idletasks()
            # Defer so the Toplevel is fully destroyed before repacking widgets.
            self.after(50, _apply_ui)

        win.protocol("WM_DELETE_WINDOW", save_and_close)
        ttk.Button(f, text="OK", command=on_ok).grid(row=15, column=0, columnspan=2, pady=(12, 0))
        win.focus_set()

    def _show_legal_text(self, parent, title: str, filename: str):
        """Display a legal text (Markdown file from the bundled ./legal directory) read-only."""
        import os
        legal_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "legal")
        filepath = os.path.join(legal_dir, filename)

        try:
            with open(filepath, "r", encoding="utf-8") as fh:
                content = fh.read()
        except FileNotFoundError:
            messagebox.showerror("Fehler", f"Datei nicht gefunden:\n{filepath}", parent=parent)
            return
        except OSError as e:
            messagebox.showerror("Fehler", f"Datei konnte nicht gelesen werden:\n{e}", parent=parent)
            return

        tw = tk.Toplevel(parent)
        tw.title(title)
        tw.transient(parent)
        tw.geometry("720x600")
        tw.minsize(500, 400)
        tw.attributes("-topmost", True)
        self._register_window(tw)

        from aza_ui_helpers import add_resize_grip, add_font_scale_control
        add_resize_grip(tw, 500, 400)
        add_font_scale_control(tw)

        frame = ttk.Frame(tw, padding=12)
        frame.pack(fill="both", expand=True)

        # Read-only text viewer (disabled after inserting the content).
        txt = ScrolledText(frame, wrap="word", font=("Segoe UI", 10), bg="#FAFAFA")
        txt.pack(fill="both", expand=True, pady=(0, 8))
        txt.insert("1.0", content)
        txt.configure(state="disabled")

        ttk.Button(frame, text="Schliessen", command=tw.destroy).pack(anchor="e")
|
||||
46
AzA march 2026 - Kopie (18)/aza_stripe_idempotency.py
Normal file
@@ -0,0 +1,46 @@
|
||||
import sqlite3
|
||||
import time
|
||||
|
||||
|
||||
def ensure_stripe_events_table(conn: sqlite3.Connection) -> None:
    """Create the stripe_events table if it doesn't exist.

    The table records webhook event ids so duplicate deliveries can be
    detected (idempotency).
    """
    conn.execute(
        """
        CREATE TABLE IF NOT EXISTS stripe_events (
            event_id TEXT PRIMARY KEY,
            received_at INTEGER NOT NULL
        );
        """
    )
    conn.commit()


def try_claim_event(conn: sqlite3.Connection, event_id: str) -> bool:
    """Attempt to claim *event_id* for processing.

    Returns True when the id was inserted (first delivery), False when it was
    already present (duplicate webhook delivery). The PRIMARY KEY constraint
    provides the atomicity.
    """
    ensure_stripe_events_table(conn)

    try:
        conn.execute(
            "INSERT INTO stripe_events (event_id, received_at) VALUES (?, ?);",
            (event_id, int(time.time())),
        )
    except sqlite3.IntegrityError:
        # Duplicate delivery
        return False
    conn.commit()
    return True
||||
|
||||
|
||||
def prune_old_events(conn: sqlite3.Connection, max_age_days: int = 30) -> None:
    """Delete claim records older than *max_age_days* (optional cleanup to
    avoid unbounded growth of the stripe_events table)."""
    oldest_allowed = int(time.time()) - max_age_days * 86400  # 86400 s per day
    conn.execute("DELETE FROM stripe_events WHERE received_at < ?;", (oldest_allowed,))
    conn.commit()
|
||||
45
AzA march 2026 - Kopie (18)/aza_style.py
Normal file
@@ -0,0 +1,45 @@
|
||||
# -*- coding: utf-8 -*-
"""AZA premium design system – shared styling constants (colors, fonts, dimensions)."""

# ─── Colors ───
# Base surfaces
BG = "#F5F7FA"                  # window background
CARD_BG = "#FFFFFF"             # card background (resting)
CARD_HOVER_BG = "#F8FAFF"       # card background on hover
CARD_BORDER = "#E2E8F0"         # card border (resting)
CARD_HOVER_BORDER = "#0984E3"   # card border on hover (matches ACCENT)

# Accent palette (resting / hover / pressed-dark)
ACCENT = "#0984E3"
ACCENT_HOVER = "#0770C4"
ACCENT_DARK = "#005FA3"

# Text and generic chrome
TEXT = "#2D3436"       # primary text
SUBTLE = "#636E72"     # secondary / subtle text
BORDER = "#E2E8F0"     # generic border
FOOTER_BG = "#EDF2F7"  # footer bar background

# Semantic status colors
SUCCESS = "#00B894"
WARNING_AMBER = "#F0932B"
DANGER = "#D63031"
TURQUOISE = "#00CEC9"
CAPACITY_BLUE = "#0078D7"

# ─── Fonts ───
# All font tuples are (family, size[, weight]) as accepted by tkinter.
FONT_FAMILY = "Segoe UI"
FONT_TITLE = (FONT_FAMILY, 26, "bold")
FONT_SUBTITLE = (FONT_FAMILY, 11)
FONT_HEADING = (FONT_FAMILY, 16, "bold")
FONT_BODY = (FONT_FAMILY, 11)
FONT_BODY_BOLD = (FONT_FAMILY, 11, "bold")
FONT_SMALL = (FONT_FAMILY, 9)
FONT_TINY = (FONT_FAMILY, 8)
FONT_CARD_TITLE = (FONT_FAMILY, 12, "bold")
FONT_CARD_DESC = (FONT_FAMILY, 9)
FONT_ICON = (FONT_FAMILY, 26)

# ─── Dimensions ───
SPACING = 16  # default padding/margin in pixels
|
||||
|
||||
|
||||
def format_number_de(n: int) -> str:
    """Format *n* with U+2019 (') as the thousands separator (de-CH style).

    Note: the previous docstring claimed a dot ("Punkt") was used, but the
    implementation inserts the Swiss apostrophe U+2019 — the docstring now
    matches the actual behavior. Works for negative values as well.
    """
    return f"{n:,}".replace(",", "\u2019")
|
||||